author    Charlie Swanson <cswanson310@gmail.com>  2016-08-29 14:26:29 -0400
committer Charlie Swanson <cswanson310@gmail.com>  2016-09-01 14:08:25 -0400
commit    698cd2555dabf2ab6c1ed4c504d1e2546da0f57a (patch)
tree      206a17f69cf6a1720cb153ba90c29acfd7f565f2 /src/mongo/db/pipeline
parent    b1014fe1b40a69cd90b27cb336a170317eecc6b7 (diff)
SERVER-24153 Split document_source_test.cpp into one file per stage.
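
Tests for each stage now live in a dedicated document_source_<stage>_test.cpp
file, and the 'document_source_test' SConscript target lists each new file.
The per-stage files share a common shape; a minimal sketch (the suite and
test names here are illustrative, mirroring the tests added below):

    #include "mongo/platform/basic.h"

    #include "mongo/db/pipeline/aggregation_context_fixture.h"
    #include "mongo/db/pipeline/document.h"
    #include "mongo/db/pipeline/document_source.h"
    #include "mongo/db/pipeline/document_value_test_util.h"
    #include "mongo/unittest/unittest.h"

    namespace mongo {
    namespace {

    // AggregationContextFixture provides getExpCtx(); each suite aliases it.
    using ExampleAddFieldsTest = AggregationContextFixture;

    TEST_F(ExampleAddFieldsTest, ShouldAddFieldToDocument) {
        auto stage = DocumentSourceAddFields::create(BSON("a" << 10), getExpCtx());
        auto mock = DocumentSourceMock::create({Document{{"b", 2}}});
        stage->setSource(mock.get());

        auto next = stage->getNext();
        ASSERT_TRUE(next.isAdvanced());
        Document expected = Document{{"b", 2}, {"a", 10}};
        ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
        ASSERT_TRUE(stage->getNext().isEOF());
    }

    }  // namespace
    }  // namespace mongo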
Diffstat (limited to 'src/mongo/db/pipeline')
-rw-r--r--  src/mongo/db/pipeline/SConscript                                34
-rw-r--r--  src/mongo/db/pipeline/dependencies_test.cpp                    135
-rw-r--r--  src/mongo/db/pipeline/document_source.h                         29
-rw-r--r--  src/mongo/db/pipeline/document_source_add_fields.cpp            17
-rw-r--r--  src/mongo/db/pipeline/document_source_add_fields_test.cpp      137
-rw-r--r--  src/mongo/db/pipeline/document_source_bucket_auto_test.cpp     626
-rw-r--r--  src/mongo/db/pipeline/document_source_bucket_test.cpp          286
-rw-r--r--  src/mongo/db/pipeline/document_source_count_test.cpp           134
-rw-r--r--  src/mongo/db/pipeline/document_source_geo_near_test.cpp         85
-rw-r--r--  src/mongo/db/pipeline/document_source_graph_lookup_test.cpp      8
-rw-r--r--  src/mongo/db/pipeline/document_source_group_test.cpp          1048
-rw-r--r--  src/mongo/db/pipeline/document_source_limit_test.cpp           103
-rw-r--r--  src/mongo/db/pipeline/document_source_lookup_test.cpp          129
-rw-r--r--  src/mongo/db/pipeline/document_source_match.cpp                 12
-rw-r--r--  src/mongo/db/pipeline/document_source_match_test.cpp           343
-rw-r--r--  src/mongo/db/pipeline/document_source_mock_test.cpp             72
-rw-r--r--  src/mongo/db/pipeline/document_source_project.cpp               12
-rw-r--r--  src/mongo/db/pipeline/document_source_project_test.cpp         173
-rw-r--r--  src/mongo/db/pipeline/document_source_redact.cpp                 5
-rw-r--r--  src/mongo/db/pipeline/document_source_redact_test.cpp           61
-rw-r--r--  src/mongo/db/pipeline/document_source_replace_root_test.cpp    339
-rw-r--r--  src/mongo/db/pipeline/document_source_sample_test.cpp          387
-rw-r--r--  src/mongo/db/pipeline/document_source_sort_by_count_test.cpp   138
-rw-r--r--  src/mongo/db/pipeline/document_source_sort_test.cpp            352
-rw-r--r--  src/mongo/db/pipeline/document_source_test.cpp                4953
-rw-r--r--  src/mongo/db/pipeline/document_source_unwind_test.cpp          811
26 files changed, 5454 insertions, 4975 deletions
diff --git a/src/mongo/db/pipeline/SConscript b/src/mongo/db/pipeline/SConscript
index f7989a73ec7..48bc611eb55 100644
--- a/src/mongo/db/pipeline/SConscript
+++ b/src/mongo/db/pipeline/SConscript
@@ -116,19 +116,36 @@ env.Library(
env.CppUnitTest(
target='document_source_test',
- source='document_source_test.cpp',
+ source=[
+ 'document_source_add_fields_test.cpp',
+ 'document_source_bucket_auto_test.cpp',
+ 'document_source_bucket_test.cpp',
+ 'document_source_count_test.cpp',
+ 'document_source_geo_near_test.cpp',
+ 'document_source_group_test.cpp',
+ 'document_source_limit_test.cpp',
+ 'document_source_lookup_test.cpp',
+ 'document_source_match_test.cpp',
+ 'document_source_mock_test.cpp',
+ 'document_source_project_test.cpp',
+ 'document_source_redact_test.cpp',
+ 'document_source_replace_root_test.cpp',
+ 'document_source_sample_test.cpp',
+ 'document_source_sort_by_count_test.cpp',
+ 'document_source_sort_test.cpp',
+ 'document_source_test.cpp',
+ 'document_source_unwind_test.cpp',
+ ],
LIBDEPS=[
'document_source',
'document_source_lookup',
'document_value_test_util',
'$BUILD_DIR/mongo/db/auth/authorization_manager_mock_init',
+ '$BUILD_DIR/mongo/db/query/query_test_service_context',
'$BUILD_DIR/mongo/db/service_context',
'$BUILD_DIR/mongo/util/clock_source_mock',
- '$BUILD_DIR/mongo/executor/thread_pool_task_executor',
- '$BUILD_DIR/mongo/executor/network_interface_thread_pool',
- '$BUILD_DIR/mongo/executor/network_interface_factory'
- ],
- )
+ ],
+)
env.Library(
target='dependencies',
@@ -345,7 +362,10 @@ env.CppUnitTest(
env.CppUnitTest(
target='pipeline_test',
- source='pipeline_test.cpp',
+ source=[
+ 'dependencies_test.cpp',
+ 'pipeline_test.cpp',
+ ],
LIBDEPS=[
'$BUILD_DIR/mongo/db/auth/authorization_manager_mock_init',
'$BUILD_DIR/mongo/db/query/collation/collator_interface_mock',
diff --git a/src/mongo/db/pipeline/dependencies_test.cpp b/src/mongo/db/pipeline/dependencies_test.cpp
new file mode 100644
index 00000000000..9f2294d46aa
--- /dev/null
+++ b/src/mongo/db/pipeline/dependencies_test.cpp
@@ -0,0 +1,135 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <set>
+#include <string>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using std::set;
+using std::string;
+
+static const BSONObj metaTextScore = BSON("$meta"
+ << "textScore");
+
+template <size_t ArrayLen>
+set<string> arrayToSet(const char* (&array)[ArrayLen]) {
+ set<string> out;
+ for (size_t i = 0; i < ArrayLen; i++)
+ out.insert(array[i]);
+ return out;
+}
+
+TEST(DependenciesToProjectionTest, ShouldIncludeAllFieldsAndExcludeIdIfNotSpecified) {
+ const char* array[] = {"a", "b"};
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "b" << 1 << "_id" << 0));
+}
+
+TEST(DependenciesToProjectionTest, ShouldIncludeFieldEvenIfSuffixOfAnotherIncludedField) {
+ const char* array[] = {"a", "ab"};
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "ab" << 1 << "_id" << 0));
+}
+
+TEST(DependenciesToProjectionTest, ShouldNotIncludeSubFieldIfTopLevelAlreadyIncluded) {
+ const char* array[] = {"a", "b", "a.b"}; // a.b included by a
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "b" << 1 << "_id" << 0));
+}
+
+TEST(DependenciesToProjectionTest, ShouldIncludeIdIfNeeded) {
+ const char* array[] = {"a", "_id"};
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
+}
+
+TEST(DependenciesToProjectionTest, ShouldIncludeEntireIdEvenIfOnlyASubFieldIsNeeded) {
+ const char* array[] = {"a", "_id.a"}; // still include whole _id (SERVER-7502)
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
+}
+
+TEST(DependenciesToProjectionTest, ShouldNotIncludeSubFieldOfIdIfIdIncluded) {
+ const char* array[] = {"a", "_id", "_id.a"}; // handle both _id and subfield
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
+}
+
+TEST(DependenciesToProjectionTest, ShouldIncludeFieldPrefixedById) {
+ const char* array[] = {"a", "_id", "_id_a"}; // _id prefixed but non-subfield
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("_id_a" << 1 << "a" << 1 << "_id" << 1));
+}
+
+TEST(DependenciesToProjectionTest, ShouldOutputEmptyObjectIfEntireDocumentNeeded) {
+ const char* array[] = {"a"}; // fields ignored with needWholeDocument
+ DepsTracker deps;
+ deps.fields = arrayToSet(array);
+ deps.needWholeDocument = true;
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSONObj());
+}
+
+TEST(DependenciesToProjectionTest, ShouldOnlyRequestTextScoreIfEntireDocumentAndTextScoreNeeded) {
+ const char* array[] = {"a"}; // needTextScore with needWholeDocument
+ DepsTracker deps(DepsTracker::MetadataAvailable::kTextScore);
+ deps.fields = arrayToSet(array);
+ deps.needWholeDocument = true;
+ deps.setNeedTextScore(true);
+ ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON(Document::metaFieldTextScore << metaTextScore));
+}
+
+TEST(DependenciesToProjectionTest,
+ ShouldRequireFieldsAndTextScoreIfTextScoreNeededWithoutWholeDocument) {
+ const char* array[] = {"a"}; // needTextScore without needWholeDocument
+ DepsTracker deps(DepsTracker::MetadataAvailable::kTextScore);
+ deps.fields = arrayToSet(array);
+ deps.setNeedTextScore(true);
+ ASSERT_BSONOBJ_EQ(
+ deps.toProjection(),
+ BSON(Document::metaFieldTextScore << metaTextScore << "a" << 1 << "_id" << 0));
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source.h b/src/mongo/db/pipeline/document_source.h
index e9885f2aaa5..f87a81c9be1 100644
--- a/src/mongo/db/pipeline/document_source.h
+++ b/src/mongo/db/pipeline/document_source.h
@@ -865,10 +865,13 @@ public:
GetDepsReturn getDependencies(DepsTracker* deps) const final;
/**
- Create a filter.
+ * Convenience method for creating a $match stage.
+ */
+ static boost::intrusive_ptr<DocumentSourceMatch> create(
+ BSONObj filter, const boost::intrusive_ptr<ExpressionContext>& expCtx);
- @param pBsonElement the raw BSON specification for the filter
- @returns the filter
+ /**
+ * Parses a $match stage from 'elem'.
*/
static boost::intrusive_ptr<DocumentSource> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pCtx);
@@ -2063,6 +2066,15 @@ private:
*/
class DocumentSourceProject final {
public:
+ /**
+ * Convenience method to create a $project stage from 'projectSpec'.
+ */
+ static boost::intrusive_ptr<DocumentSource> create(
+ BSONObj projectSpec, const boost::intrusive_ptr<ExpressionContext>& expCtx);
+
+ /**
+ * Parses a $project stage from the user-supplied BSON.
+ */
static boost::intrusive_ptr<DocumentSource> createFromBson(
BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
@@ -2076,8 +2088,17 @@ private:
*/
class DocumentSourceAddFields final {
public:
+ /**
+ * Convenience method for creating a $addFields stage from 'addFieldsSpec'.
+ */
+ static boost::intrusive_ptr<DocumentSource> create(
+ BSONObj addFieldsSpec, const boost::intrusive_ptr<ExpressionContext>& expCtx);
+
+ /**
+ * Parses a $addFields stage from the user-supplied BSON.
+ */
static boost::intrusive_ptr<DocumentSource> createFromBson(
- BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& pExpCtx);
+ BSONElement elem, const boost::intrusive_ptr<ExpressionContext>& expCtx);
private:
DocumentSourceAddFields() = default;
diff --git a/src/mongo/db/pipeline/document_source_add_fields.cpp b/src/mongo/db/pipeline/document_source_add_fields.cpp
index 6af83b944e6..df2003f2610 100644
--- a/src/mongo/db/pipeline/document_source_add_fields.cpp
+++ b/src/mongo/db/pipeline/document_source_add_fields.cpp
@@ -42,17 +42,22 @@ using parsed_aggregation_projection::ParsedAddFields;
REGISTER_DOCUMENT_SOURCE(addFields, DocumentSourceAddFields::createFromBson);
+intrusive_ptr<DocumentSource> DocumentSourceAddFields::create(
+ BSONObj addFieldsSpec, const intrusive_ptr<ExpressionContext>& expCtx) {
+ intrusive_ptr<DocumentSourceSingleDocumentTransformation> addFields(
+ new DocumentSourceSingleDocumentTransformation(
+ expCtx, ParsedAddFields::create(addFieldsSpec), "$addFields"));
+ addFields->injectExpressionContext(expCtx);
+ return addFields;
+}
+
intrusive_ptr<DocumentSource> DocumentSourceAddFields::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& expCtx) {
-
- // Confirm that the stage was called with an object.
uassert(40272,
str::stream() << "$addFields specification stage must be an object, got "
<< typeName(elem.type()),
elem.type() == Object);
- // Create the AddFields aggregation stage.
- return new DocumentSourceSingleDocumentTransformation(
- expCtx, ParsedAddFields::create(elem.Obj()), "$addFields");
-};
+ return DocumentSourceAddFields::create(elem.Obj(), expCtx);
+}
}
diff --git a/src/mongo/db/pipeline/document_source_add_fields_test.cpp b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
new file mode 100644
index 00000000000..63896f31e27
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_add_fields_test.cpp
@@ -0,0 +1,137 @@
+/**
+ * Copyright (C) 2016 MongoDB, Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects for
+ * all of the code used other than as permitted herein. If you modify file(s)
+ * with this exception, you may extend this exception to your version of the
+ * file(s), but you are not obligated to do so. If you do not wish to do so,
+ * delete this exception statement from your version. If you delete this
+ * exception statement from all source files in the program, then also delete
+ * it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <vector>
+
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/unittest/unittest.h"
+#include "mongo/util/assert_util.h"
+
+namespace mongo {
+namespace {
+
+using std::vector;
+
+//
+// DocumentSourceAddFields delegates most of its responsibilities to ParsedAddFields, which
+// derives from ParsedAggregationProjection. Most of the functional tests exercise
+// ParsedAddFields directly; the tests here are meant as simpler integration tests.
+//
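+// For example, DocumentSourceAddFields::create(BSON("a" << 1), expCtx) just wraps a
+// ParsedAddFields in a DocumentSourceSingleDocumentTransformation; see
+// document_source_add_fields.cpp earlier in this patch.
+//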
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using AddFieldsTest = AggregationContextFixture;
+
+TEST_F(AddFieldsTest, ShouldKeepUnspecifiedFieldsReplaceExistingFieldsAndAddNewFields) {
+ auto addFields =
+ DocumentSourceAddFields::create(BSON("e" << 2 << "b" << BSON("c" << 3)), getExpCtx());
+ auto mock =
+ DocumentSourceMock::create({Document{{"a", 1}, {"b", Document{{"c", 1}}}, {"d", 1}}});
+ addFields->setSource(mock.get());
+
+ auto next = addFields->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ Document expected = Document{{"a", 1}, {"b", Document{{"c", 3}}}, {"d", 1}, {"e", 2}};
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
+
+ ASSERT_TRUE(addFields->getNext().isEOF());
+ ASSERT_TRUE(addFields->getNext().isEOF());
+ ASSERT_TRUE(addFields->getNext().isEOF());
+}
+
+TEST_F(AddFieldsTest, ShouldOptimizeInnerExpressions) {
+ auto addFields = DocumentSourceAddFields::create(
+ BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))), getExpCtx());
+ addFields->optimize();
+ // The $and should have been replaced with its only argument.
+ vector<Value> serializedArray;
+ addFields->serializeToArray(serializedArray);
+ ASSERT_BSONOBJ_EQ(serializedArray[0].getDocument().toBson(),
+ fromjson("{$addFields: {a: {$const: true}}}"));
+}
+
+TEST_F(AddFieldsTest, ShouldErrorOnNonObjectSpec) {
+ BSONObj spec = BSON("$addFields"
+ << "foo");
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS_CODE(
+ DocumentSourceAddFields::createFromBson(specElement, getExpCtx()), UserException, 40272);
+}
+
+TEST_F(AddFieldsTest, ShouldBeAbleToProcessMultipleDocuments) {
+ auto addFields = DocumentSourceAddFields::create(BSON("a" << 10), getExpCtx());
+ auto mock =
+ DocumentSourceMock::create({Document{{"a", 1}, {"b", 2}}, Document{{"c", 3}, {"d", 4}}});
+ addFields->setSource(mock.get());
+
+ auto next = addFields->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ Document expected = Document{{"a", 10}, {"b", 2}};
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
+
+ next = addFields->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ expected = Document{{"c", 3}, {"d", 4}, {"a", 10}};
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
+
+ ASSERT_TRUE(addFields->getNext().isEOF());
+ ASSERT_TRUE(addFields->getNext().isEOF());
+ ASSERT_TRUE(addFields->getNext().isEOF());
+}
+
+TEST_F(AddFieldsTest, ShouldAddReferencedFieldsToDependencies) {
+ auto addFields = DocumentSourceAddFields::create(
+ fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"),
+ getExpCtx());
+ DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, addFields->getDependencies(&dependencies));
+ ASSERT_EQUALS(3U, dependencies.fields.size());
+
+ // No implicit _id dependency.
+ ASSERT_EQUALS(0U, dependencies.fields.count("_id"));
+
+ // Replaced field is not dependent.
+ ASSERT_EQUALS(0U, dependencies.fields.count("a"));
+
+ // Field path expression dependency.
+ ASSERT_EQUALS(1U, dependencies.fields.count("b"));
+
+ // Nested expression dependencies.
+ ASSERT_EQUALS(1U, dependencies.fields.count("c"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("d"));
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(true, dependencies.getNeedTextScore());
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
new file mode 100644
index 00000000000..1950c23f0d9
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_bucket_auto_test.cpp
@@ -0,0 +1,626 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <deque>
+#include <vector>
+
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsontypes.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/value.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using std::vector;
+using std::deque;
+using boost::intrusive_ptr;
+
+class BucketAutoTests : public AggregationContextFixture {
+public:
+ intrusive_ptr<DocumentSource> createBucketAuto(BSONObj bucketAutoSpec) {
+ return DocumentSourceBucketAuto::createFromBson(bucketAutoSpec.firstElement(), getExpCtx());
+ }
+
+ vector<Document> getResults(BSONObj bucketAutoSpec, deque<Document> docs) {
+ auto bucketAutoStage = createBucketAuto(bucketAutoSpec);
+ assertBucketAutoType(bucketAutoStage);
+
+ auto source = DocumentSourceMock::create(docs);
+ bucketAutoStage->setSource(source.get());
+
+ vector<Document> results;
+ for (auto next = bucketAutoStage->getNext(); next.isAdvanced();
+ next = bucketAutoStage->getNext()) {
+ results.push_back(next.releaseDocument());
+ }
+
+ return results;
+ }
+
+ void testSerialize(BSONObj bucketAutoSpec, BSONObj expectedObj) {
+ auto bucketAutoStage = createBucketAuto(bucketAutoSpec);
+ assertBucketAutoType(bucketAutoStage);
+
+ const bool explain = true;
+ vector<Value> explainedStages;
+ bucketAutoStage->serializeToArray(explainedStages, explain);
+ ASSERT_EQUALS(explainedStages.size(), 1UL);
+
+ Value expectedExplain = Value(expectedObj);
+
+ auto bucketAutoExplain = explainedStages[0];
+ ASSERT_VALUE_EQ(bucketAutoExplain["$bucketAuto"], expectedExplain);
+ }
+
+private:
+ void assertBucketAutoType(intrusive_ptr<DocumentSource> documentSource) {
+ const auto* bucketAutoStage = dynamic_cast<DocumentSourceBucketAuto*>(documentSource.get());
+ ASSERT(bucketAutoStage);
+ }
+};
+
+TEST_F(BucketAutoTests, ReturnsNoBucketsWhenSourceIsEmpty) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
+ auto results = getResults(bucketAutoSpec, {});
+ ASSERT_EQUALS(results.size(), 0UL);
+}
+
+TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWhenAllUniqueValues) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
+
+ // Values are 1, 2, 3, 4
+ auto intDocs = {Document{{"x", 4}}, Document{{"x", 1}}, Document{{"x", 3}}, Document{{"x", 2}}};
+ auto results = getResults(bucketAutoSpec, intDocs);
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 4}, count : 4}")));
+
+ // Values are 'a', 'b', 'c', 'd'
+ auto stringDocs = {
+ Document{{"x", "d"}}, Document{{"x", "b"}}, Document{{"x", "a"}}, Document{{"x", "c"}}};
+ results = getResults(bucketAutoSpec, stringDocs);
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 'a', max : 'd'}, count : 4}")));
+}
+
+TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWithNonUniqueValues) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
+
+ // Values are 1, 2, 7, 7, 7
+ auto docs = {Document{{"x", 7}},
+ Document{{"x", 1}},
+ Document{{"x", 7}},
+ Document{{"x", 2}},
+ Document{{"x", 7}}};
+ auto results = getResults(bucketAutoSpec, docs);
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 7}, count : 5}")));
+}
+
+TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWhen1ValueInSource) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
+ auto intDocs = {Document{{"x", 1}}};
+ auto results = getResults(bucketAutoSpec, intDocs);
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 1}, count : 1}")));
+
+ auto stringDocs = {Document{{"x", "a"}}};
+ results = getResults(bucketAutoSpec, stringDocs);
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 'a', max : 'a'}, count : 1}")));
+}
+
+TEST_F(BucketAutoTests, Returns2Of2RequestedBucketsWhenSmallestValueHasManyDuplicates) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
+
+ // Values are 1, 1, 1, 1, 2
+ auto docs = {Document{{"x", 1}},
+ Document{{"x", 1}},
+ Document{{"x", 1}},
+ Document{{"x", 2}},
+ Document{{"x", 1}}};
+ auto results = getResults(bucketAutoSpec, docs);
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 2}, count : 4}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
+}
+
+TEST_F(BucketAutoTests, Returns2Of2RequestedBucketsWhenLargestValueHasManyDuplicates) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
+
+ // Values are 0, 1, 2, 3, 4, 5, 5, 5, 5
+ auto docs = {Document{{"x", 5}},
+ Document{{"x", 0}},
+ Document{{"x", 2}},
+ Document{{"x", 3}},
+ Document{{"x", 5}},
+ Document{{"x", 1}},
+ Document{{"x", 5}},
+ Document{{"x", 4}},
+ Document{{"x", 5}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 5}, count : 5}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 5, max : 5}, count : 4}")));
+}
+
+TEST_F(BucketAutoTests, Returns3Of3RequestedBucketsWhenAllUniqueValues) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
+
+ // Values are 0, 1, 2, 3, 4, 5, 6, 7
+ auto docs = {Document{{"x", 2}},
+ Document{{"x", 4}},
+ Document{{"x", 1}},
+ Document{{"x", 7}},
+ Document{{"x", 0}},
+ Document{{"x", 5}},
+ Document{{"x", 3}},
+ Document{{"x", 6}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 3UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 3}, count : 3}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 3, max : 6}, count : 3}")));
+ ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 6, max : 7}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, Returns2Of3RequestedBucketsWhenLargestValueHasManyDuplicates) {
+ // In this case, two buckets will be made because the approximate bucket size calculated will
+ // be 7/3, which rounds to 2. Therefore, the boundaries will be calculated so that values 0
+ // and 1 fall into the first bucket, and all of the 2 values will then fall into a second
+ // bucket.
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
+
+ // Values are 0, 1, 2, 2, 2, 2, 2
+ auto docs = {Document{{"x", 2}},
+ Document{{"x", 0}},
+ Document{{"x", 2}},
+ Document{{"x", 2}},
+ Document{{"x", 1}},
+ Document{{"x", 2}},
+ Document{{"x", 2}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 2}, count : 2}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 2, max : 2}, count : 5}")));
+}
+
+TEST_F(BucketAutoTests, Returns1Of3RequestedBucketsWhenLargestValueHasManyDuplicates) {
+ // In this case, one bucket will be made because the approximate bucket size calculated will be
+ // 8/3, which rounds to 3. Therefore, the boundaries will be calculated so that values 0, 1, and
+ // 2 fall into the first bucket. Since 2 is repeated many times, all of the 2 values will be
+ // pulled into the first bucket.
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
+
+ // Values are 0, 1, 2, 2, 2, 2, 2, 2
+ auto docs = {Document{{"x", 2}},
+ Document{{"x", 2}},
+ Document{{"x", 0}},
+ Document{{"x", 2}},
+ Document{{"x", 2}},
+ Document{{"x", 2}},
+ Document{{"x", 1}},
+ Document{{"x", 2}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 2}, count : 8}")));
+}
+
+TEST_F(BucketAutoTests, Returns3Of3RequestedBucketsWhen3ValuesInSource) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
+ auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 3UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 1}, count : 1}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 1, max : 2}, count : 1}")));
+ ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
+}
+
+TEST_F(BucketAutoTests, Returns3Of10RequestedBucketsWhen3ValuesInSource) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 10}}");
+ auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 3UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 1}, count : 1}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 1, max : 2}, count : 1}")));
+ ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
+}
+
+TEST_F(BucketAutoTests, EvaluatesAccumulatorsInOutputField) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output : {avg : {$avg : '$x'}}}}");
+ auto docs = {Document{{"x", 0}}, Document{{"x", 2}}, Document{{"x", 4}}, Document{{"x", 6}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 4}, avg : 1}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 4, max : 6}, avg : 5}")));
+}
+
+TEST_F(BucketAutoTests, EvaluatesNonFieldPathExpressionInGroupByField) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : {$add : ['$x', 1]}, buckets : 2}}");
+ auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}, Document{{"x", 3}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 3}, count : 2}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 3, max : 4}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, RespectsCanonicalTypeOrderingOfValues) {
+ auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
+ auto docs = {Document{{"x", "a"}},
+ Document{{"x", 1}},
+ Document{{"x", "b"}},
+ Document{{"x", 2}},
+ Document{{"x", 0.0}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0.0, max : 'a'}, count : 3}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 'a', max : 'b'}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, SourceNameIsBucketAuto) {
+ auto bucketAuto = createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}"));
+ ASSERT_EQUALS(std::string(bucketAuto->getSourceName()), "$bucketAuto");
+}
+
+TEST_F(BucketAutoTests, ShouldAddDependenciesOfGroupByFieldAndComputedFields) {
+ auto bucketAuto =
+ createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output: {field1 : "
+ "{$sum : '$a'}, field2 : {$avg : '$b'}}}}"));
+
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(3U, dependencies.fields.size());
+
+ // Dependency from 'groupBy'
+ ASSERT_EQUALS(1U, dependencies.fields.count("x"));
+
+ // Dependencies from 'output'
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("b"));
+
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromGroupByField) {
+ auto bucketAuto =
+ createBucketAuto(fromjson("{$bucketAuto : {groupBy : {$meta: 'textScore'}, buckets : 2}}"));
+
+ DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(0U, dependencies.fields.size());
+
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(true, dependencies.getNeedTextScore());
+}
+
+TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromOutputField) {
+ auto bucketAuto =
+ createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output: {avg : "
+ "{$avg : {$meta : 'textScore'}}}}}"));
+
+ DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+
+ // Dependency from 'groupBy'
+ ASSERT_EQUALS(1U, dependencies.fields.count("x"));
+
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(true, dependencies.getNeedTextScore());
+}
+
+TEST_F(BucketAutoTests, SerializesDefaultAccumulatorIfOutputFieldIsNotSpecified) {
+ BSONObj spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
+ BSONObj expected =
+ fromjson("{groupBy : '$x', buckets : 2, output : {count : {$sum : {$const : 1}}}}");
+
+ testSerialize(spec, expected);
+}
+
+TEST_F(BucketAutoTests, SerializesOutputFieldIfSpecified) {
+ BSONObj spec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output : {field : {$avg : '$x'}}}}");
+ BSONObj expected = fromjson("{groupBy : '$x', buckets : 2, output : {field : {$avg : '$x'}}}");
+
+ testSerialize(spec, expected);
+}
+
+TEST_F(BucketAutoTests, SerializesGranularityFieldIfSpecified) {
+ BSONObj spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+ BSONObj expected = fromjson(
+ "{groupBy : '$x', buckets : 2, granularity : 'R5', output : {count : {$sum : {$const : "
+ "1}}}}");
+
+ testSerialize(spec, expected);
+}
+
+TEST_F(BucketAutoTests, ShouldBeAbleToReParseSerializedStage) {
+ auto bucketAuto =
+ createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity: 'R5', "
+ "output : {field : {$avg : '$x'}}}}"));
+ vector<Value> serialization;
+ bucketAuto->serializeToArray(serialization);
+ ASSERT_EQUALS(serialization.size(), 1UL);
+ ASSERT_EQUALS(serialization[0].getType(), BSONType::Object);
+
+ ASSERT_EQUALS(serialization[0].getDocument().size(), 1UL);
+ ASSERT_EQUALS(serialization[0].getDocument()["$bucketAuto"].getType(), BSONType::Object);
+
+ auto serializedBson = serialization[0].getDocument().toBson();
+ auto roundTripped = createBucketAuto(serializedBson);
+
+ vector<Value> newSerialization;
+ roundTripped->serializeToArray(newSerialization);
+
+ ASSERT_EQUALS(newSerialization.size(), 1UL);
+ ASSERT_VALUE_EQ(newSerialization[0], serialization[0]);
+}
+
+TEST_F(BucketAutoTests, ReturnsNoBucketsWhenNoBucketsAreSpecifiedInCreate) {
+ auto docs = {Document{{"x", 1}}};
+ auto mock = DocumentSourceMock::create(docs);
+ auto bucketAuto = DocumentSourceBucketAuto::create(getExpCtx());
+
+ bucketAuto->setSource(mock.get());
+ ASSERT(bucketAuto->getNext().isEOF());
+}
+
+TEST_F(BucketAutoTests, FailsWithInvalidNumberOfBuckets) {
+ auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 'test'}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40241);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2147483648}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40242);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1.5}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40242);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 0}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40243);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : -1}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40243);
+}
+
+TEST_F(BucketAutoTests, FailsWithNonExpressionGroupBy) {
+ auto spec = fromjson("{$bucketAuto : {groupBy : 'test', buckets : 1}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40239);
+
+ spec = fromjson("{$bucketAuto : {groupBy : {test : 'test'}, buckets : 1}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40239);
+}
+
+TEST_F(BucketAutoTests, FailsWithNonObjectArgument) {
+ auto spec = fromjson("{$bucketAuto : 'test'}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40240);
+
+ spec = fromjson("{$bucketAuto : [1, 2, 3]}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40240);
+}
+
+TEST_F(BucketAutoTests, FailsWithNonObjectOutput) {
+ auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : 'test'}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : [1, 2, 3]}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : 1}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
+}
+
+TEST_F(BucketAutoTests, FailsWhenGroupByMissing) {
+ auto spec = fromjson("{$bucketAuto : {buckets : 1}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40246);
+}
+
+TEST_F(BucketAutoTests, FailsWhenBucketsMissing) {
+ auto spec = fromjson("{$bucketAuto : {groupBy : '$x'}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40246);
+}
+
+TEST_F(BucketAutoTests, FailsWithUnknownField) {
+ auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, field : 'test'}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40245);
+}
+
+TEST_F(BucketAutoTests, FailsWithInvalidExpressionToAccumulator) {
+ auto spec = fromjson(
+ "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {avg : {$avg : ['$x', 1]}}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40237);
+
+ spec = fromjson(
+ "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {test : {$avg : '$x', $sum : "
+ "'$x'}}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40238);
+}
+
+TEST_F(BucketAutoTests, FailsWithNonAccumulatorObjectOutputField) {
+ auto spec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : {field : 'test'}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
+
+ spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : {field : 1}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
+
+ spec = fromjson(
+ "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {test : {field : 'test'}}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
+}
+
+TEST_F(BucketAutoTests, FailsWithInvalidOutputFieldName) {
+ auto spec = fromjson(
+ "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {'field.test' : {$avg : '$x'}}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40235);
+
+ spec = fromjson(
+ "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {'$field' : {$avg : '$x'}}}}");
+ ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40236);
+}
+
+TEST_F(BucketAutoTests, FailsWhenBufferingTooManyDocuments) {
+ std::deque<Document> inputs;
+ auto largeStr = std::string(1000, 'b');
+ auto inputDoc = Document{{"a", largeStr}};
+ ASSERT_GTE(inputDoc.getApproximateSize(), 1000UL);
+ inputs.push_back(inputDoc);
+ inputs.push_back(Document{{"a", largeStr}});
+ auto mock = DocumentSourceMock::create(inputs);
+
+ const uint64_t maxMemoryUsageBytes = 1000;
+ const int numBuckets = 1;
+ auto bucketAuto =
+ DocumentSourceBucketAuto::create(getExpCtx(), numBuckets, maxMemoryUsageBytes);
+ bucketAuto->setSource(mock.get());
+ ASSERT_THROWS_CODE(bucketAuto->getNext(), UserException, 16819);
+}
+
+TEST_F(BucketAutoTests, ShouldRoundUpMaximumBoundariesWithGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ // Values are 0, 15, 24, 30, 50
+ auto docs = {Document{{"x", 24}},
+ Document{{"x", 15}},
+ Document{{"x", 30}},
+ Document{{"x", 50}},
+ Document{{"x", 0}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 25}, count : 3}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 25, max : 63}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, ShouldRoundDownFirstMinimumBoundaryWithGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ // Values are 1, 15, 24, 30, 50
+ auto docs = {Document{{"x", 24}},
+ Document{{"x", 15}},
+ Document{{"x", 30}},
+ Document{{"x", 50}},
+ Document{{"x", 1}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0.63, max : 25}, count : 3}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 25, max : 63}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, ShouldAbsorbAllValuesSmallerThanAdjustedBoundaryWithGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {Document{{"x", 0}},
+ Document{{"x", 5}},
+ Document{{"x", 10}},
+ Document{{"x", 15}},
+ Document{{"x", 30}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 16}, count : 4}")));
+ ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 16, max : 40}, count : 1}")));
+}
+
+TEST_F(BucketAutoTests, ShouldBeAbleToAbsorbAllValuesIntoOneBucketWithGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {Document{{"x", 0}},
+ Document{{"x", 5}},
+ Document{{"x", 10}},
+ Document{{"x", 14}},
+ Document{{"x", 15}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 1UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 16}, count : 5}")));
+}
+
+TEST_F(BucketAutoTests, ShouldNotRoundZeroInFirstBucketWithGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {Document{{"x", 0}}, Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 1}}};
+ auto results = getResults(bucketAutoSpec, docs);
+
+ ASSERT_EQUALS(results.size(), 2UL);
+ ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 0.63}, count : 2}")));
+ ASSERT_DOCUMENT_EQ(results[1],
+ Document(fromjson("{_id : {min : 0.63, max : 1.6}, count : 2}")));
+}
+
+TEST_F(BucketAutoTests, ShouldFailOnNaNWhenGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {Document{{"x", 0}},
+ Document{{"x", std::nan("NaN")}},
+ Document{{"x", 1}},
+ Document{{"x", 1}}};
+ ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40259);
+}
+
+TEST_F(BucketAutoTests, ShouldFailOnNonNumericValuesWhenGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {
+ Document{{"x", 0}}, Document{{"x", "test"}}, Document{{"x", 1}}, Document{{"x", 1}}};
+ ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40258);
+}
+
+TEST_F(BucketAutoTests, ShouldFailOnNegativeNumbersWhenGranularitySpecified) {
+ auto bucketAutoSpec =
+ fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
+
+ auto docs = {Document{{"x", 0}}, Document{{"x", -1}}, Document{{"x", 1}}, Document{{"x", 2}}};
+ ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40260);
+}
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_bucket_test.cpp b/src/mongo/db/pipeline/document_source_bucket_test.cpp
new file mode 100644
index 00000000000..4f6f3c7f9c3
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_bucket_test.cpp
@@ -0,0 +1,286 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <vector>
+
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/value.h"
+#include "mongo/db/pipeline/value_comparator.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
+using std::vector;
+using boost::intrusive_ptr;
+
+class BucketReturnsGroupAndSort : public AggregationContextFixture {
+public:
+ void testCreateFromBsonResult(BSONObj bucketSpec, Value expectedGroupExplain) {
+ vector<intrusive_ptr<DocumentSource>> result =
+ DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), getExpCtx());
+
+ ASSERT_EQUALS(result.size(), 2UL);
+
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
+ ASSERT(groupStage);
+
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
+ ASSERT(sortStage);
+
+ // Serialize the DocumentSourceGroup and DocumentSourceSort from $bucket so that we can
+ // check the explain output to make sure $group and $sort have the correct fields.
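+ // For instance, {$bucket : {groupBy : '$x', boundaries : [0, 2]}} should desugar to a
+ // $group whose _id is a $switch over the boundary ranges, followed by a $sort on _id; the
+ // tests below check exactly this shape.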
+ const bool explain = true;
+ vector<Value> explainedStages;
+ groupStage->serializeToArray(explainedStages, explain);
+ sortStage->serializeToArray(explainedStages, explain);
+ ASSERT_EQUALS(explainedStages.size(), 2UL);
+
+ auto groupExplain = explainedStages[0];
+ ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
+
+ auto sortExplain = explainedStages[1];
+
+ auto expectedSortExplain = Value{Document{{"sortKey", Document{{"_id", 1}}}}};
+ ASSERT_VALUE_EQ(sortExplain["$sort"], expectedSortExplain);
+ }
+};
+
+TEST_F(BucketReturnsGroupAndSort, BucketUsesDefaultOutputWhenNoOutputSpecified) {
+ const auto spec =
+ fromjson("{$bucket : {groupBy :'$x', boundaries : [ 0, 2 ], default : 'other'}}");
+ auto expectedGroupExplain =
+ Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : "
+ "0}]}, {$lt : ['$x', {$const : 2}]}]}, then : {$const : 0}}], default : "
+ "{$const : 'other'}}}, count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenOutputSpecified) {
+ const auto spec = fromjson(
+ "{$bucket : {groupBy : '$x', boundaries : [0, 2], output : { number : {$sum : 1}}}}");
+ auto expectedGroupExplain = Value(fromjson(
+ "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
+ "['$x', {$const : 2}]}]}, then : {$const : 0}}]}}, number : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenNoDefaultSpecified) {
+ const auto spec = fromjson("{$bucket : { groupBy : '$x', boundaries : [0, 2]}}");
+ auto expectedGroupExplain = Value(fromjson(
+ "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
+ "['$x', {$const : 2}]}]}, then : {$const : 0}}]}}, count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenBoundariesAreSameCanonicalType) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1.5]}}");
+ auto expectedGroupExplain = Value(fromjson(
+ "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
+ "['$x', {$const : 1.5}]}]}, then : {$const : 0}}]}},count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenBoundariesAreConstantExpressions) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, {$add : [4, 5]}]}}");
+ auto expectedGroupExplain = Value(fromjson(
+ "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
+ "['$x', {$const : 9}]}]}, then : {$const : 0}}]}}, count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenDefaultIsConstantExpression) {
+ const auto spec =
+ fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1], default: {$add : [4, 5]}}}");
+ auto expectedGroupExplain =
+ Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const :"
+ "0}]}, {$lt : ['$x', {$const : 1}]}]}, then : {$const : 0}}], default : "
+ "{$const : 9}}}, count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWithMultipleBoundaryValues) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2]}}");
+ auto expectedGroupExplain =
+ Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : "
+ "0}]}, {$lt : ['$x', {$const : 1}]}]}, then : {$const : 0}}, {case : {$and "
+ ": [{$gte : ['$x', {$const : 1}]}, {$lt : ['$x', {$const : 2}]}]}, then : "
+ "{$const : 1}}]}}, count : {$sum : {$const : 1}}}"));
+
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+class InvalidBucketSpec : public AggregationContextFixture {
+public:
+ vector<intrusive_ptr<DocumentSource>> createBucket(BSONObj bucketSpec) {
+ auto sources = DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), getExpCtx());
+ for (auto&& source : sources) {
+ source->injectExpressionContext(getExpCtx());
+ }
+ return sources;
+ }
+};
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonObject) {
+ auto spec = fromjson("{$bucket : 1}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40201);
+
+ spec = fromjson("{$bucket : 'test'}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40201);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithUnknownField) {
+ const auto spec =
+ fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2], unknown : 'field'}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40197);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNoGroupBy) {
+ const auto spec = fromjson("{$bucket : {boundaries : [0, 1, 2]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40198);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNoBoundaries) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x'}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40198);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonExpressionGroupBy) {
+ auto spec = fromjson("{$bucket : {groupBy : {test : 'obj'}, boundaries : [0, 1, 2]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
+
+ spec = fromjson("{$bucket : {groupBy : 'test', boundaries : [0, 1, 2]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
+
+ spec = fromjson("{$bucket : {groupBy : 1, boundaries : [0, 1, 2]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonArrayBoundaries) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : 'test'}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
+
+ spec = fromjson("{$bucket : {groupBy : '$x', boundaries : 1}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
+
+ spec = fromjson("{$bucket : {groupBy : '$x', boundaries : {test : 'obj'}}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNotEnoughBoundaries) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40192);
+
+ spec = fromjson("{$bucket : {groupBy : '$x', boundaries : []}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40192);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonConstantValueBoundaries) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : ['$x', '$y', '$z']}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40191);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithMixedTypesBoundaries) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 'test']}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40193);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonUniqueBoundaries) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 1, 2, 3]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
+
+ spec = fromjson("{$bucket : {groupBy : '$x', boundaries : ['a', 'b', 'b', 'c']}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonSortedBoundaries) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [4, 5, 3, 6]}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWithNonConstantExpressionDefault) {
+ const auto spec =
+ fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2], default : '$x'}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40195);
+}
+
+TEST_F(InvalidBucketSpec, BucketFailsWhenDefaultIsInBoundariesRange) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 4], default : 3}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40199);
+
+ spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 4], default : 1}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40199);
+}
+
+TEST_F(InvalidBucketSpec, GroupFailsForBucketWithInvalidOutputField) {
+ auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : 'test'}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40196);
+
+ spec = fromjson(
+ "{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : {number : 'test'}}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40234);
+
+ spec = fromjson(
+ "{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : {'test.test' : {$sum : "
+ "1}}}}");
+ ASSERT_THROWS_CODE(createBucket(spec), UserException, 40235);
+}
+
+TEST_F(InvalidBucketSpec, SwitchFailsForBucketWhenNoDefaultSpecified) {
+ const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 3]}}");
+ vector<intrusive_ptr<DocumentSource>> bucketStages = createBucket(spec);
+
+ ASSERT_EQUALS(bucketStages.size(), 2UL);
+
+ auto* groupStage = dynamic_cast<DocumentSourceGroup*>(bucketStages[0].get());
+ ASSERT(groupStage);
+
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(bucketStages[1].get());
+ ASSERT(sortStage);
+
+ auto doc = Document{{"x", 4}};
+ auto source = DocumentSourceMock::create(doc);
+ groupStage->setSource(source.get());
+ ASSERT_THROWS_CODE(groupStage->getNext(), UserException, 40066);
+}
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_count_test.cpp b/src/mongo/db/pipeline/document_source_count_test.cpp
new file mode 100644
index 00000000000..552e69150b1
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_count_test.cpp
@@ -0,0 +1,134 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <vector>
+
+#include "mongo/base/string_data.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/value.h"
+
+namespace mongo {
+namespace {
+using std::vector;
+using boost::intrusive_ptr;
+
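+/** Verifies that $count expands into a $group stage followed by a projection stage. */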
+class CountReturnsGroupAndProjectStages : public AggregationContextFixture {
+public:
+ void testCreateFromBsonResult(BSONObj countSpec) {
+ vector<intrusive_ptr<DocumentSource>> result =
+ DocumentSourceCount::createFromBson(countSpec.firstElement(), getExpCtx());
+
+ ASSERT_EQUALS(result.size(), 2UL);
+
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
+ ASSERT(groupStage);
+
+ const auto* projectStage =
+ dynamic_cast<DocumentSourceSingleDocumentTransformation*>(result[1].get());
+ ASSERT(projectStage);
+
+ const bool explain = true;
+ vector<Value> explainedStages;
+ groupStage->serializeToArray(explainedStages, explain);
+ projectStage->serializeToArray(explainedStages, explain);
+ ASSERT_EQUALS(explainedStages.size(), 2UL);
+
+ StringData countName = countSpec.firstElement().valueStringData();
+ Value expectedGroupExplain =
+ Value{Document{{"_id", Document{{"$const", BSONNULL}}},
+ {countName, Document{{"$sum", Document{{"$const", 1}}}}}}};
+ auto groupExplain = explainedStages[0];
+ ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
+
+ Value expectedProjectExplain = Value{Document{{"_id", false}, {countName, true}}};
+ auto projectExplain = explainedStages[1];
+ ASSERT_VALUE_EQ(projectExplain["$project"], expectedProjectExplain);
+ }
+};
+
+TEST_F(CountReturnsGroupAndProjectStages, ValidStringSpec) {
+ BSONObj spec = BSON("$count"
+ << "myCount");
+ testCreateFromBsonResult(spec);
+
+ spec = BSON("$count"
+ << "quantity");
+ testCreateFromBsonResult(spec);
+}
+
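+/** Fixture for $count specs that should fail to parse. */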
+class InvalidCountSpec : public AggregationContextFixture {
+public:
+ vector<intrusive_ptr<DocumentSource>> createCount(BSONObj countSpec) {
+ auto specElem = countSpec.firstElement();
+ return DocumentSourceCount::createFromBson(specElem, getExpCtx());
+ }
+};
+
+TEST_F(InvalidCountSpec, NonStringSpec) {
+ BSONObj spec = BSON("$count" << 1);
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40156);
+
+ spec = BSON("$count" << BSON("field1"
+ << "test"));
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40156);
+}
+
+TEST_F(InvalidCountSpec, EmptyStringSpec) {
+ BSONObj spec = BSON("$count"
+ << "");
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40157);
+}
+
+TEST_F(InvalidCountSpec, FieldPathSpec) {
+ BSONObj spec = BSON("$count"
+ << "$x");
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40158);
+}
+
+TEST_F(InvalidCountSpec, EmbeddedNullByteSpec) {
+ BSONObj spec = BSON("$count"
+ << "te\0st"_sd);
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40159);
+}
+
+TEST_F(InvalidCountSpec, PeriodInStringSpec) {
+ BSONObj spec = BSON("$count"
+ << "test.string");
+ ASSERT_THROWS_CODE(createCount(spec), UserException, 40160);
+}
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_geo_near_test.cpp b/src/mongo/db/pipeline/document_source_geo_near_test.cpp
new file mode 100644
index 00000000000..2e4ef356114
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_geo_near_test.cpp
@@ -0,0 +1,85 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/pipeline.h"
+
+namespace mongo {
+namespace {
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using DocumentSourceGeoNearTest = AggregationContextFixture;
+
+TEST_F(DocumentSourceGeoNearTest, ShouldAbsorbSubsequentLimitStage) {
+ auto geoNear = DocumentSourceGeoNear::create(getExpCtx());
+
+ Pipeline::SourceContainer container;
+ container.push_back(geoNear);
+
+ ASSERT_EQUALS(geoNear->getLimit(), DocumentSourceGeoNear::kDefaultLimit);
+
+ container.push_back(DocumentSourceLimit::create(getExpCtx(), 200));
+ geoNear->optimizeAt(container.begin(), &container);
+
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(geoNear->getLimit(), DocumentSourceGeoNear::kDefaultLimit);
+
+ container.push_back(DocumentSourceLimit::create(getExpCtx(), 50));
+ geoNear->optimizeAt(container.begin(), &container);
+
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(geoNear->getLimit(), 50);
+
+ container.push_back(DocumentSourceLimit::create(getExpCtx(), 30));
+ geoNear->optimizeAt(container.begin(), &container);
+
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(geoNear->getLimit(), 30);
+}
+
+TEST_F(DocumentSourceGeoNearTest, ShouldReportOutputsAreSortedByDistanceField) {
+ BSONObj queryObj = fromjson(
+ "{geoNear: { near: {type: 'Point', coordinates: [0, 0]}, distanceField: 'dist', "
+ "maxDistance: 2}}");
+ auto geoNear = DocumentSourceGeoNear::createFromBson(queryObj.firstElement(), getExpCtx());
+
+ BSONObjSet outputSort = geoNear->getOutputSorts();
+
+ ASSERT_EQUALS(outputSort.count(BSON("dist" << -1)), 1U);
+ ASSERT_EQUALS(outputSort.size(), 1U);
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp b/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
index e3a6f220653..788b02e2825 100644
--- a/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
+++ b/src/mongo/db/pipeline/document_source_graph_lookup_test.cpp
@@ -211,7 +211,7 @@ TEST_F(DocumentSourceGraphLookUpTest,
unittest::assertGet(Pipeline::create({inputMock, graphLookupStage, unwindStage}, expCtx));
pipeline->optimizePipeline();
- ASSERT_THROWS_CODE(pipeline->output()->getNext(), UserException, 40271);
+ ASSERT_THROWS_CODE(pipeline->getNext(), UserException, 40271);
}
bool arrayContains(const boost::intrusive_ptr<ExpressionContext>& expCtx,
@@ -252,7 +252,7 @@ TEST_F(DocumentSourceGraphLookUpTest,
std::make_shared<MockMongodImplementation>(std::move(fromContents)));
auto pipeline = unittest::assertGet(Pipeline::create({inputMock, graphLookupStage}, expCtx));
- auto next = pipeline->output()->getNext();
+ auto next = pipeline->getNext();
ASSERT(next);
ASSERT_EQ(2U, next->size());
@@ -269,14 +269,14 @@ TEST_F(DocumentSourceGraphLookUpTest,
ASSERT(arrayContains(expCtx, resultsArray, Value(to1)));
ASSERT_EQ(2U, resultsArray.size());
- next = pipeline->output()->getNext();
+ next = pipeline->getNext();
ASSERT(!next);
} else if (arrayContains(expCtx, resultsArray, Value(to0from2))) {
// If 'to0from2' was returned, then we should see 'to2' and nothing else.
ASSERT(arrayContains(expCtx, resultsArray, Value(to2)));
ASSERT_EQ(2U, resultsArray.size());
- next = pipeline->output()->getNext();
+ next = pipeline->getNext();
ASSERT(!next);
} else {
FAIL(str::stream() << "Expected either [ " << to0from1.toString() << " ] or [ "
diff --git a/src/mongo/db/pipeline/document_source_group_test.cpp b/src/mongo/db/pipeline/document_source_group_test.cpp
new file mode 100644
index 00000000000..e86be1e96a8
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_group_test.cpp
@@ -0,0 +1,1048 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "mongo/bson/bsonelement.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_request.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/expression_context.h"
+#include "mongo/db/pipeline/value_comparator.h"
+#include "mongo/db/query/query_test_service_context.h"
+#include "mongo/dbtests/dbtests.h"
+#include "mongo/stdx/memory.h"
+#include "mongo/unittest/temp_dir.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+
+namespace {
+using boost::intrusive_ptr;
+using std::map;
+using std::string;
+using std::vector;
+
+static const char* const ns = "unittests.document_source_group_tests";
+
+BSONObj toBson(const intrusive_ptr<DocumentSource>& source) {
+ vector<Value> arr;
+ source->serializeToArray(arr);
+ ASSERT_EQUALS(arr.size(), 1UL);
+ return arr[0].getDocument().toBson();
+}
+
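+/** Fixture providing an OperationContext, an ExpressionContext, and a temp directory. */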
+class Base {
+public:
+ Base()
+ : _queryServiceContext(stdx::make_unique<QueryTestServiceContext>()),
+ _opCtx(_queryServiceContext->makeOperationContext()),
+ _ctx(new ExpressionContext(_opCtx.get(), AggregationRequest(NamespaceString(ns), {}))),
+ _tempDir("DocumentSourceGroupTest") {}
+
+protected:
+ void createGroup(const BSONObj& spec, bool inShard = false, bool inRouter = false) {
+ BSONObj namedSpec = BSON("$group" << spec);
+ BSONElement specElement = namedSpec.firstElement();
+
+ intrusive_ptr<ExpressionContext> expressionContext =
+ new ExpressionContext(_opCtx.get(), AggregationRequest(NamespaceString(ns), {}));
+ expressionContext->inShard = inShard;
+ expressionContext->inRouter = inRouter;
+ // Without a temp directory, the group stage won't spill to disk properly if it needs to.
+ expressionContext->tempDir = _tempDir.path();
+
+ _group = DocumentSourceGroup::createFromBson(specElement, expressionContext);
+ _group->injectExpressionContext(expressionContext);
+ assertRoundTrips(_group);
+ }
+ DocumentSourceGroup* group() {
+ return static_cast<DocumentSourceGroup*>(_group.get());
+ }
+ /** Assert that iterator state accessors consistently report the source is exhausted. */
+ void assertEOF(const intrusive_ptr<DocumentSource>& source) const {
+ // It should be safe to check doneness multiple times.
+ ASSERT(source->getNext().isEOF());
+ ASSERT(source->getNext().isEOF());
+ ASSERT(source->getNext().isEOF());
+ }
+
+ intrusive_ptr<ExpressionContext> ctx() const {
+ return _ctx;
+ }
+
+private:
+ /** Check that the group's spec round trips. */
+ void assertRoundTrips(const intrusive_ptr<DocumentSource>& group) {
+ // We don't check against the spec that generated 'group' originally, because
+ // $const operators may be introduced in the first serialization.
+ BSONObj spec = toBson(group);
+ BSONElement specElement = spec.firstElement();
+ intrusive_ptr<DocumentSource> generated =
+ DocumentSourceGroup::createFromBson(specElement, ctx());
+ ASSERT_BSONOBJ_EQ(spec, toBson(generated));
+ }
+ std::unique_ptr<QueryTestServiceContext> _queryServiceContext;
+ ServiceContext::UniqueOperationContext _opCtx;
+ intrusive_ptr<ExpressionContext> _ctx;
+ intrusive_ptr<DocumentSource> _group;
+ TempDir _tempDir;
+};
+
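+/** Base class for $group specs that should fail to parse with a UserException. */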
+class ParseErrorBase : public Base {
+public:
+ virtual ~ParseErrorBase() {}
+ void run() {
+ ASSERT_THROWS(createGroup(spec()), UserException);
+ }
+
+protected:
+ virtual BSONObj spec() = 0;
+};
+
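+/** Base class that runs a $group over a single document and checks the lone result. */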
+class ExpressionBase : public Base {
+public:
+ virtual ~ExpressionBase() {}
+ void run() {
+ createGroup(spec());
+ auto source = DocumentSourceMock::create(Document(doc()));
+ group()->setSource(source.get());
+ // A group result is available.
+ auto next = group()->getNext();
+ ASSERT(next.isAdvanced());
+ // The result should match the value expected for this spec.
+ ASSERT_BSONOBJ_EQ(expected(), next.getDocument().toBson());
+ }
+
+protected:
+ virtual BSONObj doc() = 0;
+ virtual BSONObj spec() = 0;
+ virtual BSONObj expected() = 0;
+};
+
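+/** Base class for specs with a constant _id, whose value is passed through unchanged. */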
+class IdConstantBase : public ExpressionBase {
+ virtual BSONObj doc() {
+ return BSONObj();
+ }
+ virtual BSONObj expected() {
+ // Since spec() specifies a constant _id, its value will be passed through.
+ return spec();
+ }
+};
+
+/** $group spec is not an object. */
+class NonObject : public Base {
+public:
+ void run() {
+ BSONObj spec = BSON("$group"
+ << "foo");
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceGroup::createFromBson(specElement, ctx()), UserException);
+ }
+};
+
+/** $group spec is an empty object. */
+class EmptySpec : public ParseErrorBase {
+ BSONObj spec() {
+ return BSONObj();
+ }
+};
+
+/** $group _id is an empty object. */
+class IdEmptyObject : public IdConstantBase {
+ BSONObj spec() {
+ return BSON("_id" << BSONObj());
+ }
+};
+
+/** $group _id is computed from an object expression. */
+class IdObjectExpression : public ExpressionBase {
+ BSONObj doc() {
+ return BSON("a" << 6);
+ }
+ BSONObj spec() {
+ return BSON("_id" << BSON("z"
+ << "$a"));
+ }
+ BSONObj expected() {
+ return BSON("_id" << BSON("z" << 6));
+ }
+};
+
+/** $group _id is specified as an invalid object expression. */
+class IdInvalidObjectExpression : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << BSON("$add" << 1 << "$and" << 1));
+ }
+};
+
+/** $group with two _id specs. */
+class TwoIdSpecs : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "_id" << 2);
+ }
+};
+
+/** $group _id is the empty string. */
+class IdEmptyString : public IdConstantBase {
+ BSONObj spec() {
+ return BSON("_id"
+ << "");
+ }
+};
+
+/** $group _id is a string constant. */
+class IdStringConstant : public IdConstantBase {
+ BSONObj spec() {
+ return BSON("_id"
+ << "abc");
+ }
+};
+
+/** $group _id is a field path expression. */
+class IdFieldPath : public ExpressionBase {
+ BSONObj doc() {
+ return BSON("a" << 5);
+ }
+ BSONObj spec() {
+ return BSON("_id"
+ << "$a");
+ }
+ BSONObj expected() {
+ return BSON("_id" << 5);
+ }
+};
+
+/** $group with _id set to an invalid field path. */
+class IdInvalidFieldPath : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id"
+ << "$a..");
+ }
+};
+
+/** $group _id is a numeric constant. */
+class IdNumericConstant : public IdConstantBase {
+ BSONObj spec() {
+ return BSON("_id" << 2);
+ }
+};
+
+/** $group _id is an array constant. */
+class IdArrayConstant : public IdConstantBase {
+ BSONObj spec() {
+ return BSON("_id" << BSON_ARRAY(1 << 2));
+ }
+};
+
+/** $group _id is a regular expression (not supported). */
+class IdRegularExpression : public IdConstantBase {
+ BSONObj spec() {
+ return fromjson("{_id:/a/}");
+ }
+};
+
+/** The name of an aggregate field is specified with a $ prefix. */
+class DollarAggregateFieldName : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "$foo" << BSON("$sum" << 1));
+ }
+};
+
+/** An aggregate field spec that is not an object. */
+class NonObjectAggregateSpec : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "a" << 1);
+ }
+};
+
+/** An aggregate field spec that is an empty object. */
+class EmptyObjectAggregateSpec : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "a" << BSONObj());
+ }
+};
+
+/** An aggregate field spec with an invalid accumulator operator. */
+class BadAccumulator : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "a" << BSON("$bad" << 1));
+ }
+};
+
+/** An aggregate field spec with an array argument. */
+class SumArray : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "a" << BSON("$sum" << BSONArray()));
+ }
+};
+
+/** Multiple accumulator operators for a field. */
+class MultipleAccumulatorsForAField : public ParseErrorBase {
+ BSONObj spec() {
+ return BSON("_id" << 1 << "a" << BSON("$sum" << 1 << "$push" << 1));
+ }
+};
+
+/** Aggregation using duplicate field names is currently allowed. */
+class DuplicateAggregateFieldNames : public ExpressionBase {
+ BSONObj doc() {
+ return BSONObj();
+ }
+ BSONObj spec() {
+ return BSON("_id" << 0 << "z" << BSON("$sum" << 1) << "z" << BSON("$push" << 1));
+ }
+ BSONObj expected() {
+ return BSON("_id" << 0 << "z" << 1 << "z" << BSON_ARRAY(1));
+ }
+};
+
+/** Aggregate the value of an object expression. */
+class AggregateObjectExpression : public ExpressionBase {
+ BSONObj doc() {
+ return BSON("a" << 6);
+ }
+ BSONObj spec() {
+ return BSON("_id" << 0 << "z" << BSON("$first" << BSON("x"
+ << "$a")));
+ }
+ BSONObj expected() {
+ return BSON("_id" << 0 << "z" << BSON("x" << 6));
+ }
+};
+
+/** Aggregate the value of an operator expression. */
+class AggregateOperatorExpression : public ExpressionBase {
+ BSONObj doc() {
+ return BSON("a" << 6);
+ }
+ BSONObj spec() {
+ return BSON("_id" << 0 << "z" << BSON("$first"
+ << "$a"));
+ }
+ BSONObj expected() {
+ return BSON("_id" << 0 << "z" << 6);
+ }
+};
+
+struct ValueCmp {
+ bool operator()(const Value& a, const Value& b) const {
+ return ValueComparator().evaluate(a < b);
+ }
+};
+typedef map<Value, Document, ValueCmp> IdMap;
+
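+/** Base class that runs a $group over input documents and verifies the full result set, both unsharded and with a simulated shard/merge split. */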
+class CheckResultsBase : public Base {
+public:
+ virtual ~CheckResultsBase() {}
+ void run() {
+ runSharded(false);
+ runSharded(true);
+ }
+ void runSharded(bool sharded) {
+ createGroup(groupSpec());
+ auto source = DocumentSourceMock::create(inputData());
+ group()->setSource(source.get());
+
+ intrusive_ptr<DocumentSource> sink = group();
+ if (sharded) {
+ sink = createMerger();
+ // Serialize and re-parse the shard stage.
+ createGroup(toBson(group())["$group"].Obj(), true);
+ group()->setSource(source.get());
+ sink->setSource(group());
+ }
+
+ checkResultSet(sink);
+ }
+
+protected:
+ virtual std::deque<Document> inputData() {
+ return {};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id" << 0);
+ }
+ /** Expected results. Must be sorted by _id to ensure consistent ordering. */
+ virtual BSONObj expectedResultSet() {
+ BSONObj wrappedResult =
+ // fromjson cannot parse an array, so place the array within an object.
+ fromjson(string("{'':") + expectedResultSetString() + "}");
+ return wrappedResult[""].embeddedObject().getOwned();
+ }
+ /** Expected results. Must be sorted by _id to ensure consistent ordering. */
+ virtual string expectedResultSetString() {
+ return "[]";
+ }
+ intrusive_ptr<DocumentSource> createMerger() {
+ // Set up a group merger to simulate merging results in the router. In this
+ // case only one shard is in use.
+ SplittableDocumentSource* splittable = dynamic_cast<SplittableDocumentSource*>(group());
+ ASSERT(splittable);
+ intrusive_ptr<DocumentSource> routerSource = splittable->getMergeSource();
+ ASSERT_NOT_EQUALS(group(), routerSource.get());
+ return routerSource;
+ }
+ void checkResultSet(const intrusive_ptr<DocumentSource>& sink) {
+ // Load the results from the DocumentSourceGroup and sort them by _id.
+ IdMap resultSet;
+ for (auto output = sink->getNext(); output.isAdvanced(); output = sink->getNext()) {
+ // Save the current result.
+ Value id = output.getDocument().getField("_id");
+ resultSet[id] = output.releaseDocument();
+ }
+ // Verify the DocumentSourceGroup is exhausted.
+ assertEOF(sink);
+
+ // Convert results to BSON once they all have been retrieved (to detect any errors
+ // resulting from incorrectly shared sub objects).
+ BSONArrayBuilder bsonResultSet;
+ for (IdMap::const_iterator i = resultSet.begin(); i != resultSet.end(); ++i) {
+ bsonResultSet << i->second;
+ }
+ // Check the result set.
+ ASSERT_BSONOBJ_EQ(expectedResultSet(), bsonResultSet.arr());
+ }
+};
+
+/** An empty collection generates no results. */
+class EmptyCollection : public CheckResultsBase {};
+
+/** A $group performed on a single document. */
+class SingleDocument : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("a" << 1)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id" << 0 << "a" << BSON("$sum"
+ << "$a"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0,a:1}]";
+ }
+};
+
+/** A $group performed on two values for a single key. */
+class TwoValuesSingleKey : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("a" << 1), DOC("a" << 2)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id" << 0 << "a" << BSON("$push"
+ << "$a"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0,a:[1,2]}]";
+ }
+};
+
+/** A $group performed on two values with one key each. */
+class TwoValuesTwoKeys : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("_id" << 0 << "a" << 1), DOC("_id" << 1 << "a" << 2)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id"
+ << "$_id"
+ << "a"
+ << BSON("$push"
+ << "$a"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0,a:[1]},{_id:1,a:[2]}]";
+ }
+};
+
+/** A $group performed on four values spread across two keys. */
+class FourValuesTwoKeys : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("id" << 0 << "a" << 1),
+ DOC("id" << 1 << "a" << 2),
+ DOC("id" << 0 << "a" << 3),
+ DOC("id" << 1 << "a" << 4)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id"
+ << "$id"
+ << "a"
+ << BSON("$push"
+ << "$a"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0,a:[1,3]},{_id:1,a:[2,4]}]";
+ }
+};
+
+/** A $group performed on four values across two keys, with two accumulator operations. */
+class FourValuesTwoKeysTwoAccumulators : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("id" << 0 << "a" << 1),
+ DOC("id" << 1 << "a" << 2),
+ DOC("id" << 0 << "a" << 3),
+ DOC("id" << 1 << "a" << 4)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id"
+ << "$id"
+ << "list"
+ << BSON("$push"
+ << "$a")
+ << "sum"
+ << BSON("$sum" << BSON("$divide" << BSON_ARRAY("$a" << 2))));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0,list:[1,3],sum:2},{_id:1,list:[2,4],sum:3}]";
+ }
+};
+
+/** Null and missing _id values are grouped together. */
+class GroupNullUndefinedIds : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("a" << BSONNULL << "b" << 100), DOC("b" << 10)};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id"
+ << "$a"
+ << "sum"
+ << BSON("$sum"
+ << "$b"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:null,sum:110}]";
+ }
+};
+
+/** A complex _id expression. */
+class ComplexId : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {DOC("a"
+ << "de"
+ << "b"
+ << "ad"
+ << "c"
+ << "beef"
+ << "d"
+ << ""),
+ DOC("a"
+ << "d"
+ << "b"
+ << "eadbe"
+ << "c"
+ << ""
+ << "d"
+ << "ef")};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id" << BSON("$concat" << BSON_ARRAY("$a"
+ << "$b"
+ << "$c"
+ << "$d")));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:'deadbeef'}]";
+ }
+};
+
+/** An undefined accumulator value is dropped. */
+class UndefinedAccumulatorValue : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {Document()};
+ }
+ virtual BSONObj groupSpec() {
+ return BSON("_id" << 0 << "first" << BSON("$first"
+ << "$missing"));
+ }
+ virtual string expectedResultSetString() {
+ return "[{_id:0, first:null}]";
+ }
+};
+
+/** Simulate merging sharded results in the router. */
+class RouterMerger : public CheckResultsBase {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{_id:0,list:[1,2]}",
+ "{_id:1,list:[3,4]}",
+ "{_id:0,list:[10,20]}",
+ "{_id:1,list:[30,40]}]}"});
+
+ // Create a group source.
+ createGroup(BSON("_id"
+ << "$x"
+ << "list"
+ << BSON("$push"
+ << "$y")));
+ // Create a merger version of the source.
+ intrusive_ptr<DocumentSource> group = createMerger();
+ // Attach the merger to the synthetic shard results.
+ group->setSource(source.get());
+ // Check the merger's output.
+ checkResultSet(group);
+ }
+
+private:
+ string expectedResultSetString() {
+ return "[{_id:0,list:[1,2,10,20]},{_id:1,list:[3,4,30,40]}]";
+ }
+};
+
+/** Dependent field paths. */
+class Dependencies : public Base {
+public:
+ void run() {
+ createGroup(fromjson("{_id:'$x',a:{$sum:'$y.z'},b:{$avg:{$add:['$u','$v']}}}"));
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, group()->getDependencies(&dependencies));
+ ASSERT_EQUALS(4U, dependencies.fields.size());
+ // Dependency from _id expression.
+ ASSERT_EQUALS(1U, dependencies.fields.count("x"));
+ // Dependencies from accumulator expressions.
+ ASSERT_EQUALS(1U, dependencies.fields.count("y.z"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("u"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("v"));
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+ }
+};
+
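+/** $group streams its output when the input is sorted by the _id field path. */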
+class StreamingOptimization : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 0}", "{a: 0}", "{a: 1}", "{a: 1}"});
+ source->sorts = {BSON("a" << 1)};
+
+ createGroup(BSON("_id"
+ << "$a"));
+ group()->setSource(source.get());
+
+ auto res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(0));
+
+ ASSERT_TRUE(group()->isStreaming());
+
+ res = source->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(1));
+
+ assertEOF(source);
+
+ res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(1));
+
+ assertEOF(group());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 1U);
+
+ ASSERT_EQUALS(outputSort.count(BSON("_id" << 1)), 1U);
+ }
+};
+
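+/** Streaming applies when the input sort covers every field path in a compound _id. */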
+class StreamingWithMultipleIdFields : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create(
+ {"{a: 1, b: 2}", "{a: 1, b: 2}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 1}"});
+ source->sorts = {BSON("a" << 1 << "b" << -1)};
+
+ createGroup(fromjson("{_id: {x: '$a', y: '$b'}}"));
+ group()->setSource(source.get());
+
+ auto res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(2));
+
+ ASSERT_TRUE(group()->isStreaming());
+
+ res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(1));
+
+ res = source->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
+ ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
+
+ assertEOF(source);
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 2U);
+
+ BSONObj correctSort = BSON("_id.x" << 1 << "_id.y" << -1);
+ ASSERT_EQUALS(outputSort.count(correctSort), 1U);
+
+ BSONObj prefixSort = BSON("_id.x" << 1);
+ ASSERT_EQUALS(outputSort.count(prefixSort), 1U);
+ }
+};
+
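+/** Streaming applies to a nested _id document, and each sort-key prefix is reported. */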
+class StreamingWithMultipleLevels : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create(
+ {"{a: {b: {c: 3}}, d: 1}", "{a: {b: {c: 1}}, d: 2}", "{a: {b: {c: 1}}, d: 0}"});
+ source->sorts = {BSON("a.b.c" << -1 << "a.b.d" << 1 << "d" << 1)};
+
+ createGroup(fromjson("{_id: {x: {y: {z: '$a.b.c', q: '$a.b.d'}}, v: '$d'}}"));
+ group()->setSource(source.get());
+
+ auto res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"]["y"]["z"], Value(3));
+
+ ASSERT_TRUE(group()->isStreaming());
+
+ res = source->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("a")["b"]["c"], Value(1));
+
+ assertEOF(source);
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 3U);
+
+ BSONObj correctSort = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1, '_id.v': 1}");
+ ASSERT_EQUALS(outputSort.count(correctSort), 1U);
+
+ BSONObj prefixSortTwo = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1}");
+ ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
+
+ BSONObj prefixSortOne = fromjson("{'_id.x.y.z': -1}");
+ ASSERT_EQUALS(outputSort.count(prefixSortOne), 1U);
+ }
+};
+
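+/** Repeating a field path within the _id document does not prevent streaming. */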
+class StreamingWithFieldRepeated : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create(
+ {"{a: 1, b: 1}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 3}"});
+ source->sorts = {BSON("a" << 1 << "b" << 1)};
+
+ createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: '$a'}}}"));
+ group()->setSource(source.get());
+
+ auto res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(1));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value(1));
+
+ ASSERT_TRUE(group()->isStreaming());
+
+ res = source->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
+ ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(3));
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+
+ ASSERT_EQUALS(outputSort.size(), 2U);
+
+ BSONObj correctSort = fromjson("{'_id.sub.z': 1}");
+ ASSERT_EQUALS(outputSort.count(correctSort), 1U);
+
+ BSONObj prefixSortTwo = fromjson("{'_id.sub.z': 1, '_id.sub.y': 1}");
+ ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
+ }
+};
+
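+/** A constant alongside field paths in the _id document does not prevent streaming. */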
+class StreamingWithConstantAndFieldPath : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create(
+ {"{a: 5, b: 1}", "{a: 5, b: 2}", "{a: 3, b: 1}", "{a: 1, b: 1}", "{a: 1, b: 1}"});
+ source->sorts = {BSON("a" << -1 << "b" << 1)};
+
+ createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: {$literal: 'c'}}}}"));
+ group()->setSource(source.get());
+
+ auto res = group()->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(5));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
+ ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value("c"));
+
+ ASSERT_TRUE(group()->isStreaming());
+
+ res = source->getNext();
+ ASSERT_TRUE(res.isAdvanced());
+ ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(3));
+ ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 2U);
+
+ BSONObj correctSort = fromjson("{'_id.sub.x': -1}");
+ ASSERT_EQUALS(outputSort.count(correctSort), 1U);
+
+ BSONObj prefixSortTwo = fromjson("{'_id.sub.x': -1, '_id.sub.y': 1}");
+ ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
+ }
+};
+
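+/** Referencing a subfield through $$ROOT still permits streaming. */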
+class StreamingWithRootSubfield : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
+ source->sorts = {BSON("a" << 1)};
+
+ createGroup(fromjson("{_id: '$$ROOT.a'}"));
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_TRUE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 1U);
+
+ BSONObj correctSort = fromjson("{_id: 1}");
+ ASSERT_EQUALS(outputSort.count(correctSort), 1U);
+ }
+};
+
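+/** A constant _id streams trivially but reports no output sort. */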
+class StreamingWithConstant : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
+ source->sorts = {BSON("$a" << 1)};
+
+ createGroup(fromjson("{_id: 1}"));
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_TRUE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 0U);
+ }
+};
+
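+/** An empty object _id behaves like a constant: it streams but reports no output sort. */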
+class StreamingWithEmptyId : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
+ source->sorts = {BSON("$a" << 1)};
+
+ createGroup(fromjson("{_id: {}}"));
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_TRUE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 0U);
+ }
+};
+
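+/** Streaming is disabled when the input sort does not cover every _id field path. */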
+class NoOptimizationIfMissingDoubleSort : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
+ source->sorts = {BSON("a" << 1)};
+
+ // We pretend to be in the router so that we don't spill to disk, because this produces
+ // inconsistent output on debug vs. non-debug builds.
+ const bool inRouter = true;
+ const bool inShard = false;
+
+ createGroup(BSON("_id" << BSON("x"
+ << "$a"
+ << "y"
+ << "$b")),
+ inShard,
+ inRouter);
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_FALSE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 0U);
+ }
+};
+
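+/** Grouping on the raw $$ROOT document disables streaming. */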
+class NoOptimizationWithRawRoot : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
+ source->sorts = {BSON("a" << 1)};
+
+ // We pretend to be in the router so that we don't spill to disk, because this produces
+ // inconsistent output on debug vs. non-debug builds.
+ const bool inRouter = true;
+ const bool inShard = false;
+
+ createGroup(BSON("_id" << BSON("a"
+ << "$$ROOT"
+ << "b"
+ << "$a")),
+ inShard,
+ inRouter);
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_FALSE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 0U);
+ }
+};
+
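+/** An _id computed with an operator expression rather than field paths disables streaming. */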
+class NoOptimizationIfUsingExpressions : public Base {
+public:
+ void run() {
+ auto source = DocumentSourceMock::create({"{a: 1, b: 1}", "{a: 2, b: 2}", "{a: 3, b: 1}"});
+ source->sorts = {BSON("a" << 1 << "b" << 1)};
+
+ // We pretend to be in the router so that we don't spill to disk, because this produces
+ // inconsistent output on debug vs. non-debug builds.
+ const bool inRouter = true;
+ const bool inShard = false;
+
+ createGroup(fromjson("{_id: {$sum: ['$a', '$b']}}"), inShard, inRouter);
+ group()->setSource(source.get());
+
+ group()->getNext();
+ ASSERT_FALSE(group()->isStreaming());
+
+ BSONObjSet outputSort = group()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.size(), 0U);
+ }
+};
+
+/**
+ * A string constant (not a field path) as an _id expression and passed to an accumulator.
+ * SERVER-6766
+ */
+class StringConstantIdAndAccumulatorExpressions : public CheckResultsBase {
+ std::deque<Document> inputData() {
+ return {Document()};
+ }
+ BSONObj groupSpec() {
+ return fromjson("{_id:{$const:'$_id...'},a:{$push:{$const:'$a...'}}}");
+ }
+ string expectedResultSetString() {
+ return "[{_id:'$_id...',a:['$a...']}]";
+ }
+};
+
+/** An array constant passed to an accumulator. */
+class ArrayConstantAccumulatorExpression : public CheckResultsBase {
+public:
+ void run() {
+ // A parse exception is thrown when a raw array is provided to an accumulator.
+ ASSERT_THROWS(createGroup(fromjson("{_id:1,a:{$push:[4,5,6]}}")), UserException);
+ // Run standard base tests.
+ CheckResultsBase::run();
+ }
+ std::deque<Document> inputData() {
+ return {Document()};
+ }
+ BSONObj groupSpec() {
+ // An array can be specified using $const.
+ return fromjson("{_id:[1,2,3],a:{$push:{$const:[4,5,6]}}}");
+ }
+ string expectedResultSetString() {
+ return "[{_id:[1,2,3],a:[[4,5,6]]}]";
+ }
+};
+
+class All : public Suite {
+public:
+ All() : Suite("DocumentSourceGroupTests") {}
+ void setupTests() {
+ add<NonObject>();
+ add<EmptySpec>();
+ add<IdEmptyObject>();
+ add<IdObjectExpression>();
+ add<IdInvalidObjectExpression>();
+ add<TwoIdSpecs>();
+ add<IdEmptyString>();
+ add<IdStringConstant>();
+ add<IdFieldPath>();
+ add<IdInvalidFieldPath>();
+ add<IdNumericConstant>();
+ add<IdArrayConstant>();
+ add<IdRegularExpression>();
+ add<DollarAggregateFieldName>();
+ add<NonObjectAggregateSpec>();
+ add<EmptyObjectAggregateSpec>();
+ add<BadAccumulator>();
+ add<SumArray>();
+ add<MultipleAccumulatorsForAField>();
+ add<DuplicateAggregateFieldNames>();
+ add<AggregateObjectExpression>();
+ add<AggregateOperatorExpression>();
+ add<EmptyCollection>();
+ add<SingleDocument>();
+ add<TwoValuesSingleKey>();
+ add<TwoValuesTwoKeys>();
+ add<FourValuesTwoKeys>();
+ add<FourValuesTwoKeysTwoAccumulators>();
+ add<GroupNullUndefinedIds>();
+ add<ComplexId>();
+ add<UndefinedAccumulatorValue>();
+ add<RouterMerger>();
+ add<Dependencies>();
+ add<StringConstantIdAndAccumulatorExpressions>();
+ add<ArrayConstantAccumulatorExpression>();
+#if 0
+ // Disabled tests until SERVER-23318 is implemented.
+ add<StreamingOptimization>();
+ add<StreamingWithMultipleIdFields>();
+ add<NoOptimizationIfMissingDoubleSort>();
+ add<NoOptimizationWithRawRoot>();
+ add<NoOptimizationIfUsingExpressions>();
+ add<StreamingWithMultipleLevels>();
+ add<StreamingWithConstant>();
+ add<StreamingWithEmptyId>();
+ add<StreamingWithRootSubfield>();
+ add<StreamingWithConstantAndFieldPath>();
+ add<StreamingWithFieldRepeated>();
+#endif
+ }
+};
+
+SuiteInstance<All> myall;
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_limit_test.cpp b/src/mongo/db/pipeline/document_source_limit_test.cpp
new file mode 100644
index 00000000000..7de5bf85a3a
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_limit_test.cpp
@@ -0,0 +1,103 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/pipeline.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using DocumentSourceLimitTest = AggregationContextFixture;
+
+TEST_F(DocumentSourceLimitTest, ShouldDisposeSourceWhenLimitIsReached) {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}"});
+ auto limit = DocumentSourceLimit::create(getExpCtx(), 1);
+ limit->setSource(source.get());
+ // The limit's result is as expected.
+ auto next = limit->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT_VALUE_EQ(Value(1), next.getDocument().getField("a"));
+ // The limit is exhausted.
+ ASSERT(limit->getNext().isEOF());
+ // The source has been disposed.
+ ASSERT_TRUE(source->isDisposed);
+}
+
+TEST_F(DocumentSourceLimitTest, TwoLimitStagesShouldCombineIntoOne) {
+ Pipeline::SourceContainer container;
+ auto firstLimit = DocumentSourceLimit::create(getExpCtx(), 10);
+ auto secondLimit = DocumentSourceLimit::create(getExpCtx(), 5);
+
+ container.push_back(firstLimit);
+ container.push_back(secondLimit);
+
+ firstLimit->optimizeAt(container.begin(), &container);
+ ASSERT_EQUALS(5, firstLimit->getLimit());
+ ASSERT_EQUALS(1U, container.size());
+}
+
+TEST_F(DocumentSourceLimitTest, DisposeShouldCascadeAllTheWayToSource) {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 1}"});
+
+ // Create a DocumentSourceMatch.
+ BSONObj spec = BSON("$match" << BSON("a" << 1));
+ BSONElement specElement = spec.firstElement();
+ auto match = DocumentSourceMatch::createFromBson(specElement, getExpCtx());
+ match->setSource(source.get());
+
+ auto limit = DocumentSourceLimit::create(getExpCtx(), 1);
+ limit->setSource(match.get());
+ // The limit is not yet exhausted.
+ auto next = limit->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT_VALUE_EQ(Value(1), next.getDocument().getField("a"));
+ // The limit is exhausted.
+ ASSERT(limit->getNext().isEOF());
+ ASSERT_TRUE(source->isDisposed);
+}
+
+TEST_F(DocumentSourceLimitTest, ShouldNotIntroduceAnyDependencies) {
+ auto limit = DocumentSourceLimit::create(getExpCtx(), 1);
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, limit->getDependencies(&dependencies));
+ ASSERT_EQUALS(0U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_lookup_test.cpp b/src/mongo/db/pipeline/document_source_lookup_test.cpp
new file mode 100644
index 00000000000..2ab28f3e392
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_lookup_test.cpp
@@ -0,0 +1,129 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <vector>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/field_path.h"
+#include "mongo/db/pipeline/value.h"
+
+namespace mongo {
+namespace {
+using boost::intrusive_ptr;
+using std::vector;
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using DocumentSourceLookUpTest = AggregationContextFixture;
+
+TEST_F(DocumentSourceLookUpTest, ShouldTruncateOutputSortOnAsField) {
+ intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
+ source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
+ auto lookup = DocumentSourceLookUp::createFromBson(
+ Document{
+ {"$lookup",
+ Document{{"from", "a"}, {"localField", "b"}, {"foreignField", "c"}, {"as", "d.e"}}}}
+ .toBson()
+ .firstElement(),
+ getExpCtx());
+ lookup->setSource(source.get());
+
+ BSONObjSet outputSort = lookup->getOutputSorts();
+
+ ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
+ ASSERT_EQUALS(outputSort.size(), 1U);
+}
+
+TEST_F(DocumentSourceLookUpTest, ShouldTruncateOutputSortOnSuffixOfAsField) {
+ intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
+ source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
+ auto lookup = DocumentSourceLookUp::createFromBson(
+ Document{{"$lookup",
+ Document{{"from", "a"}, {"localField", "b"}, {"foreignField", "c"}, {"as", "d"}}}}
+ .toBson()
+ .firstElement(),
+ getExpCtx());
+ lookup->setSource(source.get());
+
+ BSONObjSet outputSort = lookup->getOutputSorts();
+
+ ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
+ ASSERT_EQUALS(outputSort.size(), 1U);
+}
+
+TEST(MakeMatchStageFromInput, NonArrayValueUsesEqQuery) {
+ auto input = Document{{"local", 1}};
+ BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
+ input, FieldPath("local"), "foreign", BSONObj());
+ ASSERT_BSONOBJ_EQ(matchStage, fromjson("{$match: {$and: [{foreign: {$eq: 1}}, {}]}}"));
+}
+
+TEST(MakeMatchStageFromInput, RegexValueUsesEqQuery) {
+ BSONRegEx regex("^a");
+ Document input = DOC("local" << Value(regex));
+ BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
+ input, FieldPath("local"), "foreign", BSONObj());
+ ASSERT_BSONOBJ_EQ(
+ matchStage,
+ BSON("$match" << BSON(
+ "$and" << BSON_ARRAY(BSON("foreign" << BSON("$eq" << regex)) << BSONObj()))));
+}
+
+TEST(MakeMatchStageFromInput, ArrayValueUsesInQuery) {
+ vector<Value> inputArray = {Value(1), Value(2)};
+ Document input = DOC("local" << Value(inputArray));
+ BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
+ input, FieldPath("local"), "foreign", BSONObj());
+ ASSERT_BSONOBJ_EQ(matchStage, fromjson("{$match: {$and: [{foreign: {$in: [1, 2]}}, {}]}}"));
+}
+
+TEST(MakeMatchStageFromInput, ArrayValueWithRegexUsesOrQuery) {
+ BSONRegEx regex("^a");
+ vector<Value> inputArray = {Value(1), Value(regex), Value(2)};
+ Document input = DOC("local" << Value(inputArray));
+ BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
+ input, FieldPath("local"), "foreign", BSONObj());
+ ASSERT_BSONOBJ_EQ(
+ matchStage,
+ BSON("$match" << BSON(
+ "$and" << BSON_ARRAY(
+ BSON("$or" << BSON_ARRAY(BSON("foreign" << BSON("$eq" << Value(1)))
+ << BSON("foreign" << BSON("$eq" << regex))
+ << BSON("foreign" << BSON("$eq" << Value(2)))))
+ << BSONObj()))));
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_match.cpp b/src/mongo/db/pipeline/document_source_match.cpp
index 8478cb387b4..f5c4432bc31 100644
--- a/src/mongo/db/pipeline/document_source_match.cpp
+++ b/src/mongo/db/pipeline/document_source_match.cpp
@@ -450,13 +450,19 @@ static void uassertNoDisallowedClauses(BSONObj query) {
}
}
+intrusive_ptr<DocumentSourceMatch> DocumentSourceMatch::create(
+ BSONObj filter, const intrusive_ptr<ExpressionContext>& expCtx) {
+ uassertNoDisallowedClauses(filter);
+ intrusive_ptr<DocumentSourceMatch> match(new DocumentSourceMatch(filter, expCtx));
+ match->injectExpressionContext(expCtx);
+ return match;
+}
+
intrusive_ptr<DocumentSource> DocumentSourceMatch::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& pExpCtx) {
uassert(15959, "the match filter must be an expression in an object", elem.type() == Object);
- uassertNoDisallowedClauses(elem.Obj());
-
- return new DocumentSourceMatch(elem.Obj(), pExpCtx);
+ return DocumentSourceMatch::create(elem.Obj(), pExpCtx);
}
BSONObj DocumentSourceMatch::getQuery() const {
diff --git a/src/mongo/db/pipeline/document_source_match_test.cpp b/src/mongo/db/pipeline/document_source_match_test.cpp
new file mode 100644
index 00000000000..d61262ed1f7
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_match_test.cpp
@@ -0,0 +1,343 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <string>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/pipeline.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using std::string;
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using DocumentSourceMatchTest = AggregationContextFixture;
+
+TEST_F(DocumentSourceMatchTest, RedactSafePortion) {
+ auto expCtx = getExpCtx();
+ auto assertExpectedRedactSafePortion = [&expCtx](string input, string safePortion) {
+ try {
+ auto match = DocumentSourceMatch::create(fromjson(input), expCtx);
+ ASSERT_BSONOBJ_EQ(match->redactSafePortion(), fromjson(safePortion));
+ } catch (...) {
+ unittest::log() << "Problem with redactSafePortion() of: " << input;
+ throw;
+ }
+ };
+
+ // Empty
+ assertExpectedRedactSafePortion("{}", "{}");
+
+ // Basic allowed things
+ assertExpectedRedactSafePortion("{a:1}", "{a:1}");
+
+ assertExpectedRedactSafePortion("{a:'asdf'}", "{a:'asdf'}");
+
+ assertExpectedRedactSafePortion("{a:/asdf/i}", "{a:/asdf/i}");
+
+ assertExpectedRedactSafePortion("{a: {$regex: 'adsf'}}", "{a: {$regex: 'adsf'}}");
+
+ assertExpectedRedactSafePortion("{a: {$regex: 'adsf', $options: 'i'}}",
+ "{a: {$regex: 'adsf', $options: 'i'}}");
+
+ assertExpectedRedactSafePortion("{a: {$mod: [1, 0]}}", "{a: {$mod: [1, 0]}}");
+
+ assertExpectedRedactSafePortion("{a: {$type: 1}}", "{a: {$type: 1}}");
+
+ // Basic disallowed things
+ assertExpectedRedactSafePortion("{a: null}", "{}");
+
+ assertExpectedRedactSafePortion("{a: {}}", "{}");
+
+ assertExpectedRedactSafePortion("{a: []}", "{}");
+
+ assertExpectedRedactSafePortion("{'a.0': 1}", "{}");
+
+ assertExpectedRedactSafePortion("{'a.0.b': 1}", "{}");
+
+ assertExpectedRedactSafePortion("{a: {$ne: 1}}", "{}");
+
+ assertExpectedRedactSafePortion("{a: {$nin: [1, 2, 3]}}", "{}");
+
+ assertExpectedRedactSafePortion("{a: {$exists: true}}",
+ "{}"); // could be allowed but currently isn't
+
+ assertExpectedRedactSafePortion("{a: {$exists: false}}", "{}"); // can never be allowed
+
+ assertExpectedRedactSafePortion("{a: {$size: 1}}", "{}");
+
+ assertExpectedRedactSafePortion("{$nor: [{a:1}]}", "{}");
+
+ // Combinations
+ assertExpectedRedactSafePortion("{a:1, b: 'asdf'}", "{a:1, b: 'asdf'}");
+
+ assertExpectedRedactSafePortion("{a:1, b: null}", "{a:1}");
+
+ assertExpectedRedactSafePortion("{a:null, b: null}", "{}");
+
+ // $elemMatch
+
+ assertExpectedRedactSafePortion("{a: {$elemMatch: {b: 1}}}", "{a: {$elemMatch: {b: 1}}}");
+
+ assertExpectedRedactSafePortion("{a: {$elemMatch: {b:null}}}", "{}");
+
+ assertExpectedRedactSafePortion("{a: {$elemMatch: {b:null, c:1}}}",
+ "{a: {$elemMatch: {c: 1}}}");
+
+ // explicit $and
+ assertExpectedRedactSafePortion("{$and:[{a: 1}]}", "{$and:[{a: 1}]}");
+
+ assertExpectedRedactSafePortion("{$and:[{a: 1}, {b: null}]}", "{$and:[{a: 1}]}");
+
+ assertExpectedRedactSafePortion("{$and:[{a: 1}, {b: null, c:1}]}", "{$and:[{a: 1}, {c:1}]}");
+
+ assertExpectedRedactSafePortion("{$and:[{a: null}, {b: null}]}", "{}");
+
+ // explicit $or
+ assertExpectedRedactSafePortion("{$or:[{a: 1}]}", "{$or:[{a: 1}]}");
+
+ assertExpectedRedactSafePortion("{$or:[{a: 1}, {b: null}]}", "{}");
+
+ assertExpectedRedactSafePortion("{$or:[{a: 1}, {b: null, c:1}]}", "{$or:[{a: 1}, {c:1}]}");
+
+ assertExpectedRedactSafePortion("{$or:[{a: null}, {b: null}]}", "{}");
+
+ // $all and $in
+ assertExpectedRedactSafePortion("{a: {$all: [1, 0]}}", "{a: {$all: [1, 0]}}");
+
+ assertExpectedRedactSafePortion("{a: {$all: [1, 0, null]}}", "{a: {$all: [1, 0]}}");
+
+ assertExpectedRedactSafePortion("{a: {$all: [{$elemMatch: {b:1}}]}}",
+ "{}"); // could be allowed but currently isn't
+
+ assertExpectedRedactSafePortion("{a: {$in: [1, 0]}}", "{a: {$in: [1, 0]}}");
+
+ assertExpectedRedactSafePortion("{a: {$in: [1, 0, null]}}", "{}");
+
+ {
+ const char* comparisonOps[] = {"$gt", "$lt", "$gte", "$lte", NULL};
+ for (int i = 0; comparisonOps[i]; i++) {
+ const char* op = comparisonOps[i];
+ assertExpectedRedactSafePortion(string("{a: {") + op + ": 1}}",
+ string("{a: {") + op + ": 1}}");
+
+ // $elemMatch takes direct expressions ...
+ assertExpectedRedactSafePortion(string("{a: {$elemMatch: {") + op + ": 1}}}",
+ string("{a: {$elemMatch: {") + op + ": 1}}}");
+
+ // ... or top-level style full matches
+ assertExpectedRedactSafePortion(string("{a: {$elemMatch: {b: {") + op + ": 1}}}}",
+ string("{a: {$elemMatch: {b: {") + op + ": 1}}}}");
+
+ assertExpectedRedactSafePortion(string("{a: {") + op + ": null}}", "{}");
+
+ assertExpectedRedactSafePortion(string("{a: {") + op + ": {}}}", "{}");
+
+ assertExpectedRedactSafePortion(string("{a: {") + op + ": []}}", "{}");
+
+ assertExpectedRedactSafePortion(string("{'a.0': {") + op + ": null}}", "{}");
+
+ assertExpectedRedactSafePortion(string("{'a.0.b': {") + op + ": null}}", "{}");
+ }
+ }
+}
+
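+// For the dependency tests below: SEE_NEXT indicates that this stage alone does not fix the
+// full dependency set, so later stages must also be consulted, while EXHAUSTIVE_ALL indicates
+// that every required field (and all metadata) is already known.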
+TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfAllBranchesOfOrClause) {
+ auto match =
+ DocumentSourceMatch::create(fromjson("{$or: [{a: 1}, {'x.y': {$gt: 4}}]}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("x.y"));
+ ASSERT_EQUALS(2U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, TextSearchShouldRequireWholeDocumentAndTextScore) {
+ auto match = DocumentSourceMatch::create(fromjson("{$text: {$search: 'hello'} }"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(true, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ShouldOnlyAddOuterFieldAsDependencyOfImplicitEqualityPredicate) {
+ // Parses to {a: {$eq: {notAField: {$gte: 4}}}}.
+ auto match = DocumentSourceMatch::create(fromjson("{a: {notAField: {$gte: 4}}}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfClausesWithinElemMatchAsDottedPaths) {
+ auto match =
+ DocumentSourceMatch::create(fromjson("{a: {$elemMatch: {c: {$gte: 4}}}}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a.c"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(2U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ShouldAddOuterFieldToDependenciesIfElemMatchContainsNoFieldNames) {
+ auto match =
+ DocumentSourceMatch::create(fromjson("{a: {$elemMatch: {$gt: 1, $lt: 5}}}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ShouldAddNotClausesFieldAsDependency) {
+    auto match = DocumentSourceMatch::create(fromjson("{b: {$not: {$gte: 4}}}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("b"));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ShouldAddDependenciesOfEachNorClause) {
+ auto match = DocumentSourceMatch::create(
+ fromjson("{$nor: [{'a.b': {$gte: 4}}, {'b.c': {$in: [1, 2]}}]}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a.b"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
+ ASSERT_EQUALS(2U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, CommentShouldNotAddAnyDependencies) {
+ auto match = DocumentSourceMatch::create(fromjson("{$comment: 'misleading?'}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(0U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, ClauseAndedWithCommentShouldAddDependencies) {
+ auto match =
+ DocumentSourceMatch::create(fromjson("{a: 4, $comment: 'irrelevant'}"), getExpCtx());
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceMatchTest, MultipleMatchStagesShouldCombineIntoOne) {
+ auto match1 = DocumentSourceMatch::create(BSON("a" << 1), getExpCtx());
+ auto match2 = DocumentSourceMatch::create(BSON("b" << 1), getExpCtx());
+ auto match3 = DocumentSourceMatch::create(BSON("c" << 1), getExpCtx());
+
+ Pipeline::SourceContainer container;
+
+ // Check initial state
+ ASSERT_BSONOBJ_EQ(match1->getQuery(), BSON("a" << 1));
+ ASSERT_BSONOBJ_EQ(match2->getQuery(), BSON("b" << 1));
+ ASSERT_BSONOBJ_EQ(match3->getQuery(), BSON("c" << 1));
+
+ container.push_back(match1);
+ container.push_back(match2);
+ match1->optimizeAt(container.begin(), &container);
+
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_BSONOBJ_EQ(match1->getQuery(), fromjson("{'$and': [{a:1}, {b:1}]}"));
+
+ container.push_back(match3);
+ match1->optimizeAt(container.begin(), &container);
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_BSONOBJ_EQ(match1->getQuery(),
+ fromjson("{'$and': [{'$and': [{a:1}, {b:1}]},"
+ "{c:1}]}"));
+}
+
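+// getObjectForMatch() serializes only the portion of the input Document needed to evaluate a
+// match on the given field paths. As the tests below show, a dotted path such as "b.c" causes
+// the entire top-level field "b" to be extracted, and arrays are always extracted whole.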
+TEST(ObjectForMatch, ShouldExtractTopLevelFieldIfDottedFieldNeeded) {
+ Document input(fromjson("{a: 1, b: {c: 1, d: 1}}"));
+ BSONObj expected = fromjson("{b: {c: 1, d: 1}}");
+ ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b.c"}));
+}
+
+TEST(ObjectForMatch, ShouldExtractEntireArray) {
+ Document input(fromjson("{a: [1, 2, 3], b: 1}"));
+ BSONObj expected = fromjson("{a: [1, 2, 3]}");
+ ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"a"}));
+}
+
+TEST(ObjectForMatch, ShouldOnlyAddPrefixedFieldOnceIfTwoDottedSubfields) {
+ Document input(fromjson("{a: 1, b: {c: 1, f: {d: {e: 1}}}}"));
+ BSONObj expected = fromjson("{b: {c: 1, f: {d: {e: 1}}}}");
+ ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b.f", "b.f.d.e"}));
+}
+
+TEST(ObjectForMatch, MissingFieldShouldNotAppearInResult) {
+ Document input(fromjson("{a: 1}"));
+ BSONObj expected;
+ ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b", "c"}));
+}
+
+TEST(ObjectForMatch, ShouldSerializeNothingIfNothingIsNeeded) {
+ Document input(fromjson("{a: 1, b: {c: 1}}"));
+ BSONObj expected;
+ ASSERT_BSONOBJ_EQ(expected,
+ DocumentSourceMatch::getObjectForMatch(input, std::set<std::string>{}));
+}
+
+TEST(ObjectForMatch, ShouldExtractEntireArrayFromPrefixOfDottedField) {
+ Document input(fromjson("{a: [{b: 1}, {b: 2}], c: 1}"));
+ BSONObj expected = fromjson("{a: [{b: 1}, {b: 2}]}");
+ ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"a.b"}));
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_mock_test.cpp b/src/mongo/db/pipeline/document_source_mock_test.cpp
new file mode 100644
index 00000000000..acf4f21f3fe
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_mock_test.cpp
@@ -0,0 +1,72 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
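+// DocumentSourceMock is a test-only stage that returns a fixed sequence of documents. These
+// tests exercise each of its factory overloads: a single Document, a deque of Documents, a
+// single JSON string, a list of JSON strings, and no arguments at all.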
+TEST(DocumentSourceMockTest, OneDoc) {
+ auto doc = Document{{"a", 1}};
+ auto source = DocumentSourceMock::create(doc);
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), doc);
+ ASSERT(source->getNext().isEOF());
+}
+
+TEST(DocumentSourceMockTest, DequeDocuments) {
+ auto source = DocumentSourceMock::create({Document{{"a", 1}}, Document{{"a", 2}}});
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), (Document{{"a", 1}}));
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), (Document{{"a", 2}}));
+ ASSERT(source->getNext().isEOF());
+}
+
+TEST(DocumentSourceMockTest, StringJSON) {
+ auto source = DocumentSourceMock::create("{a : 1}");
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), (Document{{"a", 1}}));
+ ASSERT(source->getNext().isEOF());
+}
+
+TEST(DocumentSourceMockTest, DequeStringJSONs) {
+ auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}"});
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), (Document{{"a", 1}}));
+ ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), (Document{{"a", 2}}));
+ ASSERT(source->getNext().isEOF());
+}
+
+TEST(DocumentSourceMockTest, Empty) {
+ auto source = DocumentSourceMock::create();
+ ASSERT(source->getNext().isEOF());
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_project.cpp b/src/mongo/db/pipeline/document_source_project.cpp
index cbbe94c8862..aa6a1529a26 100644
--- a/src/mongo/db/pipeline/document_source_project.cpp
+++ b/src/mongo/db/pipeline/document_source_project.cpp
@@ -43,12 +43,18 @@ using parsed_aggregation_projection::ProjectionType;
REGISTER_DOCUMENT_SOURCE(project, DocumentSourceProject::createFromBson);
+intrusive_ptr<DocumentSource> DocumentSourceProject::create(
+ BSONObj projectSpec, const intrusive_ptr<ExpressionContext>& expCtx) {
+ intrusive_ptr<DocumentSource> project(new DocumentSourceSingleDocumentTransformation(
+ expCtx, ParsedAggregationProjection::create(projectSpec), "$project"));
+ project->injectExpressionContext(expCtx);
+ return project;
+}
+
intrusive_ptr<DocumentSource> DocumentSourceProject::createFromBson(
BSONElement elem, const intrusive_ptr<ExpressionContext>& expCtx) {
uassert(15969, "$project specification must be an object", elem.type() == Object);
-
- return new DocumentSourceSingleDocumentTransformation(
- expCtx, ParsedAggregationProjection::create(elem.Obj()), "$project");
+ return DocumentSourceProject::create(elem.Obj(), expCtx);
}
} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_project_test.cpp b/src/mongo/db/pipeline/document_source_project_test.cpp
new file mode 100644
index 00000000000..12c5f72f087
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_project_test.cpp
@@ -0,0 +1,173 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <vector>
+
+#include "mongo/bson/bsonelement.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/value.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using boost::intrusive_ptr;
+using std::vector;
+
+//
+// DocumentSourceProject delegates most of its work to ParsedAggregationProjection, and most of
+// the functional tests target ParsedAggregationProjection directly. The tests here are meant as
+// simpler integration tests.
+//
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using ProjectStageTest = AggregationContextFixture;
+
+TEST_F(ProjectStageTest, InclusionProjectionShouldRemoveUnspecifiedFields) {
+ auto project =
+ DocumentSourceProject::create(BSON("a" << true << "c" << BSON("d" << true)), getExpCtx());
+ auto source = DocumentSourceMock::create("{_id: 0, a: 1, b: 1, c: {d: 1}}");
+ project->setSource(source.get());
+ // The first result exists and is as expected.
+ auto next = project->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_EQUALS(1, next.getDocument().getField("a").getInt());
+ ASSERT(next.getDocument().getField("b").missing());
+ // The _id field is included by default in the root document.
+ ASSERT_EQUALS(0, next.getDocument().getField("_id").getInt());
+ // The nested c.d inclusion.
+ ASSERT_EQUALS(1, next.getDocument()["c"]["d"].getInt());
+}
+
+TEST_F(ProjectStageTest, ShouldOptimizeInnerExpressions) {
+ auto project = DocumentSourceProject::create(
+ BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))), getExpCtx());
+ project->optimize();
+ // The $and should have been replaced with its only argument.
+ vector<Value> serializedArray;
+ project->serializeToArray(serializedArray);
+ ASSERT_BSONOBJ_EQ(serializedArray[0].getDocument().toBson(),
+ fromjson("{$project: {_id: true, a: {$const: true}}}"));
+}
+
+TEST_F(ProjectStageTest, ShouldErrorOnNonObjectSpec) {
+ BSONObj spec = BSON("$project"
+ << "foo");
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceProject::createFromBson(specElement, getExpCtx()), UserException);
+}
+
+/**
+ * Basic sanity check that two documents can be projected correctly with a simple inclusion
+ * projection.
+ */
+TEST_F(ProjectStageTest, InclusionShouldBeAbleToProcessMultipleDocuments) {
+ auto project = DocumentSourceProject::create(BSON("a" << true), getExpCtx());
+ auto source = DocumentSourceMock::create({"{a: 1, b: 2}", "{a: 3, b: 4}"});
+ project->setSource(source.get());
+ auto next = project->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT_EQUALS(1, next.getDocument().getField("a").getInt());
+ ASSERT(next.getDocument().getField("b").missing());
+
+ next = project->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT_EQUALS(3, next.getDocument().getField("a").getInt());
+ ASSERT(next.getDocument().getField("b").missing());
+
+ ASSERT(project->getNext().isEOF());
+ ASSERT(project->getNext().isEOF());
+ ASSERT(project->getNext().isEOF());
+}
+
+/**
+ * Basic sanity check that two documents can be projected correctly with a simple exclusion
+ * projection.
+ */
+TEST_F(ProjectStageTest, ExclusionShouldBeAbleToProcessMultipleDocuments) {
+ auto project = DocumentSourceProject::create(BSON("a" << false), getExpCtx());
+ auto source = DocumentSourceMock::create({"{a: 1, b: 2}", "{a: 3, b: 4}"});
+ project->setSource(source.get());
+ auto next = project->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT(next.getDocument().getField("a").missing());
+ ASSERT_EQUALS(2, next.getDocument().getField("b").getInt());
+
+ next = project->getNext();
+ ASSERT(next.isAdvanced());
+ ASSERT(next.getDocument().getField("a").missing());
+ ASSERT_EQUALS(4, next.getDocument().getField("b").getInt());
+
+ ASSERT(project->getNext().isEOF());
+ ASSERT(project->getNext().isEOF());
+ ASSERT(project->getNext().isEOF());
+}
+
+TEST_F(ProjectStageTest, InclusionShouldAddDependenciesOfIncludedAndComputedFields) {
+ auto project = DocumentSourceProject::create(
+ fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"),
+ getExpCtx());
+ DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, project->getDependencies(&dependencies));
+ ASSERT_EQUALS(5U, dependencies.fields.size());
+
+ // Implicit _id dependency.
+ ASSERT_EQUALS(1U, dependencies.fields.count("_id"));
+
+ // Inclusion dependency.
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+
+ // Field path expression dependency.
+ ASSERT_EQUALS(1U, dependencies.fields.count("b"));
+
+ // Nested expression dependencies.
+ ASSERT_EQUALS(1U, dependencies.fields.count("c"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("d"));
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(true, dependencies.getNeedTextScore());
+}
+
+TEST_F(ProjectStageTest, ExclusionShouldNotAddDependencies) {
+ auto project = DocumentSourceProject::create(fromjson("{a: false, 'b.c': false}"), getExpCtx());
+
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, project->getDependencies(&dependencies));
+
+ ASSERT_EQUALS(0U, dependencies.fields.size());
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_redact.cpp b/src/mongo/db/pipeline/document_source_redact.cpp
index 4bd685a8382..9ad79eca661 100644
--- a/src/mongo/db/pipeline/document_source_redact.cpp
+++ b/src/mongo/db/pipeline/document_source_redact.cpp
@@ -84,10 +84,7 @@ Pipeline::SourceContainer::iterator DocumentSourceRedact::optimizeAt(
// create an infinite number of $matches.
Pipeline::SourceContainer::iterator returnItr = std::next(itr);
- container->insert(
- itr,
- DocumentSourceMatch::createFromBson(
- BSON("$match" << redactSafePortion).firstElement(), this->pExpCtx));
+ container->insert(itr, DocumentSourceMatch::create(redactSafePortion, pExpCtx));
return returnItr;
}
diff --git a/src/mongo/db/pipeline/document_source_redact_test.cpp b/src/mongo/db/pipeline/document_source_redact_test.cpp
new file mode 100644
index 00000000000..f4de62feff0
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_redact_test.cpp
@@ -0,0 +1,61 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/pipeline.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
+// This provides access to getExpCtx(), but we'll use a different name for this test suite.
+using DocumentSourceRedactTest = AggregationContextFixture;
+
+TEST_F(DocumentSourceRedactTest, ShouldCopyRedactSafePartOfMatchBeforeItself) {
+ BSONObj redactSpec = BSON("$redact"
+ << "$$PRUNE");
+ auto redact = DocumentSourceRedact::createFromBson(redactSpec.firstElement(), getExpCtx());
+ auto match = DocumentSourceMatch::create(BSON("a" << 1), getExpCtx());
+
+ Pipeline::SourceContainer pipeline;
+ pipeline.push_back(redact);
+ pipeline.push_back(match);
+
+ pipeline.front()->optimizeAt(pipeline.begin(), &pipeline);
+
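+    // The redact-safe portion of the $match ({a: 1}) should have been copied in front of the
+    // $redact stage, leaving the pipeline as [$match, $redact, $match].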
+ ASSERT_EQUALS(pipeline.size(), 3U);
+ ASSERT(dynamic_cast<DocumentSourceMatch*>(pipeline.front().get()));
+}
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_replace_root_test.cpp b/src/mongo/db/pipeline/document_source_replace_root_test.cpp
new file mode 100644
index 00000000000..0ba1c7ab2b9
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_replace_root_test.cpp
@@ -0,0 +1,339 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+
+using boost::intrusive_ptr;
+
+class ReplaceRootBasics : public AggregationContextFixture {
+protected:
+ intrusive_ptr<DocumentSource> createReplaceRoot(const BSONObj& replaceRoot) {
+ BSONObj spec = BSON("$replaceRoot" << replaceRoot);
+ BSONElement specElement = spec.firstElement();
+ return DocumentSourceReplaceRoot::createFromBson(specElement, getExpCtx());
+ }
+
+ /**
+ * Assert 'source' consistently reports it is exhausted.
+ */
+ void assertExhausted(const boost::intrusive_ptr<DocumentSource>& source) const {
+ ASSERT(source->getNext().isEOF());
+ ASSERT(source->getNext().isEOF());
+ ASSERT(source->getNext().isEOF());
+ }
+};
+
+// Verify that a 'newRoot' field path that references a subdocument results in the replacement
+// of the root with that subdocument.
+TEST_F(ReplaceRootBasics, FieldPathAsNewRootPromotesSubdocument) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a"));
+ Document subdoc = Document{{"b", 1}, {"c", "hello"}, {"d", Document{{"e", 2}}}};
+ auto mock = DocumentSourceMock::create({Document{{"a", subdoc}}});
+ replaceRoot->setSource(mock.get());
+
+ auto next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc);
+ assertExhausted(replaceRoot);
+}
+
+// Verify that a 'newRoot' dotted field path that references a subdocument results in the
+// replacement of the root with that subdocument.
+TEST_F(ReplaceRootBasics, DottedFieldPathAsNewRootPromotesSubdocument) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a.b"));
+ // source document: {a: {b: {c: 3}}}
+ Document subdoc = Document{{"c", 3}};
+ auto mock = DocumentSourceMock::create({Document{{"a", Document{{"b", subdoc}}}}});
+ replaceRoot->setSource(mock.get());
+
+ auto next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc);
+ assertExhausted(replaceRoot);
+}
+
+// Verify that a 'newRoot' field path that references a subdocument replaces the root with that
+// subdocument in each of multiple input documents.
+TEST_F(ReplaceRootBasics, FieldPathAsNewRootPromotesSubdocumentInMultipleDocuments) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a"));
+ Document subdoc1 = Document{{"b", 1}, {"c", 2}};
+ Document subdoc2 = Document{{"b", 3}, {"c", 4}};
+ auto mock = DocumentSourceMock::create({Document{{"a", subdoc1}}, Document{{"a", subdoc2}}});
+ replaceRoot->setSource(mock.get());
+
+ // Verify that the first document that comes out is the first document we put in.
+ auto next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc1);
+
+ next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc2);
+
+ assertExhausted(replaceRoot);
+}
+
+// Verify that when newRoot contains an expression object, the document is replaced with that
+// object.
+TEST_F(ReplaceRootBasics, ExpressionObjectForNewRootReplacesRootWithThatObject) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot" << BSON("b" << 1)));
+ auto mock = DocumentSourceMock::create({Document{{"a", 2}}});
+ replaceRoot->setSource(mock.get());
+
+ auto next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"b", 1}}));
+ assertExhausted(replaceRoot);
+
+ BSONObj newObject = BSON("a" << 1 << "b" << 2 << "arr" << BSON_ARRAY(3 << 4 << 5));
+ replaceRoot = createReplaceRoot(BSON("newRoot" << newObject));
+ mock = DocumentSourceMock::create({Document{{"c", 2}}});
+ replaceRoot->setSource(mock.get());
+
+ next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), Document(newObject));
+ assertExhausted(replaceRoot);
+
+ replaceRoot = createReplaceRoot(BSON("newRoot" << BSON("a" << BSON("b" << 1))));
+ mock = DocumentSourceMock::create({Document{{"c", 2}}});
+ replaceRoot->setSource(mock.get());
+
+ next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"a", Document{{"b", 1}}}}));
+ assertExhausted(replaceRoot);
+
+ replaceRoot = createReplaceRoot(BSON("newRoot" << BSON("a" << 2)));
+ mock = DocumentSourceMock::create({Document{{"b", 2}}});
+ replaceRoot->setSource(mock.get());
+
+ next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"a", 2}}));
+ assertExhausted(replaceRoot);
+}
+
+// Verify that when newRoot contains a system variable, the document is replaced with the correct
+// object corresponding to that system variable.
+TEST_F(ReplaceRootBasics, SystemVariableForNewRootReplacesRootWithThatObject) {
+ // System variables
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$$CURRENT"));
+ Document inputDoc = Document{{"b", 2}};
+ auto mock = DocumentSourceMock::create({inputDoc});
+ replaceRoot->setSource(mock.get());
+
+ auto next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), inputDoc);
+ assertExhausted(replaceRoot);
+
+ replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$$ROOT"));
+ mock = DocumentSourceMock::create({inputDoc});
+ replaceRoot->setSource(mock.get());
+
+ next = replaceRoot->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.releaseDocument(), inputDoc);
+ assertExhausted(replaceRoot);
+}
+
+// Verify that we throw a user assertion when the expression at newRoot does not resolve to an
+// object, as the $replaceRoot specification requires.
+TEST_F(ReplaceRootBasics, ErrorsWhenNewRootDoesNotEvaluateToAnObject) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a"));
+
+ // A string is not an object.
+ auto mock = DocumentSourceMock::create({Document{{"a", "hello"}}});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40228);
+
+ // An integer is not an object.
+ mock = DocumentSourceMock::create({Document{{"a", 5}}});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40228);
+
+ // Literals are not objects.
+ replaceRoot = createReplaceRoot(BSON("newRoot" << BSON("$literal" << 1)));
+ mock = DocumentSourceMock::create({Document()});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40228);
+ assertExhausted(replaceRoot);
+
+ // Most operator expressions do not resolve to objects.
+ replaceRoot = createReplaceRoot(BSON("newRoot" << BSON("$and"
+ << "$a")));
+ mock = DocumentSourceMock::create({Document{{"a", true}}});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40228);
+ assertExhausted(replaceRoot);
+}
+
+// Verify that when newRoot contains a field path and that field path doesn't exist, we throw a user
+// error. This error happens whenever the expression evaluates to a "missing" Value.
+TEST_F(ReplaceRootBasics, ErrorsIfNewRootFieldPathDoesNotExist) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a"));
+
+ auto mock = DocumentSourceMock::create({Document()});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40232);
+ assertExhausted(replaceRoot);
+
+ mock = DocumentSourceMock::create({Document{{"e", Document{{"b", Document{{"c", 3}}}}}}});
+ replaceRoot->setSource(mock.get());
+ ASSERT_THROWS_CODE(replaceRoot->getNext(), UserException, 40232);
+ assertExhausted(replaceRoot);
+}
+
+// Verify that the only dependent field is the root we are replacing with.
+TEST_F(ReplaceRootBasics, OnlyDependentFieldIsNewRoot) {
+ auto replaceRoot = createReplaceRoot(BSON("newRoot"
+ << "$a.b"));
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, replaceRoot->getDependencies(&dependencies));
+
+ // Should only depend on field a.b
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(1U, dependencies.fields.count("a.b"));
+ ASSERT_EQUALS(0U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(0U, dependencies.fields.count("b"));
+
+ // Should not need any other fields.
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+/**
+ * Fixture to test error cases of initializing the $replaceRoot stage.
+ */
+class ReplaceRootSpec : public AggregationContextFixture {
+public:
+ intrusive_ptr<DocumentSource> createReplaceRoot(const BSONObj& replaceRootSpec) {
+ return DocumentSourceReplaceRoot::createFromBson(replaceRootSpec.firstElement(),
+ getExpCtx());
+ }
+
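+    // Wraps 'spec' so it forms the entire stage, i.e. {$replaceRoot: spec}.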
+ BSONObj createSpec(BSONObj spec) {
+ return BSON("$replaceRoot" << spec);
+ }
+
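+    // Wraps 'spec' as the newRoot expression, i.e. {$replaceRoot: {newRoot: spec}}.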
+ BSONObj createFullSpec(BSONObj spec) {
+ return BSON("$replaceRoot" << BSON("newRoot" << spec));
+ }
+};
+
+// Verify that the creation of a $replaceRoot stage requires an object specification
+TEST_F(ReplaceRootSpec, CreationRequiresObjectSpecification) {
+ ASSERT_THROWS_CODE(createReplaceRoot(BSON("$replaceRoot" << 1)), UserException, 40229);
+ ASSERT_THROWS_CODE(createReplaceRoot(BSON("$replaceRoot"
+ << "string")),
+ UserException,
+ 40229);
+}
+
+// Verify that the only valid option for the $replaceRoot object specification is newRoot.
+TEST_F(ReplaceRootSpec, OnlyValidOptionInObjectSpecIsNewRoot) {
+ ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$a"
+ << "root"
+ << 2))),
+ UserException,
+ 40230);
+ ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$a"
+ << "path"
+ << 2))),
+ UserException,
+ 40230);
+ ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("path"
+ << "$a"))),
+ UserException,
+ 40230);
+}
+
+// Verify that $replaceRoot requires a valid expression as input to the newRoot option.
+TEST_F(ReplaceRootSpec, RequiresExpressionForNewRootOption) {
+ ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSONObj())), UserException, 40231);
+ ASSERT_THROWS(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$$$a"))),
+ UserException);
+ ASSERT_THROWS(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$$a"))),
+ UserException);
+ ASSERT_THROWS(createReplaceRoot(createFullSpec(BSON("$map" << BSON("a" << 1)))), UserException);
+}
+
+// Verify that newRoot accepts all types of expressions.
+TEST_F(ReplaceRootSpec, NewRootAcceptsAllTypesOfExpressions) {
+ // Field Path and system variables
+ ASSERT_TRUE(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$a.b.c.d.e"))));
+ ASSERT_TRUE(createReplaceRoot(createSpec(BSON("newRoot"
+ << "$$CURRENT"))));
+
+ // Literals
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$literal" << 1))));
+
+ // Expression Objects
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("a" << BSON("b" << 1)))));
+
+ // Operator Expressions
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$and"
+ << "$a"))));
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$gt" << BSON_ARRAY("$a" << 1)))));
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$sqrt"
+ << "$a"))));
+
+ // Accumulators
+ ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$sum"
+ << "$a"))));
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_sample_test.cpp b/src/mongo/db/pipeline/document_source_sample_test.cpp
new file mode 100644
index 00000000000..333b0cc5b16
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_sample_test.cpp
@@ -0,0 +1,387 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <memory>
+
+#include "mongo/bson/bsonelement.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/expression_context.h"
+#include "mongo/db/service_context.h"
+#include "mongo/stdx/memory.h"
+#include "mongo/unittest/unittest.h"
+#include "mongo/util/clock_source_mock.h"
+#include "mongo/util/tick_source_mock.h"
+
+namespace mongo {
+
+std::unique_ptr<ServiceContextNoop> makeTestServiceContext() {
+ auto service = stdx::make_unique<ServiceContextNoop>();
+ service->setFastClockSource(stdx::make_unique<ClockSourceMock>());
+ service->setTickSource(stdx::make_unique<TickSourceMock>());
+ return service;
+}
+
+namespace {
+using boost::intrusive_ptr;
+
+static const char* const ns = "unittests.document_source_sample_tests";
+
+// Stub to avoid including the server environment library.
+MONGO_INITIALIZER(SetGlobalEnvironment)(InitializerContext* context) {
+ setGlobalServiceContext(makeTestServiceContext());
+ return Status::OK();
+}
+
+class SampleBasics : public AggregationContextFixture {
+public:
+ SampleBasics() : _mock(DocumentSourceMock::create()) {}
+
+protected:
+ virtual void createSample(long long size) {
+ BSONObj spec = BSON("$sample" << BSON("size" << size));
+ BSONElement specElement = spec.firstElement();
+ _sample = DocumentSourceSample::createFromBson(specElement, getExpCtx());
+ sample()->setSource(_mock.get());
+ checkBsonRepresentation(spec);
+ }
+
+ DocumentSource* sample() {
+ return _sample.get();
+ }
+
+ DocumentSourceMock* source() {
+ return _mock.get();
+ }
+
+ /**
+ * Makes some general assertions about the results of a $sample stage.
+ *
+     * Creates a $sample stage with the given size, advances it 'nExpectedResults' times while
+     * asserting that the results come back in descending order of their assigned random values,
+     * and then asserts that the stage is exhausted.
+ */
+ void checkResults(long long size, long long nExpectedResults) {
+ createSample(size);
+
+ boost::optional<Document> prevDoc;
+ for (long long i = 0; i < nExpectedResults; i++) {
+ auto nextResult = sample()->getNext();
+ ASSERT_TRUE(nextResult.isAdvanced());
+ auto thisDoc = nextResult.releaseDocument();
+ ASSERT_TRUE(thisDoc.hasRandMetaField());
+ if (prevDoc) {
+ ASSERT_LTE(thisDoc.getRandMetaField(), prevDoc->getRandMetaField());
+ }
+ prevDoc = std::move(thisDoc);
+ }
+ assertEOF();
+ }
+
+ /**
+ * Helper to load 'nDocs' documents into the source stage.
+ */
+ void loadDocuments(int nDocs) {
+ for (int i = 0; i < nDocs; i++) {
+ _mock->queue.push_back(DOC("_id" << i));
+ }
+ }
+
+ /**
+ * Assert that iterator state accessors consistently report the source is exhausted.
+ */
+ void assertEOF() const {
+ ASSERT(_sample->getNext().isEOF());
+ ASSERT(_sample->getNext().isEOF());
+ ASSERT(_sample->getNext().isEOF());
+ }
+
+protected:
+ intrusive_ptr<DocumentSource> _sample;
+ intrusive_ptr<DocumentSourceMock> _mock;
+
+private:
+ /**
+     * Check that the BSON representation generated by the source matches the BSON it was
+ * created with.
+ */
+ void checkBsonRepresentation(const BSONObj& spec) {
+ Value serialized = static_cast<DocumentSourceSample*>(sample())->serialize(false);
+ auto generatedSpec = serialized.getDocument().toBson();
+ ASSERT_BSONOBJ_EQ(spec, generatedSpec);
+ }
+};
+
+/**
+ * A sample of size 0 should return 0 results.
+ */
+TEST_F(SampleBasics, ZeroSize) {
+ loadDocuments(2);
+ checkResults(0, 0);
+}
+
+/**
+ * If the source stage is exhausted, the $sample stage should also be exhausted.
+ */
+TEST_F(SampleBasics, SourceEOFBeforeSample) {
+ loadDocuments(5);
+ checkResults(10, 5);
+}
+
+/**
+ * A $sample stage should limit the number of results to the given size.
+ */
+TEST_F(SampleBasics, SampleEOFBeforeSource) {
+ loadDocuments(10);
+ checkResults(5, 5);
+}
+
+/**
+ * The incoming documents should not be modified by a $sample stage (except their metadata).
+ */
+TEST_F(SampleBasics, DocsUnmodified) {
+ createSample(1);
+ source()->queue.push_back(DOC("a" << 1 << "b" << DOC("c" << 2)));
+ auto next = sample()->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ auto doc = next.releaseDocument();
+ ASSERT_EQUALS(1, doc["a"].getInt());
+ ASSERT_EQUALS(2, doc["b"]["c"].getInt());
+ ASSERT_TRUE(doc.hasRandMetaField());
+ assertEOF();
+}
+
+/**
+ * Fixture to test error cases of the $sample stage.
+ */
+class InvalidSampleSpec : public AggregationContextFixture {
+public:
+ intrusive_ptr<DocumentSource> createSample(BSONObj sampleSpec) {
+ auto specElem = sampleSpec.firstElement();
+ return DocumentSourceSample::createFromBson(specElem, getExpCtx());
+ }
+
+ BSONObj createSpec(BSONObj spec) {
+ return BSON("$sample" << spec);
+ }
+};
+
+TEST_F(InvalidSampleSpec, NonObject) {
+ ASSERT_THROWS_CODE(createSample(BSON("$sample" << 1)), UserException, 28745);
+ ASSERT_THROWS_CODE(createSample(BSON("$sample"
+ << "string")),
+ UserException,
+ 28745);
+}
+
+TEST_F(InvalidSampleSpec, NonNumericSize) {
+ ASSERT_THROWS_CODE(createSample(createSpec(BSON("size"
+ << "string"))),
+ UserException,
+ 28746);
+}
+
+TEST_F(InvalidSampleSpec, NegativeSize) {
+ ASSERT_THROWS_CODE(createSample(createSpec(BSON("size" << -1))), UserException, 28747);
+ ASSERT_THROWS_CODE(createSample(createSpec(BSON("size" << -1.0))), UserException, 28747);
+}
+
+TEST_F(InvalidSampleSpec, ExtraOption) {
+ ASSERT_THROWS_CODE(
+ createSample(createSpec(BSON("size" << 1 << "extra" << 2))), UserException, 28748);
+}
+
+TEST_F(InvalidSampleSpec, MissingSize) {
+ ASSERT_THROWS_CODE(createSample(createSpec(BSONObj())), UserException, 28749);
+}
+
+//
+// Test the implementation that gets results from a random cursor.
+//
+
+class SampleFromRandomCursorBasics : public SampleBasics {
+public:
+ void createSample(long long size) override {
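+        // The final argument is the number of documents the stage assumes are in the collection;
+        // it is used when assigning each document's random meta value (see MimicNonOptimized).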
+ _sample = DocumentSourceSampleFromRandomCursor::create(getExpCtx(), size, "_id", 100);
+ sample()->setSource(_mock.get());
+ }
+};
+
+/**
+ * A sample of size zero should not return any results.
+ */
+TEST_F(SampleFromRandomCursorBasics, ZeroSize) {
+ loadDocuments(2);
+ checkResults(0, 0);
+}
+
+/**
+ * When the source stage is exhausted before the requested sample size is reached, the
+ * $sampleFromRandomCursor stage should return only the documents the source produced.
+ */
+TEST_F(SampleFromRandomCursorBasics, SourceEOFBeforeSample) {
+ loadDocuments(5);
+ checkResults(10, 5);
+}
+
+/**
+ * When sampling with a size smaller than the number of documents the source stage can produce,
+ * no more than the requested sample size should be output.
+ */
+TEST_F(SampleFromRandomCursorBasics, SampleEOFBeforeSource) {
+ loadDocuments(10);
+ checkResults(5, 5);
+}
+
+/**
+ * The $sampleFromRandomCursor stage should not modify the contents of the documents.
+ */
+TEST_F(SampleFromRandomCursorBasics, DocsUnmodified) {
+ createSample(1);
+ source()->queue.push_back(DOC("_id" << 1 << "b" << DOC("c" << 2)));
+ auto next = sample()->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ auto doc = next.releaseDocument();
+ ASSERT_EQUALS(1, doc["_id"].getInt());
+ ASSERT_EQUALS(2, doc["b"]["c"].getInt());
+ ASSERT_TRUE(doc.hasRandMetaField());
+ assertEOF();
+}
+
+/**
+ * The $sampleFromRandomCursor stage should ignore duplicate documents.
+ */
+TEST_F(SampleFromRandomCursorBasics, IgnoreDuplicates) {
+ createSample(2);
+ source()->queue.push_back(DOC("_id" << 1));
+ source()->queue.push_back(DOC("_id" << 1)); // Duplicate, should ignore.
+ source()->queue.push_back(DOC("_id" << 2));
+
+ auto next = sample()->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ auto doc = next.releaseDocument();
+ ASSERT_EQUALS(1, doc["_id"].getInt());
+ ASSERT_TRUE(doc.hasRandMetaField());
+ double doc1Meta = doc.getRandMetaField();
+
+ // Should ignore the duplicate {_id: 1}, and return {_id: 2}.
+ next = sample()->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ doc = next.releaseDocument();
+ ASSERT_EQUALS(2, doc["_id"].getInt());
+ ASSERT_TRUE(doc.hasRandMetaField());
+ double doc2Meta = doc.getRandMetaField();
+ ASSERT_GTE(doc1Meta, doc2Meta);
+
+ // Both stages should be exhausted.
+ ASSERT_TRUE(source()->getNext().isEOF());
+ assertEOF();
+}
+
+/**
+ * The $sampleFromRandomCursor stage should error if it receives too many duplicate documents.
+ */
+TEST_F(SampleFromRandomCursorBasics, TooManyDups) {
+ createSample(2);
+ for (int i = 0; i < 1000; i++) {
+ source()->queue.push_back(DOC("_id" << 1));
+ }
+
+ // First should be successful, it's not a duplicate.
+ ASSERT_TRUE(sample()->getNext().isAdvanced());
+
+ // The rest are duplicates, should error.
+ ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28799);
+}
+
+/**
+ * The $sampleFromRandomCursor stage should error if it receives a document without an _id.
+ */
+TEST_F(SampleFromRandomCursorBasics, MissingIdField) {
+ // Once with only a bad document.
+ createSample(2); // _idField is '_id'.
+ source()->queue.push_back(DOC("non_id" << 2));
+ ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28793);
+
+ // Again, with some regular documents before a bad one.
+ createSample(2); // _idField is '_id'.
+ source()->queue.push_back(DOC("_id" << 1));
+ source()->queue.push_back(DOC("_id" << 1));
+ source()->queue.push_back(DOC("non_id" << 2));
+
+ // First should be successful.
+ ASSERT_TRUE(sample()->getNext().isAdvanced());
+
+ ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28793);
+}
+
+/**
+ * The $sampleFromRandomCursor stage should set the random meta value in a way that mimics the
+ * non-optimized case.
+ */
+TEST_F(SampleFromRandomCursorBasics, MimicNonOptimized) {
+    // Compute the average random meta value of each document returned.
+ double firstTotal = 0.0;
+ double secondTotal = 0.0;
+ int nTrials = 10000;
+ for (int i = 0; i < nTrials; i++) {
+ // Sample 2 out of 3 documents.
+ _sample = DocumentSourceSampleFromRandomCursor::create(getExpCtx(), 2, "_id", 3);
+ sample()->setSource(_mock.get());
+
+ source()->queue.push_back(DOC("_id" << 1));
+ source()->queue.push_back(DOC("_id" << 2));
+
+ auto doc = sample()->getNext();
+ ASSERT_TRUE(doc.isAdvanced());
+ ASSERT_TRUE(doc.getDocument().hasRandMetaField());
+ firstTotal += doc.getDocument().getRandMetaField();
+
+ doc = sample()->getNext();
+ ASSERT_TRUE(doc.isAdvanced());
+ ASSERT_TRUE(doc.getDocument().hasRandMetaField());
+ secondTotal += doc.getDocument().getRandMetaField();
+ }
+ // The average random meta value of the first document should be about 0.75. We assume that
+ // 10000 trials is sufficient for us to apply the Central Limit Theorem. Using an error
+ // tolerance of 0.02 gives us a spurious failure rate approximately equal to 10^-24.
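+    // (With a collection of 3 documents, the two returned values behave like the largest and
+    // second-largest of three independent Uniform(0, 1) draws, whose expectations are 3/4 and 2/4.)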
+ ASSERT_GTE(firstTotal / nTrials, 0.73);
+ ASSERT_LTE(firstTotal / nTrials, 0.77);
+
+ // The average random meta value of the second document should be about 0.5.
+ ASSERT_GTE(secondTotal / nTrials, 0.48);
+ ASSERT_LTE(secondTotal / nTrials, 0.52);
+}
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp b/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
new file mode 100644
index 00000000000..bed658e616a
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_sort_by_count_test.cpp
@@ -0,0 +1,138 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <vector>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/document.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/value.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using std::vector;
+using boost::intrusive_ptr;
+
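+// $sortByCount is syntactic sugar: {$sortByCount: <expr>} desugars to
+// [{$group: {_id: <expr>, count: {$sum: 1}}}, {$sort: {count: -1}}], as these tests verify
+// through the explain output of the two returned stages.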
+/**
+ * Fixture to test that $sortByCount returns a DocumentSourceGroup and DocumentSourceSort.
+ */
+class SortByCountReturnsGroupAndSort : public AggregationContextFixture {
+public:
+ void testCreateFromBsonResult(BSONObj sortByCountSpec, Value expectedGroupExplain) {
+ vector<intrusive_ptr<DocumentSource>> result =
+ DocumentSourceSortByCount::createFromBson(sortByCountSpec.firstElement(), getExpCtx());
+
+ ASSERT_EQUALS(result.size(), 2UL);
+
+ const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
+ ASSERT(groupStage);
+
+ const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
+ ASSERT(sortStage);
+
+ // Serialize the DocumentSourceGroup and DocumentSourceSort from $sortByCount so that we can
+ // check the explain output to make sure $group and $sort have the correct fields.
+ const bool explain = true;
+ vector<Value> explainedStages;
+ groupStage->serializeToArray(explainedStages, explain);
+ sortStage->serializeToArray(explainedStages, explain);
+ ASSERT_EQUALS(explainedStages.size(), 2UL);
+
+ auto groupExplain = explainedStages[0];
+ ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
+
+ auto sortExplain = explainedStages[1];
+ auto expectedSortExplain = Value{Document{{"sortKey", Document{{"count", -1}}}}};
+ ASSERT_VALUE_EQ(sortExplain["$sort"], expectedSortExplain);
+ }
+};
+
+TEST_F(SortByCountReturnsGroupAndSort, ExpressionFieldPathSpec) {
+ BSONObj spec = BSON("$sortByCount"
+ << "$x");
+ Value expectedGroupExplain =
+ Value{Document{{"_id", "$x"}, {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+TEST_F(SortByCountReturnsGroupAndSort, ExpressionInObjectSpec) {
+ BSONObj spec = BSON("$sortByCount" << BSON("$floor"
+ << "$x"));
+ Value expectedGroupExplain =
+ Value{Document{{"_id", Document{{"$floor", Value{BSON_ARRAY("$x")}}}},
+ {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+
+ spec = BSON("$sortByCount" << BSON("$eq" << BSON_ARRAY("$x" << 15)));
+ expectedGroupExplain =
+ Value{Document{{"_id", Document{{"$eq", Value{BSON_ARRAY("$x" << BSON("$const" << 15))}}}},
+ {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
+ testCreateFromBsonResult(spec, expectedGroupExplain);
+}
+
+/**
+ * Fixture to test error cases of the $sortByCount stage.
+ */
+class InvalidSortByCountSpec : public AggregationContextFixture {
+public:
+ vector<intrusive_ptr<DocumentSource>> createSortByCount(BSONObj sortByCountSpec) {
+ auto specElem = sortByCountSpec.firstElement();
+ return DocumentSourceSortByCount::createFromBson(specElem, getExpCtx());
+ }
+};
+
+TEST_F(InvalidSortByCountSpec, NonObjectNonStringSpec) {
+ BSONObj spec = BSON("$sortByCount" << 1);
+ ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40149);
+
+ spec = BSON("$sortByCount" << BSONNULL);
+ ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40149);
+}
+
+TEST_F(InvalidSortByCountSpec, NonExpressionInObjectSpec) {
+ BSONObj spec = BSON("$sortByCount" << BSON("field1"
+ << "$x"));
+ ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40147);
+}
+
+TEST_F(InvalidSortByCountSpec, NonFieldPathStringSpec) {
+ BSONObj spec = BSON("$sortByCount"
+ << "test");
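+    // A bare string spec must be a field path (leading '$'); a plain string is rejected.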
+ ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40148);
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_sort_test.cpp b/src/mongo/db/pipeline/document_source_sort_test.cpp
new file mode 100644
index 00000000000..ae409910fd0
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_sort_test.cpp
@@ -0,0 +1,352 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <deque>
+#include <string>
+#include <vector>
+
+#include "mongo/bson/bsonelement.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/pipeline.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+
+// Stub of isMongos(), needed to link without the server environment library.
+bool isMongos() {
+ return false;
+}
+
+namespace {
+
+using boost::intrusive_ptr;
+using std::string;
+using std::vector;
+
+static const BSONObj metaTextScore = BSON("$meta"
+ << "textScore");
+
+class DocumentSourceSortTest : public AggregationContextFixture {
+protected:
+ void createSort(const BSONObj& sortKey = BSON("a" << 1)) {
+ BSONObj spec = BSON("$sort" << sortKey);
+ BSONElement specElement = spec.firstElement();
+ _sort = DocumentSourceSort::createFromBson(specElement, getExpCtx());
+ checkBsonRepresentation(spec);
+ }
+ DocumentSourceSort* sort() {
+ return dynamic_cast<DocumentSourceSort*>(_sort.get());
+ }
+ /** Assert that iterator state accessors consistently report the source is exhausted. */
+ void assertEOF() const {
+ ASSERT(_sort->getNext().isEOF());
+ ASSERT(_sort->getNext().isEOF());
+ ASSERT(_sort->getNext().isEOF());
+ }
+
+private:
+ /**
+     * Check that the BSON representation generated by the source matches the BSON it was
+ * created with.
+ */
+ void checkBsonRepresentation(const BSONObj& spec) {
+ vector<Value> arr;
+ _sort->serializeToArray(arr);
+ BSONObj generatedSpec = arr[0].getDocument().toBson();
+ ASSERT_BSONOBJ_EQ(spec, generatedSpec);
+ }
+ intrusive_ptr<DocumentSource> _sort;
+};
+
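+// The next four tests check that invalid $sort specs are rejected at parse time.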
+TEST_F(DocumentSourceSortTest, RejectsNonObjectSpec) {
+ BSONObj spec = BSON("$sort" << 1);
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceSort::createFromBson(specElement, getExpCtx()), UserException);
+}
+
+TEST_F(DocumentSourceSortTest, RejectsEmptyObjectSpec) {
+ BSONObj spec = BSON("$sort" << BSONObj());
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceSort::createFromBson(specElement, getExpCtx()), UserException);
+}
+
+TEST_F(DocumentSourceSortTest, RejectsSpecWithNonNumericValues) {
+ BSONObj spec = BSON("$sort" << BSON("a"
+ << "b"));
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceSort::createFromBson(specElement, getExpCtx()), UserException);
+}
+
+TEST_F(DocumentSourceSortTest, RejectsSpecWithZeroAsValue) {
+ BSONObj spec = BSON("$sort" << BSON("a" << 0));
+ BSONElement specElement = spec.firstElement();
+ ASSERT_THROWS(DocumentSourceSort::createFromBson(specElement, getExpCtx()), UserException);
+}
+
+TEST_F(DocumentSourceSortTest, SortWithLimit) {
+ auto expCtx = getExpCtx();
+ createSort(BSON("a" << 1));
+
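+    // Before any $limit is coalesced, getLimit() returns -1 (no limit).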
+ ASSERT_EQUALS(sort()->getLimit(), -1);
+ Pipeline::SourceContainer container;
+ container.push_back(sort());
+
+    { // Checks before any $limit is coalesced into the sort.
+ vector<Value> arr;
+ sort()->serializeToArray(arr);
+ ASSERT_BSONOBJ_EQ(arr[0].getDocument().toBson(), BSON("$sort" << BSON("a" << 1)));
+
+ ASSERT(sort()->getShardSource() != nullptr);
+ ASSERT(sort()->getMergeSource() != nullptr);
+ }
+
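+    // optimizeAt() should coalesce the following $limit into the $sort, removing
+    // the $limit stage from the container and recording the limit on the sort.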
+ container.push_back(DocumentSourceLimit::create(expCtx, 10));
+ sort()->optimizeAt(container.begin(), &container);
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(sort()->getLimit(), 10);
+
+    // A larger subsequent limit (15) does not change the coalesced limit of 10.
+ container.push_back(DocumentSourceLimit::create(expCtx, 15));
+ sort()->optimizeAt(container.begin(), &container);
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(sort()->getLimit(), 10);
+
+    // A smaller subsequent limit (5) reduces the coalesced limit to 5.
+ container.push_back(DocumentSourceLimit::create(expCtx, 5));
+ sort()->optimizeAt(container.begin(), &container);
+ ASSERT_EQUALS(container.size(), 1U);
+ ASSERT_EQUALS(sort()->getLimit(), 5);
+
+ vector<Value> arr;
+ sort()->serializeToArray(arr);
+ ASSERT_VALUE_EQ(
+ Value(arr),
+ DOC_ARRAY(DOC("$sort" << DOC("a" << 1)) << DOC("$limit" << sort()->getLimit())));
+
+ ASSERT(sort()->getShardSource() != nullptr);
+ ASSERT(sort()->getMergeSource() != nullptr);
+}
+
+TEST_F(DocumentSourceSortTest, Dependencies) {
+ createSort(BSON("a" << 1 << "b.c" << -1));
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, sort()->getDependencies(&dependencies));
+ ASSERT_EQUALS(2U, dependencies.fields.size());
+ ASSERT_EQUALS(1U, dependencies.fields.count("a"));
+ ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(DocumentSourceSortTest, OutputSort) {
+ createSort(BSON("a" << 1 << "b.c" << -1));
+ BSONObjSet outputSort = sort()->getOutputSorts();
+ ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
+ ASSERT_EQUALS(outputSort.count(BSON("a" << 1 << "b.c" << -1)), 1U);
+ ASSERT_EQUALS(outputSort.size(), 2U);
+}
+
+class DocumentSourceSortExecutionTest : public DocumentSourceSortTest {
+public:
+ void checkResults(std::deque<Document> inputDocs,
+ BSONObj sortSpec,
+ string expectedResultSetString) {
+ createSort(sortSpec);
+ auto source = DocumentSourceMock::create(inputDocs);
+ sort()->setSource(source.get());
+
+        // Load the results from the DocumentSourceSort.
+ vector<Document> resultSet;
+ for (auto output = sort()->getNext(); output.isAdvanced(); output = sort()->getNext()) {
+ // Get the current result.
+ resultSet.push_back(output.releaseDocument());
+ }
+        // Verify the DocumentSourceSort is exhausted.
+ assertEOF();
+
+ // Convert results to BSON once they all have been retrieved (to detect any errors
+ // resulting from incorrectly shared sub objects).
+ BSONArrayBuilder bsonResultSet;
+ for (auto&& result : resultSet) {
+ bsonResultSet << result;
+ }
+ // Check the result set.
+ ASSERT_BSONOBJ_EQ(expectedResultSet(expectedResultSetString), bsonResultSet.arr());
+ }
+
+protected:
+ virtual BSONObj expectedResultSet(string expectedResultSetString) {
+ BSONObj wrappedResult =
+ // fromjson cannot parse an array, so place the array within an object.
+ fromjson(string("{'':") + expectedResultSetString + "}");
+ return wrappedResult[""].embeddedObject().getOwned();
+ }
+};
+
+TEST_F(DocumentSourceSortExecutionTest, ShouldGiveNoOutputIfGivenNoInputs) {
+ checkResults({}, BSON("a" << 1), "[]");
+}
+
+TEST_F(DocumentSourceSortExecutionTest, ShouldGiveOneOutputIfGivenOneInput) {
+ checkResults({Document{{"_id", 0}, {"a", 1}}}, BSON("a" << 1), "[{_id:0,a:1}]");
+}
+
+TEST_F(DocumentSourceSortExecutionTest, ShouldSortTwoInputsAccordingToOneFieldAscending) {
+ checkResults({Document{{"_id", 0}, {"a", 2}}, Document{{"_id", 1}, {"a", 1}}},
+ BSON("a" << 1),
+ "[{_id:1,a:1},{_id:0,a:2}]");
+}
+
+/** Sort spec with a descending field. */
+TEST_F(DocumentSourceSortExecutionTest, DescendingOrder) {
+ checkResults({Document{{"_id", 0}, {"a", 2}}, Document{{"_id", 1}, {"a", 1}}},
+ BSON("a" << -1),
+ "[{_id:0,a:2},{_id:1,a:1}]");
+}
+
+/** Sort spec with a dotted field. */
+TEST_F(DocumentSourceSortExecutionTest, DottedSortField) {
+ checkResults({Document{{"_id", 0}, {"a", Document{{"b", 2}}}},
+ Document{{"_id", 1}, {"a", Document{{"b", 1}}}}},
+ BSON("a.b" << 1),
+ "[{_id:1,a:{b:1}},{_id:0,a:{b:2}}]");
+}
+
+/** Sort spec with a compound key. */
+TEST_F(DocumentSourceSortExecutionTest, CompoundSortSpec) {
+ checkResults({Document{{"_id", 0}, {"a", 1}, {"b", 3}},
+ Document{{"_id", 1}, {"a", 1}, {"b", 2}},
+ Document{{"_id", 2}, {"a", 0}, {"b", 4}}},
+ BSON("a" << 1 << "b" << 1),
+ "[{_id:2,a:0,b:4},{_id:1,a:1,b:2},{_id:0,a:1,b:3}]");
+}
+
+/** Sort spec with a compound key, descending on the first field. */
+TEST_F(DocumentSourceSortExecutionTest, CompoundSortSpecAlternateOrder) {
+ checkResults({Document{{"_id", 0}, {"a", 1}, {"b", 3}},
+ Document{{"_id", 1}, {"a", 1}, {"b", 2}},
+ Document{{"_id", 2}, {"a", 0}, {"b", 4}}},
+ BSON("a" << -1 << "b" << 1),
+ "[{_id:1,a:1,b:2},{_id:0,a:1,b:3},{_id:2,a:0,b:4}]");
+}
+
+/** Sort spec with a compound key, descending on the second field. */
+TEST_F(DocumentSourceSortExecutionTest, CompoundSortSpecAlternateOrderSecondField) {
+ checkResults({Document{{"_id", 0}, {"a", 1}, {"b", 3}},
+ Document{{"_id", 1}, {"a", 1}, {"b", 2}},
+ Document{{"_id", 2}, {"a", 0}, {"b", 4}}},
+ BSON("a" << 1 << "b" << -1),
+ "[{_id:2,a:0,b:4},{_id:0,a:1,b:3},{_id:1,a:1,b:2}]");
+}
+
+/** Values of different types sort in canonical BSON type order: numbers before strings. */
+TEST_F(DocumentSourceSortExecutionTest, InconsistentTypeSort) {
+ checkResults({Document{{"_id", 0}, {"a", 1}}, Document{{"_id", 1}, {"a", "foo"}}},
+ BSON("a" << 1),
+ "[{_id:0,a:1},{_id:1,a:\"foo\"}]");
+}
+
+/** Sorting different numeric types is supported. */
+TEST_F(DocumentSourceSortExecutionTest, MixedNumericSort) {
+ checkResults({Document{{"_id", 0}, {"a", 2.3}}, Document{{"_id", 1}, {"a", 1}}},
+ BSON("a" << 1),
+ "[{_id:1,a:1},{_id:0,a:2.3}]");
+}
+
+/** A missing sort-key value orders before a present value. */
+TEST_F(DocumentSourceSortExecutionTest, MissingValue) {
+ checkResults({Document{{"_id", 0}, {"a", 1}}, Document{{"_id", 1}}},
+ BSON("a" << 1),
+ "[{_id:1},{_id:0,a:1}]");
+}
+
+/** A null sort-key value orders before a numeric value. */
+TEST_F(DocumentSourceSortExecutionTest, NullValue) {
+ checkResults({Document{{"_id", 0}, {"a", 1}}, Document{{"_id", 1}, {"a", BSONNULL}}},
+ BSON("a" << 1),
+ "[{_id:1,a:null},{_id:0,a:1}]");
+}
+
+/**
+ * Order by text score.
+ */
+TEST_F(DocumentSourceSortExecutionTest, TextScore) {
+ MutableDocument first(Document{{"_id", 0}});
+ first.setTextScore(10);
+ MutableDocument second(Document{{"_id", 1}});
+ second.setTextScore(20);
+
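+    // Text scores of 10 and 20 were attached above; the expected order shows that
+    // a {$meta: "textScore"} sort returns higher-scoring documents first.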
+ checkResults({first.freeze(), second.freeze()},
+ BSON("$computed0" << metaTextScore),
+ "[{_id:1},{_id:0}]");
+}
+
+/**
+ * Order by random value in metadata.
+ */
+TEST_F(DocumentSourceSortExecutionTest, RandMeta) {
+ MutableDocument first(Document{{"_id", 0}});
+ first.setRandMetaField(0.01);
+ MutableDocument second(Document{{"_id", 1}});
+ second.setRandMetaField(0.02);
+
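+    // As with text score, the expected order shows the document with the higher
+    // randVal (0.02) returned first.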
+ checkResults({first.freeze(), second.freeze()},
+ BSON("$computed0" << BSON("$meta"
+ << "randVal")),
+ "[{_id:1},{_id:0}]");
+}
+
+/** Extracting 'a.b' from an array with no embedded objects yields an empty array, which sorts first. */
+TEST_F(DocumentSourceSortExecutionTest, MissingObjectWithinArray) {
+ checkResults({Document{{"_id", 0}, {"a", DOC_ARRAY(1)}},
+ Document{{"_id", 1}, {"a", DOC_ARRAY(DOC("b" << 1))}}},
+ BSON("a.b" << 1),
+ "[{_id:0,a:[1]},{_id:1,a:[{b:1}]}]");
+}
+
+/** Values extracted from within arrays are compared as arrays: [1, 1] sorts before [1, 2]. */
+TEST_F(DocumentSourceSortExecutionTest, ExtractArrayValues) {
+ checkResults({Document{{"_id", 0}, {"a", DOC_ARRAY(DOC("b" << 1) << DOC("b" << 2))}},
+ Document{{"_id", 1}, {"a", DOC_ARRAY(DOC("b" << 1) << DOC("b" << 1))}}},
+ BSON("a.b" << 1),
+ "[{_id:1,a:[{b:1},{b:1}]},{_id:0,a:[{b:1},{b:2}]}]");
+}
+
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_test.cpp b/src/mongo/db/pipeline/document_source_test.cpp
index 9eac259af91..934f9bf024a 100644
--- a/src/mongo/db/pipeline/document_source_test.cpp
+++ b/src/mongo/db/pipeline/document_source_test.cpp
@@ -28,67 +28,15 @@
#include "mongo/platform/basic.h"
-#include "mongo/base/init.h"
-#include "mongo/db/matcher/extensions_callback_noop.h"
-#include "mongo/db/operation_context_noop.h"
-#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/bsonobjbuilder.h"
#include "mongo/db/pipeline/document_source.h"
-#include "mongo/db/pipeline/document_value_test_util.h"
-#include "mongo/db/pipeline/expression_context.h"
-#include "mongo/db/pipeline/pipeline.h"
-#include "mongo/db/pipeline/value_comparator.h"
-#include "mongo/db/service_context.h"
-#include "mongo/db/service_context_noop.h"
-#include "mongo/db/storage/storage_options.h"
-#include "mongo/dbtests/dbtests.h"
-#include "mongo/stdx/memory.h"
-#include "mongo/unittest/temp_dir.h"
#include "mongo/unittest/unittest.h"
-#include "mongo/util/clock_source_mock.h"
-#include "mongo/util/tick_source_mock.h"
namespace mongo {
-bool isMongos() {
- return false;
-}
-
-std::unique_ptr<ServiceContextNoop> makeTestServiceContext() {
- auto service = stdx::make_unique<ServiceContextNoop>();
- service->setFastClockSource(stdx::make_unique<ClockSourceMock>());
- service->setTickSource(stdx::make_unique<TickSourceMock>());
- return service;
-}
-}
-
-// Stub to avoid including the server environment library.
-MONGO_INITIALIZER(SetGlobalEnvironment)(InitializerContext* context) {
- setGlobalServiceContext(makeTestServiceContext());
- return Status::OK();
-}
-
-namespace DocumentSourceTests {
-
-using boost::intrusive_ptr;
-using std::shared_ptr;
-using std::map;
-using std::set;
-using std::string;
-using std::vector;
-static const char* const ns = "unittests.documentsourcetests";
-static const BSONObj metaTextScore = BSON("$meta"
- << "textScore");
-
-BSONObj toBson(const intrusive_ptr<DocumentSource>& source) {
- vector<Value> arr;
- source->serializeToArray(arr);
- ASSERT_EQUALS(arr.size(), 1UL);
- return arr[0].getDocument().toBson();
-}
-
-
-namespace DocumentSourceClass {
-using mongo::DocumentSource;
+namespace {
TEST(TruncateSort, SortTruncatesNormalField) {
SimpleBSONObjComparator bsonComparator{};
@@ -99,7 +47,7 @@ TEST(TruncateSort, SortTruncatesNormalField) {
ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
}
-TEST(TruncateSort, SortTruncatesOnSubfield) {
+TEST(DocumentSourceTruncateSort, SortTruncatesOnSubfield) {
SimpleBSONObjComparator bsonComparator{};
BSONObj sortKey = BSON("a" << 1 << "b.c" << 1 << "d" << 1);
auto truncated =
@@ -108,7 +56,7 @@ TEST(TruncateSort, SortTruncatesOnSubfield) {
ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
}
-TEST(TruncateSort, SortDoesNotTruncateOnParent) {
+TEST(DocumentSourceTruncateSort, SortDoesNotTruncateOnParent) {
SimpleBSONObjComparator bsonComparator{};
BSONObj sortKey = BSON("a" << 1 << "b" << 1 << "d" << 1);
auto truncated =
@@ -117,7 +65,7 @@ TEST(TruncateSort, SortDoesNotTruncateOnParent) {
ASSERT_EQUALS(truncated.count(BSON("a" << 1 << "b" << 1 << "d" << 1)), 1U);
}
-TEST(TruncateSort, TruncateSortDedupsSortCorrectly) {
+TEST(DocumentSourceTruncateSort, TruncateSortDedupsSortCorrectly) {
SimpleBSONObjComparator bsonComparator{};
BSONObj sortKeyOne = BSON("a" << 1 << "b" << 1);
BSONObj sortKeyTwo = BSON("a" << 1);
@@ -127,4888 +75,5 @@ TEST(TruncateSort, TruncateSortDedupsSortCorrectly) {
ASSERT_EQUALS(truncated.count(BSON("a" << 1)), 1U);
}
-template <size_t ArrayLen>
-set<string> arrayToSet(const char* (&array)[ArrayLen]) {
- set<string> out;
- for (size_t i = 0; i < ArrayLen; i++)
- out.insert(array[i]);
- return out;
-}
-
-class Deps {
-public:
- void run() {
- {
- const char* array[] = {"a", "b"}; // basic
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "b" << 1 << "_id" << 0));
- }
- {
- const char* array[] = {"a", "ab"}; // prefixed but not subfield
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "ab" << 1 << "_id" << 0));
- }
- {
- const char* array[] = {"a", "b", "a.b"}; // a.b included by a
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "b" << 1 << "_id" << 0));
- }
- {
- const char* array[] = {"a", "_id"}; // _id now included
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
- }
- {
- const char* array[] = {"a", "_id.a"}; // still include whole _id (SERVER-7502)
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
- }
- {
- const char* array[] = {"a", "_id", "_id.a"}; // handle both _id and subfield
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("a" << 1 << "_id" << 1));
- }
- {
- const char* array[] = {"a", "_id", "_id_a"}; // _id prefixed but non-subfield
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSON("_id_a" << 1 << "a" << 1 << "_id" << 1));
- }
- {
- const char* array[] = {"a"}; // fields ignored with needWholeDocument
- DepsTracker deps;
- deps.fields = arrayToSet(array);
- deps.needWholeDocument = true;
- ASSERT_BSONOBJ_EQ(deps.toProjection(), BSONObj());
- }
- {
- const char* array[] = {"a"}; // needTextScore with needWholeDocument
- DepsTracker deps(DepsTracker::MetadataAvailable::kTextScore);
- deps.fields = arrayToSet(array);
- deps.needWholeDocument = true;
- deps.setNeedTextScore(true);
- ASSERT_BSONOBJ_EQ(deps.toProjection(),
- BSON(Document::metaFieldTextScore << metaTextScore));
- }
- {
- const char* array[] = {"a"}; // needTextScore without needWholeDocument
- DepsTracker deps(DepsTracker::MetadataAvailable::kTextScore);
- deps.fields = arrayToSet(array);
- deps.setNeedTextScore(true);
- ASSERT_BSONOBJ_EQ(
- deps.toProjection(),
- BSON(Document::metaFieldTextScore << metaTextScore << "a" << 1 << "_id" << 0));
- }
- }
-};
-
-
-} // namespace DocumentSourceClass
-
-namespace Mock {
-using mongo::DocumentSourceMock;
-
-/**
- * A fixture which provides access to things like a ServiceContext that are needed by other tests.
- */
-class Base {
-public:
- Base()
- : _service(makeTestServiceContext()),
- _client(_service->makeClient("DocumentSourceTest")),
- _opCtx(_client->makeOperationContext()),
- _ctx(new ExpressionContext(_opCtx.get(), AggregationRequest(NamespaceString(ns), {}))) {}
-
-protected:
- intrusive_ptr<ExpressionContext> ctx() {
- return _ctx;
- }
-
- std::unique_ptr<ServiceContextNoop> _service;
- ServiceContext::UniqueClient _client;
- ServiceContext::UniqueOperationContext _opCtx;
-
-private:
- intrusive_ptr<ExpressionContext> _ctx;
-};
-
-TEST(Mock, OneDoc) {
- auto doc = Document{{"a", 1}};
- auto source = DocumentSourceMock::create(doc);
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), doc);
- ASSERT(source->getNext().isEOF());
-}
-
-TEST(Mock, DequeDocuments) {
- auto source = DocumentSourceMock::create({DOC("a" << 1), DOC("a" << 2)});
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), DOC("a" << 1));
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), DOC("a" << 2));
- ASSERT(source->getNext().isEOF());
-}
-
-TEST(Mock, StringJSON) {
- auto source = DocumentSourceMock::create("{a : 1}");
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), DOC("a" << 1));
- ASSERT(source->getNext().isEOF());
-}
-
-TEST(Mock, DequeStringJSONs) {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}"});
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), DOC("a" << 1));
- ASSERT_DOCUMENT_EQ(source->getNext().getDocument(), DOC("a" << 2));
- ASSERT(source->getNext().isEOF());
-}
-
-TEST(Mock, Empty) {
- auto source = DocumentSourceMock::create();
- ASSERT(source->getNext().isEOF());
-}
-
-} // namespace Mock
-
-namespace DocumentSourceRedact {
-using mongo::DocumentSourceRedact;
-using mongo::DocumentSourceMatch;
-using mongo::DocumentSourceMock;
-
-class Base : public Mock::Base {
-protected:
- void createRedact() {
- BSONObj spec = BSON("$redact"
- << "$$PRUNE");
- _redact = DocumentSourceRedact::createFromBson(spec.firstElement(), ctx());
- }
-
- DocumentSource* redact() {
- return _redact.get();
- }
-
-private:
- intrusive_ptr<DocumentSource> _redact;
-};
-
-class PromoteMatch : public Base {
-public:
- void run() {
- createRedact();
-
- auto match = DocumentSourceMatch::createFromBson(BSON("a" << 1).firstElement(), ctx());
-
- Pipeline::SourceContainer pipeline;
- pipeline.push_back(redact());
- pipeline.push_back(match);
-
- pipeline.front()->optimizeAt(pipeline.begin(), &pipeline);
-
- ASSERT_EQUALS(pipeline.size(), 4U);
- ASSERT(dynamic_cast<DocumentSourceMatch*>(pipeline.front().get()));
- }
-};
-} // namespace DocumentSourceRedact
-
-namespace DocumentSourceLimit {
-
-using mongo::DocumentSourceLimit;
-using mongo::DocumentSourceMock;
-
-class Base : public Mock::Base {
-protected:
- void createLimit(int limit) {
- BSONObj spec = BSON("$limit" << limit);
- BSONElement specElement = spec.firstElement();
- _limit = DocumentSourceLimit::createFromBson(specElement, ctx());
- }
- DocumentSource* limit() {
- return _limit.get();
- }
-
-private:
- intrusive_ptr<DocumentSource> _limit;
-};
-
-/** Exhausting a DocumentSourceLimit disposes of the limit's source. */
-class DisposeSource : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}"});
- createLimit(1);
- limit()->setSource(source.get());
- // The limit's result is as expected.
- auto next = limit()->getNext();
- ASSERT(next.isAdvanced());
- ASSERT_VALUE_EQ(Value(1), next.getDocument().getField("a"));
- // The limit is exhausted.
- ASSERT(limit()->getNext().isEOF());
- }
-};
-
-/** Combine two $limit stages. */
-class CombineLimit : public Base {
-public:
- void run() {
- Pipeline::SourceContainer container;
- createLimit(10);
-
- auto secondLimit =
- DocumentSourceLimit::createFromBson(BSON("$limit" << 5).firstElement(), ctx());
-
- container.push_back(limit());
- container.push_back(secondLimit);
-
- limit()->optimizeAt(container.begin(), &container);
- ASSERT_EQUALS(5, static_cast<DocumentSourceLimit*>(limit())->getLimit());
- ASSERT_EQUALS(1U, container.size());
- }
-};
-
-/** Exhausting a DocumentSourceLimit disposes of the pipeline's source. */
-class DisposeSourceCascade : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 1}"});
- // Create a DocumentSourceMatch.
- BSONObj spec = BSON("$match" << BSON("a" << 1));
- BSONElement specElement = spec.firstElement();
- intrusive_ptr<DocumentSource> match =
- DocumentSourceMatch::createFromBson(specElement, ctx());
- match->setSource(source.get());
-
- createLimit(1);
- limit()->setSource(match.get());
- // The limit is not exhauted.
- auto next = limit()->getNext();
- ASSERT(next.isAdvanced());
- std::cout << next.getDocument() << std::endl;
- ASSERT_VALUE_EQ(Value(1), next.getDocument().getField("a"));
- // The limit is exhausted.
- ASSERT(limit()->getNext().isEOF());
- }
-};
-
-/** A limit does not introduce any dependencies. */
-class Dependencies : public Base {
-public:
- void run() {
- createLimit(1);
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, limit()->getDependencies(&dependencies));
- ASSERT_EQUALS(0U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-} // namespace DocumentSourceLimit
-
-namespace DocumentSourceLookup {
-
-TEST(MakeMatchStageFromInput, NonArrayValueUsesEqQuery) {
- Document input = DOC("local" << 1);
- BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
- input, FieldPath("local"), "foreign", BSONObj());
- ASSERT_BSONOBJ_EQ(matchStage, fromjson("{$match: {$and: [{foreign: {$eq: 1}}, {}]}}"));
-}
-
-TEST(MakeMatchStageFromInput, RegexValueUsesEqQuery) {
- BSONRegEx regex("^a");
- Document input = DOC("local" << Value(regex));
- BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
- input, FieldPath("local"), "foreign", BSONObj());
- ASSERT_BSONOBJ_EQ(
- matchStage,
- BSON("$match" << BSON(
- "$and" << BSON_ARRAY(BSON("foreign" << BSON("$eq" << regex)) << BSONObj()))));
-}
-
-TEST(MakeMatchStageFromInput, ArrayValueUsesInQuery) {
- vector<Value> inputArray = {Value(1), Value(2)};
- Document input = DOC("local" << Value(inputArray));
- BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
- input, FieldPath("local"), "foreign", BSONObj());
- ASSERT_BSONOBJ_EQ(matchStage, fromjson("{$match: {$and: [{foreign: {$in: [1, 2]}}, {}]}}"));
-}
-
-TEST(MakeMatchStageFromInput, ArrayValueWithRegexUsesOrQuery) {
- BSONRegEx regex("^a");
- vector<Value> inputArray = {Value(1), Value(regex), Value(2)};
- Document input = DOC("local" << Value(inputArray));
- BSONObj matchStage = DocumentSourceLookUp::makeMatchStageFromInput(
- input, FieldPath("local"), "foreign", BSONObj());
- ASSERT_BSONOBJ_EQ(
- matchStage,
- BSON("$match" << BSON(
- "$and" << BSON_ARRAY(
- BSON("$or" << BSON_ARRAY(BSON("foreign" << BSON("$eq" << Value(1)))
- << BSON("foreign" << BSON("$eq" << regex))
- << BSON("foreign" << BSON("$eq" << Value(2)))))
- << BSONObj()))));
-}
-
-} // namespace DocumentSourceLookUp
-
-namespace DocumentSourceGroup {
-
-using mongo::DocumentSourceGroup;
-using mongo::DocumentSourceMock;
-
-class Base : public Mock::Base {
-public:
- Base() : _tempDir("DocumentSourceGroupTest") {}
-
-protected:
- void createGroup(const BSONObj& spec, bool inShard = false, bool inRouter = false) {
- BSONObj namedSpec = BSON("$group" << spec);
- BSONElement specElement = namedSpec.firstElement();
-
- intrusive_ptr<ExpressionContext> expressionContext =
- new ExpressionContext(_opCtx.get(), AggregationRequest(NamespaceString(ns), {}));
- expressionContext->inShard = inShard;
- expressionContext->inRouter = inRouter;
- // Won't spill to disk properly if it needs to.
- expressionContext->tempDir = _tempDir.path();
-
- _group = DocumentSourceGroup::createFromBson(specElement, expressionContext);
- _group->injectExpressionContext(expressionContext);
- assertRoundTrips(_group);
- }
- DocumentSourceGroup* group() {
- return static_cast<DocumentSourceGroup*>(_group.get());
- }
- /** Assert that iterator state accessors consistently report the source is exhausted. */
- void assertEOF(const intrusive_ptr<DocumentSource>& source) const {
- // It should be safe to check doneness multiple times
- ASSERT(source->getNext().isEOF());
- ASSERT(source->getNext().isEOF());
- ASSERT(source->getNext().isEOF());
- }
-
-private:
- /** Check that the group's spec round trips. */
- void assertRoundTrips(const intrusive_ptr<DocumentSource>& group) {
- // We don't check against the spec that generated 'group' originally, because
- // $const operators may be introduced in the first serialization.
- BSONObj spec = toBson(group);
- BSONElement specElement = spec.firstElement();
- intrusive_ptr<DocumentSource> generated =
- DocumentSourceGroup::createFromBson(specElement, ctx());
- ASSERT_BSONOBJ_EQ(spec, toBson(generated));
- }
- intrusive_ptr<DocumentSource> _group;
- TempDir _tempDir;
-};
-
-class ParseErrorBase : public Base {
-public:
- virtual ~ParseErrorBase() {}
- void run() {
- ASSERT_THROWS(createGroup(spec()), UserException);
- }
-
-protected:
- virtual BSONObj spec() = 0;
-};
-
-class ExpressionBase : public Base {
-public:
- virtual ~ExpressionBase() {}
- void run() {
- createGroup(spec());
- auto source = DocumentSourceMock::create(Document(doc()));
- group()->setSource(source.get());
- // A group result is available.
- auto next = group()->getNext();
- ASSERT(next.isAdvanced());
- // The constant _id value from the $group spec is passed through.
- ASSERT_BSONOBJ_EQ(expected(), next.getDocument().toBson());
- }
-
-protected:
- virtual BSONObj doc() = 0;
- virtual BSONObj spec() = 0;
- virtual BSONObj expected() = 0;
-};
-
-class IdConstantBase : public ExpressionBase {
- virtual BSONObj doc() {
- return BSONObj();
- }
- virtual BSONObj expected() {
- // Since spec() specifies a constant _id, its value will be passed through.
- return spec();
- }
-};
-
-/** $group spec is not an object. */
-class NonObject : public Base {
-public:
- void run() {
- BSONObj spec = BSON("$group"
- << "foo");
- BSONElement specElement = spec.firstElement();
- ASSERT_THROWS(DocumentSourceGroup::createFromBson(specElement, ctx()), UserException);
- }
-};
-
-/** $group spec is an empty object. */
-class EmptySpec : public ParseErrorBase {
- BSONObj spec() {
- return BSONObj();
- }
-};
-
-/** $group _id is an empty object. */
-class IdEmptyObject : public IdConstantBase {
- BSONObj spec() {
- return BSON("_id" << BSONObj());
- }
-};
-
-/** $group _id is computed from an object expression. */
-class IdObjectExpression : public ExpressionBase {
- BSONObj doc() {
- return BSON("a" << 6);
- }
- BSONObj spec() {
- return BSON("_id" << BSON("z"
- << "$a"));
- }
- BSONObj expected() {
- return BSON("_id" << BSON("z" << 6));
- }
-};
-
-/** $group _id is specified as an invalid object expression. */
-class IdInvalidObjectExpression : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << BSON("$add" << 1 << "$and" << 1));
- }
-};
-
-/** $group with two _id specs. */
-class TwoIdSpecs : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "_id" << 2);
- }
-};
-
-/** $group _id is the empty string. */
-class IdEmptyString : public IdConstantBase {
- BSONObj spec() {
- return BSON("_id"
- << "");
- }
-};
-
-/** $group _id is a string constant. */
-class IdStringConstant : public IdConstantBase {
- BSONObj spec() {
- return BSON("_id"
- << "abc");
- }
-};
-
-/** $group _id is a field path expression. */
-class IdFieldPath : public ExpressionBase {
- BSONObj doc() {
- return BSON("a" << 5);
- }
- BSONObj spec() {
- return BSON("_id"
- << "$a");
- }
- BSONObj expected() {
- return BSON("_id" << 5);
- }
-};
-
-/** $group with _id set to an invalid field path. */
-class IdInvalidFieldPath : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id"
- << "$a..");
- }
-};
-
-/** $group _id is a numeric constant. */
-class IdNumericConstant : public IdConstantBase {
- BSONObj spec() {
- return BSON("_id" << 2);
- }
-};
-
-/** $group _id is an array constant. */
-class IdArrayConstant : public IdConstantBase {
- BSONObj spec() {
- return BSON("_id" << BSON_ARRAY(1 << 2));
- }
-};
-
-/** $group _id is a regular expression (not supported). */
-class IdRegularExpression : public IdConstantBase {
- BSONObj spec() {
- return fromjson("{_id:/a/}");
- }
-};
-
-/** The name of an aggregate field is specified with a $ prefix. */
-class DollarAggregateFieldName : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "$foo" << BSON("$sum" << 1));
- }
-};
-
-/** An aggregate field spec that is not an object. */
-class NonObjectAggregateSpec : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "a" << 1);
- }
-};
-
-/** An aggregate field spec that is not an object. */
-class EmptyObjectAggregateSpec : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "a" << BSONObj());
- }
-};
-
-/** An aggregate field spec with an invalid accumulator operator. */
-class BadAccumulator : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "a" << BSON("$bad" << 1));
- }
-};
-
-/** An aggregate field spec with an array argument. */
-class SumArray : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "a" << BSON("$sum" << BSONArray()));
- }
-};
-
-/** Multiple accumulator operators for a field. */
-class MultipleAccumulatorsForAField : public ParseErrorBase {
- BSONObj spec() {
- return BSON("_id" << 1 << "a" << BSON("$sum" << 1 << "$push" << 1));
- }
-};
-
-/** Aggregation using duplicate field names is allowed currently. */
-class DuplicateAggregateFieldNames : public ExpressionBase {
- BSONObj doc() {
- return BSONObj();
- }
- BSONObj spec() {
- return BSON("_id" << 0 << "z" << BSON("$sum" << 1) << "z" << BSON("$push" << 1));
- }
- BSONObj expected() {
- return BSON("_id" << 0 << "z" << 1 << "z" << BSON_ARRAY(1));
- }
-};
-
-/** Aggregate the value of an object expression. */
-class AggregateObjectExpression : public ExpressionBase {
- BSONObj doc() {
- return BSON("a" << 6);
- }
- BSONObj spec() {
- return BSON("_id" << 0 << "z" << BSON("$first" << BSON("x"
- << "$a")));
- }
- BSONObj expected() {
- return BSON("_id" << 0 << "z" << BSON("x" << 6));
- }
-};
-
-/** Aggregate the value of an operator expression. */
-class AggregateOperatorExpression : public ExpressionBase {
- BSONObj doc() {
- return BSON("a" << 6);
- }
- BSONObj spec() {
- return BSON("_id" << 0 << "z" << BSON("$first"
- << "$a"));
- }
- BSONObj expected() {
- return BSON("_id" << 0 << "z" << 6);
- }
-};
-
-struct ValueCmp {
- bool operator()(const Value& a, const Value& b) const {
- return ValueComparator().evaluate(a < b);
- }
-};
-typedef map<Value, Document, ValueCmp> IdMap;
-
-class CheckResultsBase : public Base {
-public:
- virtual ~CheckResultsBase() {}
- void run() {
- runSharded(false);
- runSharded(true);
- }
- void runSharded(bool sharded) {
- createGroup(groupSpec());
- auto source = DocumentSourceMock::create(inputData());
- group()->setSource(source.get());
-
- intrusive_ptr<DocumentSource> sink = group();
- if (sharded) {
- sink = createMerger();
- // Serialize and re-parse the shard stage.
- createGroup(toBson(group())["$group"].Obj(), true);
- group()->setSource(source.get());
- sink->setSource(group());
- }
-
- checkResultSet(sink);
- }
-
-protected:
- virtual std::deque<Document> inputData() {
- return {};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id" << 0);
- }
- /** Expected results. Must be sorted by _id to ensure consistent ordering. */
- virtual BSONObj expectedResultSet() {
- BSONObj wrappedResult =
- // fromjson cannot parse an array, so place the array within an object.
- fromjson(string("{'':") + expectedResultSetString() + "}");
- return wrappedResult[""].embeddedObject().getOwned();
- }
- /** Expected results. Must be sorted by _id to ensure consistent ordering. */
- virtual string expectedResultSetString() {
- return "[]";
- }
- intrusive_ptr<DocumentSource> createMerger() {
- // Set up a group merger to simulate merging results in the router. In this
- // case only one shard is in use.
- SplittableDocumentSource* splittable = dynamic_cast<SplittableDocumentSource*>(group());
- ASSERT(splittable);
- intrusive_ptr<DocumentSource> routerSource = splittable->getMergeSource();
- ASSERT_NOT_EQUALS(group(), routerSource.get());
- return routerSource;
- }
- void checkResultSet(const intrusive_ptr<DocumentSource>& sink) {
- // Load the results from the DocumentSourceGroup and sort them by _id.
- IdMap resultSet;
- for (auto output = sink->getNext(); output.isAdvanced(); output = sink->getNext()) {
- // Save the current result.
- Value id = output.getDocument().getField("_id");
- resultSet[id] = output.releaseDocument();
- }
- // Verify the DocumentSourceGroup is exhausted.
- assertEOF(sink);
-
- // Convert results to BSON once they all have been retrieved (to detect any errors
- // resulting from incorrectly shared sub objects).
- BSONArrayBuilder bsonResultSet;
- for (IdMap::const_iterator i = resultSet.begin(); i != resultSet.end(); ++i) {
- bsonResultSet << i->second;
- }
- // Check the result set.
- ASSERT_BSONOBJ_EQ(expectedResultSet(), bsonResultSet.arr());
- }
-};
-
-/** An empty collection generates no results. */
-class EmptyCollection : public CheckResultsBase {};
-
-/** A $group performed on a single document. */
-class SingleDocument : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("a" << 1)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id" << 0 << "a" << BSON("$sum"
- << "$a"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0,a:1}]";
- }
-};
-
-/** A $group performed on two values for a single key. */
-class TwoValuesSingleKey : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("a" << 1), DOC("a" << 2)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id" << 0 << "a" << BSON("$push"
- << "$a"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0,a:[1,2]}]";
- }
-};
-
-/** A $group performed on two values with one key each. */
-class TwoValuesTwoKeys : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1), DOC("_id" << 1 << "a" << 2)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id"
- << "$_id"
- << "a"
- << BSON("$push"
- << "$a"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0,a:[1]},{_id:1,a:[2]}]";
- }
-};
-
-/** A $group performed on two values with two keys each. */
-class FourValuesTwoKeys : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("id" << 0 << "a" << 1),
- DOC("id" << 1 << "a" << 2),
- DOC("id" << 0 << "a" << 3),
- DOC("id" << 1 << "a" << 4)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id"
- << "$id"
- << "a"
- << BSON("$push"
- << "$a"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0,a:[1,3]},{_id:1,a:[2,4]}]";
- }
-};
-
-/** A $group performed on two values with two keys each and two accumulator operations. */
-class FourValuesTwoKeysTwoAccumulators : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("id" << 0 << "a" << 1),
- DOC("id" << 1 << "a" << 2),
- DOC("id" << 0 << "a" << 3),
- DOC("id" << 1 << "a" << 4)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id"
- << "$id"
- << "list"
- << BSON("$push"
- << "$a")
- << "sum"
- << BSON("$sum" << BSON("$divide" << BSON_ARRAY("$a" << 2))));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0,list:[1,3],sum:2},{_id:1,list:[2,4],sum:3}]";
- }
-};
-
-/** Null and undefined _id values are grouped together. */
-class GroupNullUndefinedIds : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("a" << BSONNULL << "b" << 100), DOC("b" << 10)};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id"
- << "$a"
- << "sum"
- << BSON("$sum"
- << "$b"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:null,sum:110}]";
- }
-};
-
-/** A complex _id expression. */
-class ComplexId : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("a"
- << "de"
- << "b"
- << "ad"
- << "c"
- << "beef"
- << "d"
- << ""),
- DOC("a"
- << "d"
- << "b"
- << "eadbe"
- << "c"
- << ""
- << "d"
- << "ef")};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id" << BSON("$concat" << BSON_ARRAY("$a"
- << "$b"
- << "$c"
- << "$d")));
- }
- virtual string expectedResultSetString() {
- return "[{_id:'deadbeef'}]";
- }
-};
-
-/** An undefined accumulator value is dropped. */
-class UndefinedAccumulatorValue : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {Document()};
- }
- virtual BSONObj groupSpec() {
- return BSON("_id" << 0 << "first" << BSON("$first"
- << "$missing"));
- }
- virtual string expectedResultSetString() {
- return "[{_id:0, first:null}]";
- }
-};
-
-/** Simulate merging sharded results in the router. */
-class RouterMerger : public CheckResultsBase {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{_id:0,list:[1,2]}",
- "{_id:1,list:[3,4]}",
- "{_id:0,list:[10,20]}",
- "{_id:1,list:[30,40]}]}"});
-
- // Create a group source.
- createGroup(BSON("_id"
- << "$x"
- << "list"
- << BSON("$push"
- << "$y")));
- // Create a merger version of the source.
- intrusive_ptr<DocumentSource> group = createMerger();
- // Attach the merger to the synthetic shard results.
- group->setSource(source.get());
- // Check the merger's output.
- checkResultSet(group);
- }
-
-private:
- string expectedResultSetString() {
- return "[{_id:0,list:[1,2,10,20]},{_id:1,list:[3,4,30,40]}]";
- }
-};
-
-/** Dependant field paths. */
-class Dependencies : public Base {
-public:
- void run() {
- createGroup(fromjson("{_id:'$x',a:{$sum:'$y.z'},b:{$avg:{$add:['$u','$v']}}}"));
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, group()->getDependencies(&dependencies));
- ASSERT_EQUALS(4U, dependencies.fields.size());
- // Dependency from _id expression.
- ASSERT_EQUALS(1U, dependencies.fields.count("x"));
- // Dependencies from accumulator expressions.
- ASSERT_EQUALS(1U, dependencies.fields.count("y.z"));
- ASSERT_EQUALS(1U, dependencies.fields.count("u"));
- ASSERT_EQUALS(1U, dependencies.fields.count("v"));
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class StreamingOptimization : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 0}", "{a: 0}", "{a: 1}", "{a: 1}"});
- source->sorts = {BSON("a" << 1)};
-
- createGroup(BSON("_id"
- << "$a"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(0));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(1));
-
- assertEOF(source);
-
- res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id"), Value(1));
-
- assertEOF(group());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 1U);
-
- ASSERT_EQUALS(outputSort.count(BSON("_id" << 1)), 1U);
- }
-};
-
-class StreamingWithMultipleIdFields : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create(
- {"{a: 1, b: 2}", "{a: 1, b: 2}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 1}"});
- source->sorts = {BSON("a" << 1 << "b" << -1)};
-
- createGroup(fromjson("{_id: {x: '$a', y: '$b'}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(2));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["y"], Value(1));
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
-
- assertEOF(source);
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = BSON("_id.x" << 1 << "_id.y" << -1);
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSort = BSON("_id.x" << 1);
- ASSERT_EQUALS(outputSort.count(prefixSort), 1U);
- }
-};
-
-class StreamingWithMultipleLevels : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create(
- {"{a: {b: {c: 3}}, d: 1}", "{a: {b: {c: 1}}, d: 2}", "{a: {b: {c: 1}}, d: 0}"});
- source->sorts = {BSON("a.b.c" << -1 << "a.b.d" << 1 << "d" << 1)};
-
- createGroup(fromjson("{_id: {x: {y: {z: '$a.b.c', q: '$a.b.d'}}, v: '$d'}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["x"]["y"]["z"], Value(3));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a")["b"]["c"], Value(1));
-
- assertEOF(source);
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 3U);
-
- BSONObj correctSort = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1, '_id.v': 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.x.y.z': -1, '_id.x.y.q': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
-
- BSONObj prefixSortOne = fromjson("{'_id.x.y.z': -1}");
- ASSERT_EQUALS(outputSort.count(prefixSortOne), 1U);
- }
-};
-
-class StreamingWithFieldRepeated : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create(
- {"{a: 1, b: 1}", "{a: 1, b: 1}", "{a: 2, b: 1}", "{a: 2, b: 3}"});
- source->sorts = {BSON("a" << 1 << "b" << 1)};
-
- createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: '$a'}}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value(1));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(2));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(3));
-
- BSONObjSet outputSort = group()->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = fromjson("{'_id.sub.z': 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.sub.z': 1, '_id.sub.y': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
- }
-};
-
-class StreamingWithConstantAndFieldPath : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create(
- {"{a: 5, b: 1}", "{a: 5, b: 2}", "{a: 3, b: 1}", "{a: 1, b: 1}", "{a: 1, b: 1}"});
- source->sorts = {BSON("a" << -1 << "b" << 1)};
-
- createGroup(fromjson("{_id: {sub: {x: '$a', y: '$b', z: {$literal: 'c'}}}}"));
- group()->setSource(source.get());
-
- auto res = group()->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["x"], Value(5));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["y"], Value(1));
- ASSERT_VALUE_EQ(res.getDocument().getField("_id")["sub"]["z"], Value("c"));
-
- ASSERT_TRUE(group()->isStreaming());
-
- res = source->getNext();
- ASSERT_TRUE(res.isAdvanced());
- ASSERT_VALUE_EQ(res.getDocument().getField("a"), Value(3));
- ASSERT_VALUE_EQ(res.getDocument().getField("b"), Value(1));
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 2U);
-
- BSONObj correctSort = fromjson("{'_id.sub.x': -1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
-
- BSONObj prefixSortTwo = fromjson("{'_id.sub.x': -1, '_id.sub.y': 1}");
- ASSERT_EQUALS(outputSort.count(prefixSortTwo), 1U);
- }
-};
-
-class StreamingWithRootSubfield : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- createGroup(fromjson("{_id: '$$ROOT.a'}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 1U);
-
- BSONObj correctSort = fromjson("{_id: 1}");
- ASSERT_EQUALS(outputSort.count(correctSort), 1U);
- }
-};
-
-class StreamingWithConstant : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("$a" << 1)};
-
- createGroup(fromjson("{_id: 1}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class StreamingWithEmptyId : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("$a" << 1)};
-
- createGroup(fromjson("{_id: {}}"));
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_TRUE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationIfMissingDoubleSort : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inRouter = true;
- const bool inShard = false;
-
- createGroup(BSON("_id" << BSON("x"
- << "$a"
- << "y"
- << "$b")),
- inShard,
- inRouter);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationWithRawRoot : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1}", "{a: 2}", "{a: 3}"});
- source->sorts = {BSON("a" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inRouter = true;
- const bool inShard = false;
-
- createGroup(BSON("_id" << BSON("a"
- << "$$ROOT"
- << "b"
- << "$a")),
- inShard,
- inRouter);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-class NoOptimizationIfUsingExpressions : public Base {
-public:
- void run() {
- auto source = DocumentSourceMock::create({"{a: 1, b: 1}", "{a: 2, b: 2}", "{a: 3, b: 1}"});
- source->sorts = {BSON("a" << 1 << "b" << 1)};
-
- // We pretend to be in the router so that we don't spill to disk, because this produces
- // inconsistent output on debug vs. non-debug builds.
- const bool inRouter = true;
- const bool inShard = false;
-
- createGroup(fromjson("{_id: {$sum: ['$a', '$b']}}"), inShard, inRouter);
- group()->setSource(source.get());
-
- group()->getNext();
- ASSERT_FALSE(group()->isStreaming());
-
- BSONObjSet outputSort = group()->getOutputSorts();
- ASSERT_EQUALS(outputSort.size(), 0U);
- }
-};
-
-/**
- * A string constant (not a field path) as an _id expression and passed to an accumulator.
- * SERVER-6766
- */
-class StringConstantIdAndAccumulatorExpressions : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {Document()};
- }
- BSONObj groupSpec() {
- return fromjson("{_id:{$const:'$_id...'},a:{$push:{$const:'$a...'}}}");
- }
- string expectedResultSetString() {
- return "[{_id:'$_id...',a:['$a...']}]";
- }
-};
-
-/** An array constant passed to an accumulator. */
-class ArrayConstantAccumulatorExpression : public CheckResultsBase {
-public:
- void run() {
- // A parse exception is thrown when a raw array is provided to an accumulator.
- ASSERT_THROWS(createGroup(fromjson("{_id:1,a:{$push:[4,5,6]}}")), UserException);
- // Run standard base tests.
- CheckResultsBase::run();
- }
- std::deque<Document> inputData() {
- return {Document()};
- }
- BSONObj groupSpec() {
- // An array can be specified using $const.
- return fromjson("{_id:[1,2,3],a:{$push:{$const:[4,5,6]}}}");
- }
- string expectedResultSetString() {
- return "[{_id:[1,2,3],a:[[4,5,6]]}]";
- }
-};
-
-} // namespace DocumentSourceGroup
-
-namespace DocumentSourceProject {
-
-using mongo::DocumentSourceMock;
-using mongo::DocumentSourceProject;
-
-//
-// DocumentSourceProject delegates much of its responsibilities to the ParsedAggregationProjection.
-// Most of the functional tests are testing ParsedAggregationProjection directly. These are meant as
-// simpler integration tests.
-//
-
-/**
- * Class which provides useful helpers to test the functionality of the $project stage.
- */
-class ProjectStageTest : public Mock::Base, public unittest::Test {
-protected:
- /**
- * Creates the $project stage, which can be accessed via project().
- */
- void createProject(const BSONObj& projection) {
- BSONObj spec = BSON("$project" << projection);
- BSONElement specElement = spec.firstElement();
- _project = DocumentSourceProject::createFromBson(specElement, ctx());
- }
-
- DocumentSource* project() {
- return _project.get();
- }
-
- /**
- * Assert that iterator state accessors consistently report the source is exhausted.
- */
- void assertEOF() const {
- ASSERT(_project->getNext().isEOF());
- ASSERT(_project->getNext().isEOF());
- ASSERT(_project->getNext().isEOF());
- }
-
-private:
- intrusive_ptr<DocumentSource> _project;
-};
-
-TEST_F(ProjectStageTest, InclusionProjectionShouldRemoveUnspecifiedFields) {
- createProject(BSON("a" << true << "c" << BSON("d" << true)));
- auto source = DocumentSourceMock::create("{_id: 0, a: 1, b: 1, c: {d: 1}}");
- project()->setSource(source.get());
- // The first result exists and is as expected.
- auto next = project()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_EQUALS(1, next.getDocument().getField("a").getInt());
- ASSERT(next.getDocument().getField("b").missing());
- // The _id field is included by default in the root document.
- ASSERT_EQUALS(0, next.getDocument().getField("_id").getInt());
- // The nested c.d inclusion.
- ASSERT_EQUALS(1, next.getDocument()["c"]["d"].getInt());
-};
-
-TEST_F(ProjectStageTest, ShouldOptimizeInnerExpressions) {
- createProject(BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))));
- project()->optimize();
- // The $and should have been replaced with its only argument.
- vector<Value> serializedArray;
- project()->serializeToArray(serializedArray);
- ASSERT_BSONOBJ_EQ(serializedArray[0].getDocument().toBson(),
- fromjson("{$project: {_id: true, a: {$const: true}}}"));
-};
-
-TEST_F(ProjectStageTest, ShouldErrorOnNonObjectSpec) {
- // Can't use createProject() helper because we want to give a non-object spec.
- BSONObj spec = BSON("$project"
- << "foo");
- BSONElement specElement = spec.firstElement();
- ASSERT_THROWS(DocumentSourceProject::createFromBson(specElement, ctx()), UserException);
-};
-
-/**
- * Basic sanity check that two documents can be projected correctly with a simple inclusion
- * projection.
- */
-TEST_F(ProjectStageTest, InclusionShouldBeAbleToProcessMultipleDocuments) {
- createProject(BSON("a" << true));
- auto source = DocumentSourceMock::create({"{a: 1, b: 2}", "{a: 3, b: 4}"});
- project()->setSource(source.get());
- auto next = project()->getNext();
- ASSERT(next.isAdvanced());
- ASSERT_EQUALS(1, next.getDocument().getField("a").getInt());
- ASSERT(next.getDocument().getField("b").missing());
-
- next = project()->getNext();
- ASSERT(next.isAdvanced());
- ASSERT_EQUALS(3, next.getDocument().getField("a").getInt());
- ASSERT(next.getDocument().getField("b").missing());
-
- assertEOF();
-};
-
-/**
- * Basic sanity check that two documents can be projected correctly with a simple inclusion
- * projection.
- */
-TEST_F(ProjectStageTest, ExclusionShouldBeAbleToProcessMultipleDocuments) {
- createProject(BSON("a" << false));
- auto source = DocumentSourceMock::create({"{a: 1, b: 2}", "{a: 3, b: 4}"});
- project()->setSource(source.get());
- auto next = project()->getNext();
- ASSERT(next.isAdvanced());
- ASSERT(next.getDocument().getField("a").missing());
- ASSERT_EQUALS(2, next.getDocument().getField("b").getInt());
-
- next = project()->getNext();
- ASSERT(next.isAdvanced());
- ASSERT(next.getDocument().getField("a").missing());
- ASSERT_EQUALS(4, next.getDocument().getField("b").getInt());
-
- assertEOF();
-};
-
-TEST_F(ProjectStageTest, InclusionShouldAddDependenciesOfIncludedAndComputedFields) {
- createProject(fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"));
- DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, project()->getDependencies(&dependencies));
- ASSERT_EQUALS(5U, dependencies.fields.size());
-
- // Implicit _id dependency.
- ASSERT_EQUALS(1U, dependencies.fields.count("_id"));
-
- // Inclusion dependency.
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
-
- // Field path expression dependency.
- ASSERT_EQUALS(1U, dependencies.fields.count("b"));
-
- // Nested expression dependencies.
- ASSERT_EQUALS(1U, dependencies.fields.count("c"));
- ASSERT_EQUALS(1U, dependencies.fields.count("d"));
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(true, dependencies.getNeedTextScore());
-}
-
-TEST_F(ProjectStageTest, ExclusionShouldNotAddDependencies) {
- createProject(fromjson("{a: false, 'b.c': false}"));
-
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, project()->getDependencies(&dependencies));
-
- ASSERT_EQUALS(0U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
-}
-
-} // namespace DocumentSourceProject
-
-
-namespace DocumentSourceReplaceRoot {
-
-using mongo::DocumentSourceReplaceRoot;
-using mongo::DocumentSourceMock;
-
-class ReplaceRootBasics : public Mock::Base, public unittest::Test {
-public:
- ReplaceRootBasics() : _mock(DocumentSourceMock::create()) {}
-
-protected:
- virtual void createReplaceRoot(const BSONObj& replaceRoot) {
- BSONObj spec = BSON("$replaceRoot" << replaceRoot);
- BSONElement specElement = spec.firstElement();
- _replaceRoot = DocumentSourceReplaceRoot::createFromBson(specElement, ctx());
- _replaceRoot->setSource(source());
- }
-
- DocumentSource* replaceRoot() {
- return _replaceRoot.get();
- }
-
- DocumentSourceMock* source() {
- return _mock.get();
- }
-
- /**
- * Assert that iterator state accessors consistently report the source is exhausted.
- */
- void assertExhausted() const {
- ASSERT(_replaceRoot->getNext().isEOF());
- ASSERT(_replaceRoot->getNext().isEOF());
- ASSERT(_replaceRoot->getNext().isEOF());
- }
-
- intrusive_ptr<DocumentSource> _replaceRoot;
- intrusive_ptr<DocumentSourceMock> _mock;
-};
-
-// Verify that sending $newRoot a field path that contains an object in the document results
-// in the replacement of the root with that object.
-TEST_F(ReplaceRootBasics, FieldPathAsNewRootPromotesSubdocument) {
- createReplaceRoot(BSON("newRoot"
- << "$a"));
- Document subdoc = Document{{"b", 1}, {"c", "hello"}, {"d", Document{{"e", 2}}}};
- source()->queue.push_back(Document{{"a", subdoc}});
- auto next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc);
- assertExhausted();
-}
-
-// Verify that sending $newRoot a dotted field path that contains an object in the document results
-// in the replacement of the root with that object.
-TEST_F(ReplaceRootBasics, DottedFieldPathAsNewRootPromotesSubdocument) {
- createReplaceRoot(BSON("newRoot"
- << "$a.b"));
- // source document: {a: {b: {c: 3}}}
- Document subdoc = Document{{"c", 3}};
- source()->queue.push_back(Document{{"a", Document{{"b", subdoc}}}});
- auto next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc);
- assertExhausted();
-}
-
-// Verify that sending $newRoot a field path that contains an object in two different documents
-// results in the replacement of the root with that object in both documents.
-TEST_F(ReplaceRootBasics, FieldPathAsNewRootPromotesSubdocumentInMultipleDocuments) {
- createReplaceRoot(BSON("newRoot"
- << "$a"));
- Document subdoc1 = Document{{"b", 1}, {"c", 2}};
- Document subdoc2 = Document{{"b", 3}, {"c", 4}};
- source()->queue.push_back(Document{{"a", subdoc1}});
- source()->queue.push_back(Document{{"a", subdoc2}});
-
- // Verify that the first document that comes out is the first document we put in.
- auto next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc1);
-
- next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), subdoc2);
- assertExhausted();
-}
-
-// Verify that when newRoot contains an expression object, the document is replaced with that
-// object.
-TEST_F(ReplaceRootBasics, ExpressionObjectForNewRootReplacesRootWithThatObject) {
- createReplaceRoot(BSON("newRoot" << BSON("b" << 1)));
- source()->queue.push_back(Document{{"a", 2}});
- auto next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"b", 1}}));
- assertExhausted();
-
- BSONObj newObject = BSON("a" << 1 << "b" << 2 << "arr" << BSON_ARRAY(3 << 4 << 5));
- createReplaceRoot(BSON("newRoot" << newObject));
- source()->queue.push_back(Document{{"c", 2}});
- next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), Document(newObject));
- assertExhausted();
-
- createReplaceRoot(BSON("newRoot" << BSON("a" << BSON("b" << 1))));
- source()->queue.push_back(DOC("c" << 2));
- next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"a", Document{{"b", 1}}}}));
- assertExhausted();
-
- createReplaceRoot(BSON("newRoot" << BSON("a"
- << "$b")));
- source()->queue.push_back(DOC("b" << 2));
- next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), (Document{{"a", 2}}));
- assertExhausted();
-}
-
-// Verify that when newRoot contains a system variable, the document is replaced with the correct
-// object corresponding to that system variable.
-TEST_F(ReplaceRootBasics, SystemVariableForNewRootReplacesRootWithThatObject) {
- // System variables: at the start of the stage, $$CURRENT and $$ROOT both refer to the
- // entire input document.
- createReplaceRoot(BSON("newRoot"
- << "$$CURRENT"));
- Document inputDoc = Document{{"b", 2}};
- source()->queue.push_back(inputDoc);
- auto next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), inputDoc);
- assertExhausted();
-
- createReplaceRoot(BSON("newRoot"
- << "$$ROOT"));
- source()->queue.push_back(inputDoc);
- next = replaceRoot()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), inputDoc);
- assertExhausted();
-}
-
-// Verify that, as per the spec, we throw a user assertion when the expression at newRoot does
-// not resolve to an object.
-TEST_F(ReplaceRootBasics, ErrorsWhenNewRootDoesNotEvaluateToAnObject) {
- createReplaceRoot(BSON("newRoot"
- << "$a"));
-
- // A string is not an object.
- source()->queue.push_back(Document{{"a", "hello"}});
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40228);
- assertExhausted();
-
- // An integer is not an object.
- source()->queue.push_back(Document{{"a", 5}});
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40228);
- assertExhausted();
-
- // Literals are not objects.
- createReplaceRoot(BSON("newRoot" << BSON("$literal" << 1)));
- source()->queue.push_back(Document());
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40228);
- assertExhausted();
-
- // Most operator expressions do not resolve to objects.
- createReplaceRoot(BSON("newRoot" << BSON("$and"
- << "$a")));
- source()->queue.push_back(Document{{"a", true}});
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40228);
- assertExhausted();
-}
-
-// Verify that when newRoot contains a field path and that field path doesn't exist, we throw a user
-// error. This error happens whenever the expression evaluates to a "missing" Value.
-TEST_F(ReplaceRootBasics, ErrorsIfNewRootFieldPathDoesNotExist) {
- createReplaceRoot(BSON("newRoot"
- << "$a"));
-
- source()->queue.push_back(Document());
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40232);
- assertExhausted();
-
- source()->queue.push_back(Document{{"e", Document{{"b", Document{{"c", 3}}}}}});
- ASSERT_THROWS_CODE(replaceRoot()->getNext(), UserException, 40232);
- assertExhausted();
-}
-
-// Verify that the only dependent field is the root we are replacing with.
-TEST_F(ReplaceRootBasics, OnlyDependentFieldIsNewRoot) {
- createReplaceRoot(BSON("newRoot"
- << "$a.b"));
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_FIELDS, replaceRoot()->getDependencies(&dependencies));
-
- // Should only depend on field a.b
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(1U, dependencies.fields.count("a.b"));
- ASSERT_EQUALS(0U, dependencies.fields.count("a"));
- ASSERT_EQUALS(0U, dependencies.fields.count("b"));
-
- // Should not need any other fields.
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
-}
-
-/**
- * Fixture to test error cases of initializing the $replaceRoot stage.
- */
-class ReplaceRootSpec : public Mock::Base, public unittest::Test {
-public:
- intrusive_ptr<DocumentSource> createReplaceRoot(BSONObj replaceRootSpec) {
- auto specElement = replaceRootSpec.firstElement();
- return DocumentSourceReplaceRoot::createFromBson(specElement, ctx());
- }
-
- BSONObj createSpec(BSONObj spec) {
- return BSON("$replaceRoot" << spec);
- }
-
- BSONObj createFullSpec(BSONObj spec) {
- return BSON("$replaceRoot" << BSON("newRoot" << spec));
- }
-};
-
-// Verify that the creation of a $replaceRoot stage requires an object specification
-TEST_F(ReplaceRootSpec, CreationRequiresObjectSpecification) {
- ASSERT_THROWS_CODE(createReplaceRoot(BSON("$replaceRoot" << 1)), UserException, 40229);
- ASSERT_THROWS_CODE(createReplaceRoot(BSON("$replaceRoot"
- << "string")),
- UserException,
- 40229);
-}
-
-// Verify that the only valid option for the $replaceRoot object specification is newRoot.
-TEST_F(ReplaceRootSpec, OnlyValidOptionInObjectSpecIsNewRoot) {
- ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("newRoot"
- << "$a"
- << "root"
- << 2))),
- UserException,
- 40230);
- ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("newRoot"
- << "$a"
- << "path"
- << 2))),
- UserException,
- 40230);
- ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSON("path"
- << "$a"))),
- UserException,
- 40230);
-}
-
-// Verify that $replaceRoot requires a valid expression as input to the newRoot option.
-TEST_F(ReplaceRootSpec, RequiresExpressionForNewRootOption) {
- ASSERT_THROWS_CODE(createReplaceRoot(createSpec(BSONObj())), UserException, 40231);
- ASSERT_THROWS(createReplaceRoot(createSpec(BSON("newRoot"
- << "$$$a"))),
- UserException);
- ASSERT_THROWS(createReplaceRoot(createSpec(BSON("newRoot"
- << "$$a"))),
- UserException);
- ASSERT_THROWS(createReplaceRoot(createFullSpec(BSON("$map" << BSON("a" << 1)))), UserException);
-}
-
-// Verify that newRoot accepts all types of expressions.
-TEST_F(ReplaceRootSpec, NewRootAcceptsAllTypesOfExpressions) {
- // Field Path and system variables
- ASSERT_TRUE(createReplaceRoot(createSpec(BSON("newRoot"
- << "$a.b.c.d.e"))));
- ASSERT_TRUE(createReplaceRoot(createSpec(BSON("newRoot"
- << "$$CURRENT"))));
-
- // Literals
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$literal" << 1))));
-
- // Expression Objects
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("a" << BSON("b" << 1)))));
-
- // Operator Expressions
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$and"
- << "$a"))));
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$gt" << BSON_ARRAY("$a" << 1)))));
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$sqrt"
- << "$a"))));
-
- // Accumulators
- ASSERT_TRUE(createReplaceRoot(createFullSpec(BSON("$sum"
- << "$a"))));
-}
-
-} // namespace DocumentSourceReplaceRoot
-
-namespace DocumentSourceSample {
-
-using mongo::DocumentSourceSample;
-using mongo::DocumentSourceMock;
-
-class SampleBasics : public Mock::Base, public unittest::Test {
-public:
- SampleBasics() : _mock(DocumentSourceMock::create()) {}
-
-protected:
- virtual void createSample(long long size) {
- BSONObj spec = BSON("$sample" << BSON("size" << size));
- BSONElement specElement = spec.firstElement();
- _sample = DocumentSourceSample::createFromBson(specElement, ctx());
- sample()->setSource(_mock.get());
- checkBsonRepresentation(spec);
- }
-
- DocumentSource* sample() {
- return _sample.get();
- }
-
- DocumentSourceMock* source() {
- return _mock.get();
- }
-
- /**
- * Makes some general assertions about the results of a $sample stage.
- *
- * Creates a $sample stage with the given size, advances it 'nExpectedResults' times, asserting
- * the results come back in descending order of their assigned random values, then asserts the
- * stage is exhausted.
- */
- void checkResults(long long size, long long nExpectedResults) {
- createSample(size);
-
- boost::optional<Document> prevDoc;
- for (long long i = 0; i < nExpectedResults; i++) {
- auto nextResult = sample()->getNext();
- ASSERT_TRUE(nextResult.isAdvanced());
- auto thisDoc = nextResult.releaseDocument();
- ASSERT_TRUE(thisDoc.hasRandMetaField());
- if (prevDoc) {
- ASSERT_LTE(thisDoc.getRandMetaField(), prevDoc->getRandMetaField());
- }
- prevDoc = std::move(thisDoc);
- }
- assertEOF();
- }
-
- /**
- * Helper to load 'nDocs' documents into the source stage.
- */
- void loadDocuments(int nDocs) {
- for (int i = 0; i < nDocs; i++) {
- _mock->queue.push_back(DOC("_id" << i));
- }
- }
-
- /**
- * Assert that iterator state accessors consistently report the source is exhausted.
- */
- void assertEOF() const {
- ASSERT(_sample->getNext().isEOF());
- ASSERT(_sample->getNext().isEOF());
- ASSERT(_sample->getNext().isEOF());
- }
-
-protected:
- intrusive_ptr<DocumentSource> _sample;
- intrusive_ptr<DocumentSourceMock> _mock;
-
-private:
- /**
- * Check that the BSON representation generated by the source matches the BSON it was
- * created with.
- */
- void checkBsonRepresentation(const BSONObj& spec) {
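- // Note: the boolean argument to serialize() below is presumed to be the 'explain' flag;
- // passing false requests the normal representation, which should round-trip to the spec.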
- Value serialized = static_cast<DocumentSourceSample*>(sample())->serialize(false);
- auto generatedSpec = serialized.getDocument().toBson();
- ASSERT_BSONOBJ_EQ(spec, generatedSpec);
- }
-};
-
-/**
- * A sample of size 0 should return 0 results.
- */
-TEST_F(SampleBasics, ZeroSize) {
- loadDocuments(2);
- checkResults(0, 0);
-}
-
-/**
- * If the source stage is exhausted, the $sample stage should also be exhausted.
- */
-TEST_F(SampleBasics, SourceEOFBeforeSample) {
- loadDocuments(5);
- checkResults(10, 5);
-}
-
-/**
- * A $sample stage should limit the number of results to the given size.
- */
-TEST_F(SampleBasics, SampleEOFBeforeSource) {
- loadDocuments(10);
- checkResults(5, 5);
-}
-
-/**
- * The incoming documents should not be modified by a $sample stage (except their metadata).
- */
-TEST_F(SampleBasics, DocsUnmodified) {
- createSample(1);
- source()->queue.push_back(DOC("a" << 1 << "b" << DOC("c" << 2)));
- auto next = sample()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- auto doc = next.releaseDocument();
- ASSERT_EQUALS(1, doc["a"].getInt());
- ASSERT_EQUALS(2, doc["b"]["c"].getInt());
- ASSERT_TRUE(doc.hasRandMetaField());
- assertEOF();
-}
-
-/**
- * Fixture to test error cases of the $sample stage.
- */
-class InvalidSampleSpec : public Mock::Base, public unittest::Test {
-public:
- intrusive_ptr<DocumentSource> createSample(BSONObj sampleSpec) {
- auto specElem = sampleSpec.firstElement();
- return DocumentSourceSample::createFromBson(specElem, ctx());
- }
-
- BSONObj createSpec(BSONObj spec) {
- return BSON("$sample" << spec);
- }
-};
-
-TEST_F(InvalidSampleSpec, NonObject) {
- ASSERT_THROWS_CODE(createSample(BSON("$sample" << 1)), UserException, 28745);
- ASSERT_THROWS_CODE(createSample(BSON("$sample"
- << "string")),
- UserException,
- 28745);
-}
-
-TEST_F(InvalidSampleSpec, NonNumericSize) {
- ASSERT_THROWS_CODE(createSample(createSpec(BSON("size"
- << "string"))),
- UserException,
- 28746);
-}
-
-TEST_F(InvalidSampleSpec, NegativeSize) {
- ASSERT_THROWS_CODE(createSample(createSpec(BSON("size" << -1))), UserException, 28747);
- ASSERT_THROWS_CODE(createSample(createSpec(BSON("size" << -1.0))), UserException, 28747);
-}
-
-TEST_F(InvalidSampleSpec, ExtraOption) {
- ASSERT_THROWS_CODE(
- createSample(createSpec(BSON("size" << 1 << "extra" << 2))), UserException, 28748);
-}
-
-TEST_F(InvalidSampleSpec, MissingSize) {
- ASSERT_THROWS_CODE(createSample(createSpec(BSONObj())), UserException, 28749);
-}
-
-namespace DocumentSourceSampleFromRandomCursor {
-using mongo::DocumentSourceSampleFromRandomCursor;
-
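-/**
- * Fixture that reruns the SampleBasics cases against the optimized
- * DocumentSourceSampleFromRandomCursor stage, constructed directly (sampling on '_id', with a
- * presumed collection size of 100) rather than parsed from BSON.
- */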
-class SampleFromRandomCursorBasics : public SampleBasics {
-public:
- void createSample(long long size) override {
- _sample = DocumentSourceSampleFromRandomCursor::create(ctx(), size, "_id", 100);
- sample()->setSource(_mock.get());
- }
-};
-
-/**
- * A sample of size zero should not return any results.
- */
-TEST_F(SampleFromRandomCursorBasics, ZeroSize) {
- loadDocuments(2);
- checkResults(0, 0);
-}
-
-/**
- * If the source stage is exhausted before the requested sample size is reached, the
- * $sampleFromRandomCursor stage should also be exhausted.
- */
-TEST_F(SampleFromRandomCursorBasics, SourceEOFBeforeSample) {
- loadDocuments(5);
- checkResults(10, 5);
-}
-
-/**
- * When sampling fewer documents than the source stage can produce, the $sampleFromRandomCursor
- * stage should return no more than the requested sample size.
- */
-TEST_F(SampleFromRandomCursorBasics, SampleEOFBeforeSource) {
- loadDocuments(10);
- checkResults(5, 5);
-}
-
-/**
- * The $sampleFromRandomCursor stage should not modify the contents of the documents.
- */
-TEST_F(SampleFromRandomCursorBasics, DocsUnmodified) {
- createSample(1);
- source()->queue.push_back(DOC("_id" << 1 << "b" << DOC("c" << 2)));
- auto next = sample()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- auto doc = next.releaseDocument();
- ASSERT_EQUALS(1, doc["_id"].getInt());
- ASSERT_EQUALS(2, doc["b"]["c"].getInt());
- ASSERT_TRUE(doc.hasRandMetaField());
- assertEOF();
-}
-
-/**
- * The $sampleFromRandomCursor stage should ignore duplicate documents.
- */
-TEST_F(SampleFromRandomCursorBasics, IgnoreDuplicates) {
- createSample(2);
- source()->queue.push_back(DOC("_id" << 1));
- source()->queue.push_back(DOC("_id" << 1)); // Duplicate, should ignore.
- source()->queue.push_back(DOC("_id" << 2));
-
- auto next = sample()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- auto doc = next.releaseDocument();
- ASSERT_EQUALS(1, doc["_id"].getInt());
- ASSERT_TRUE(doc.hasRandMetaField());
- double doc1Meta = doc.getRandMetaField();
-
- // Should ignore the duplicate {_id: 1}, and return {_id: 2}.
- next = sample()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- doc = next.releaseDocument();
- ASSERT_EQUALS(2, doc["_id"].getInt());
- ASSERT_TRUE(doc.hasRandMetaField());
- double doc2Meta = doc.getRandMetaField();
- ASSERT_GTE(doc1Meta, doc2Meta);
-
- // Both stages should be exhausted.
- ASSERT_TRUE(source()->getNext().isEOF());
- assertEOF();
-}
-
-/**
- * The $sampleFromRandomCursor stage should error if it receives too many duplicate documents.
- */
-TEST_F(SampleFromRandomCursorBasics, TooManyDups) {
- createSample(2);
- for (int i = 0; i < 1000; i++) {
- source()->queue.push_back(DOC("_id" << 1));
- }
-
- // The first document is not a duplicate, so it should be returned successfully.
- ASSERT_TRUE(sample()->getNext().isAdvanced());
-
- // The rest are duplicates, should error.
- ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28799);
-}
-
-/**
- * The $sampleFromRandomCursor stage should error if it receives a document without an _id.
- */
-TEST_F(SampleFromRandomCursorBasics, MissingIdField) {
- // Once with only a bad document.
- createSample(2); // _idField is '_id'.
- source()->queue.push_back(DOC("non_id" << 2));
- ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28793);
-
- // Again, with some regular documents before a bad one.
- createSample(2); // _idField is '_id'.
- source()->queue.push_back(DOC("_id" << 1));
- source()->queue.push_back(DOC("_id" << 1));
- source()->queue.push_back(DOC("non_id" << 2));
-
- // First should be successful.
- ASSERT_TRUE(sample()->getNext().isAdvanced());
-
- ASSERT_THROWS_CODE(sample()->getNext(), UserException, 28793);
-}
-
-/**
- * The $sampleFromRandomCursor stage should set the random meta value in a way that mimics the
- * non-optimized case.
- */
-TEST_F(SampleFromRandomCursorBasics, MimicNonOptimized) {
- // Compute the average random meta value of each document returned.
- double firstTotal = 0.0;
- double secondTotal = 0.0;
- int nTrials = 10000;
- for (int i = 0; i < nTrials; i++) {
- // Sample 2 out of 3 documents.
- _sample = DocumentSourceSampleFromRandomCursor::create(ctx(), 2, "_id", 3);
- sample()->setSource(_mock.get());
-
- source()->queue.push_back(DOC("_id" << 1));
- source()->queue.push_back(DOC("_id" << 2));
-
- auto doc = sample()->getNext();
- ASSERT_TRUE(doc.isAdvanced());
- ASSERT_TRUE(doc.getDocument().hasRandMetaField());
- firstTotal += doc.getDocument().getRandMetaField();
-
- doc = sample()->getNext();
- ASSERT_TRUE(doc.isAdvanced());
- ASSERT_TRUE(doc.getDocument().hasRandMetaField());
- secondTotal += doc.getDocument().getRandMetaField();
- }
- // The average random meta value of the first document should be about 0.75. We assume that
- // 10000 trials is sufficient for us to apply the Central Limit Theorem. Using an error
- // tolerance of 0.02 gives us a spurious failure rate approximately equal to 10^-24.
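- // (Where 0.75 comes from, as a sketch: the non-optimized $sample effectively sorts by a
- // uniform random value and keeps the top 'size' documents, so the first result is the
- // largest of three U(0,1) draws and the second is the middle one. The i-th largest of n
- // independent U(0,1) values has expectation (n + 1 - i) / (n + 1), giving 3/4 and 2/4 here.)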
- ASSERT_GTE(firstTotal / nTrials, 0.73);
- ASSERT_LTE(firstTotal / nTrials, 0.77);
-
- // The average random meta value of the second document should be about 0.5.
- ASSERT_GTE(secondTotal / nTrials, 0.48);
- ASSERT_LTE(secondTotal / nTrials, 0.52);
-}
-} // namespace DocumentSourceSampleFromRandomCursor
-
-} // namespace DocumentSourceSample
-
-namespace DocumentSourceSort {
-
-using mongo::DocumentSourceSort;
-using mongo::DocumentSourceMock;
-
-class Base : public Mock::Base {
-protected:
- void createSort(const BSONObj& sortKey = BSON("a" << 1)) {
- BSONObj spec = BSON("$sort" << sortKey);
- BSONElement specElement = spec.firstElement();
- _sort = DocumentSourceSort::createFromBson(specElement, ctx());
- checkBsonRepresentation(spec);
- }
- DocumentSourceSort* sort() {
- return dynamic_cast<DocumentSourceSort*>(_sort.get());
- }
- /** Assert that iterator state accessors consistently report the source is exhausted. */
- void assertEOF() const {
- ASSERT(_sort->getNext().isEOF());
- ASSERT(_sort->getNext().isEOF());
- ASSERT(_sort->getNext().isEOF());
- }
-
-private:
- /**
- * Check that the BSON representation generated by the source matches the BSON it was
- * created with.
- */
- void checkBsonRepresentation(const BSONObj& spec) {
- vector<Value> arr;
- _sort->serializeToArray(arr);
- BSONObj generatedSpec = arr[0].getDocument().toBson();
- ASSERT_BSONOBJ_EQ(spec, generatedSpec);
- }
- intrusive_ptr<DocumentSource> _sort;
-};
-
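-/**
- * Verify that a $limit following a $sort coalesces into the sort stage: the first $limit is
- * absorbed as the sort's limit, a larger subsequent $limit leaves it unchanged, and a smaller
- * one reduces it.
- */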
-class SortWithLimit : public Base {
-public:
- void run() {
- createSort(BSON("a" << 1));
- ASSERT_EQUALS(sort()->getLimit(), -1);
-
- Pipeline::SourceContainer container;
- container.push_back(sort());
-
- { // pre-limit checks
- vector<Value> arr;
- sort()->serializeToArray(arr);
- ASSERT_BSONOBJ_EQ(arr[0].getDocument().toBson(), BSON("$sort" << BSON("a" << 1)));
-
- ASSERT(sort()->getShardSource() == NULL);
- ASSERT(sort()->getMergeSource() != NULL);
- }
-
- container.push_back(mkLimit(10));
- sort()->optimizeAt(container.begin(), &container);
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(sort()->getLimit(), 10);
-
- // unchanged
- container.push_back(mkLimit(15));
- sort()->optimizeAt(container.begin(), &container);
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(sort()->getLimit(), 10);
-
- // reduced
- container.push_back(mkLimit(5));
- sort()->optimizeAt(container.begin(), &container);
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(sort()->getLimit(), 5);
-
- vector<Value> arr;
- sort()->serializeToArray(arr);
- ASSERT_VALUE_EQ(
- Value(arr),
- DOC_ARRAY(DOC("$sort" << DOC("a" << 1)) << DOC("$limit" << sort()->getLimit())));
-
- ASSERT(sort()->getShardSource() != NULL);
- ASSERT(sort()->getMergeSource() != NULL);
- }
-
- intrusive_ptr<DocumentSource> mkLimit(int limit) {
- BSONObj obj = BSON("$limit" << limit);
- BSONElement e = obj.firstElement();
- return mongo::DocumentSourceLimit::createFromBson(e, ctx());
- }
-};
-
-class CheckResultsBase : public Base {
-public:
- virtual ~CheckResultsBase() {}
- void run() {
- createSort(sortSpec());
- auto source = DocumentSourceMock::create(inputData());
- sort()->setSource(source.get());
-
- // Load the results from the DocumentSourceSort.
- vector<Document> resultSet;
- for (auto output = sort()->getNext(); output.isAdvanced(); output = sort()->getNext()) {
- // Get the current result.
- resultSet.push_back(output.releaseDocument());
- }
- // Verify the DocumentSourceSort is exhausted.
- assertEOF();
-
- // Convert results to BSON once they all have been retrieved (to detect any errors
- // resulting from incorrectly shared sub objects).
- BSONArrayBuilder bsonResultSet;
- for (auto&& result : resultSet) {
- bsonResultSet << result;
- }
- // Check the result set.
- ASSERT_BSONOBJ_EQ(expectedResultSet(), bsonResultSet.arr());
- }
-
-protected:
- virtual std::deque<Document> inputData() {
- return {};
- }
- virtual BSONObj expectedResultSet() {
- BSONObj wrappedResult =
- // fromjson cannot parse an array, so place the array within an object.
- fromjson(string("{'':") + expectedResultSetString() + "}");
- return wrappedResult[""].embeddedObject().getOwned();
- }
- virtual string expectedResultSetString() {
- return "[]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a" << 1);
- }
-};
-
-class InvalidSpecBase : public Base {
-public:
- virtual ~InvalidSpecBase() {}
- void run() {
- ASSERT_THROWS(createSort(sortSpec()), UserException);
- }
-
-protected:
- virtual BSONObj sortSpec() = 0;
-};
-
-class InvalidOperationBase : public Base {
-public:
- virtual ~InvalidOperationBase() {}
- void run() {
- createSort(sortSpec());
- auto source = DocumentSourceMock::create(inputData());
- sort()->setSource(source.get());
- ASSERT_THROWS(exhaust(), UserException);
- }
-
-protected:
- virtual std::deque<Document> inputData() = 0;
- virtual BSONObj sortSpec() {
- return BSON("a" << 1);
- }
-
-private:
- void exhaust() {
- for (auto output = sort()->getNext(); !output.isEOF(); output = sort()->getNext()) {
- invariant(!output.isPaused()); // do nothing
- }
- }
-};
-
-/** No documents in source. */
-class Empty : public CheckResultsBase {};
-
-/** Sort a single document. */
-class SingleValue : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1)};
- }
- string expectedResultSetString() {
- return "[{_id:0,a:1}]";
- }
-};
-
-/** Sort two documents. */
-class TwoValues : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 2), DOC("_id" << 1 << "a" << 1)};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:1},{_id:0,a:2}]";
- }
-};
-
-/** Sort spec is not an object. */
-class NonObjectSpec : public Base {
-public:
- void run() {
- BSONObj spec = BSON("$sort" << 1);
- BSONElement specElement = spec.firstElement();
- ASSERT_THROWS(DocumentSourceSort::createFromBson(specElement, ctx()), UserException);
- }
-};
-
-/** Sort spec is an empty object. */
-class EmptyObjectSpec : public InvalidSpecBase {
- BSONObj sortSpec() {
- return BSONObj();
- }
-};
-
-/** Sort spec value is not a number. */
-class NonNumberDirectionSpec : public InvalidSpecBase {
- BSONObj sortSpec() {
- return BSON("a"
- << "b");
- }
-};
-
-/** A sort direction of 0 is not valid. */
-class InvalidNumberDirectionSpec : public InvalidSpecBase {
- BSONObj sortSpec() {
- return BSON("a" << 0);
- }
-};
-
-/** Sort spec with a descending field. */
-class DescendingOrder : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 2), DOC("_id" << 1 << "a" << 1)};
- }
- string expectedResultSetString() {
- return "[{_id:0,a:2},{_id:1,a:1}]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a" << -1);
- }
-};
-
-/** Sort spec with a dotted field. */
-class DottedSortField : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << DOC("b" << 2)), DOC("_id" << 1 << "a" << DOC("b" << 1))};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:{b:1}},{_id:0,a:{b:2}}]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a.b" << 1);
- }
-};
-
-/** Sort spec with a compound key. */
-class CompoundSortSpec : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1 << "b" << 3),
- DOC("_id" << 1 << "a" << 1 << "b" << 2),
- DOC("_id" << 2 << "a" << 0 << "b" << 4)};
- }
- string expectedResultSetString() {
- return "[{_id:2,a:0,b:4},{_id:1,a:1,b:2},{_id:0,a:1,b:3}]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a" << 1 << "b" << 1);
- }
-};
-
-/** Sort spec with a compound key and descending order. */
-class CompoundSortSpecAlternateOrder : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1 << "b" << 3),
- DOC("_id" << 1 << "a" << 1 << "b" << 2),
- DOC("_id" << 2 << "a" << 0 << "b" << 4)};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:1,b:2},{_id:0,a:1,b:3},{_id:2,a:0,b:4}]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a" << -1 << "b" << 1);
- }
-};
-
-/** Sort spec with a compound key, descending on the second field. */
-class CompoundSortSpecAlternateOrderSecondField : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1 << "b" << 3),
- DOC("_id" << 1 << "a" << 1 << "b" << 2),
- DOC("_id" << 2 << "a" << 0 << "b" << 4)};
- }
- string expectedResultSetString() {
- return "[{_id:2,a:0,b:4},{_id:0,a:1,b:3},{_id:1,a:1,b:2}]";
- }
- virtual BSONObj sortSpec() {
- return BSON("a" << 1 << "b" << -1);
- }
-};
-
-/** Values of different BSON types sort in the canonical type order (numbers before strings). */
-class InconsistentTypeSort : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1),
- DOC("_id" << 1 << "a"
- << "foo")};
- }
- string expectedResultSetString() {
- return "[{_id:0,a:1},{_id:1,a:\"foo\"}]";
- }
-};
-
-/** Sorting different numeric types is supported. */
-class MixedNumericSort : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 2.3), DOC("_id" << 1 << "a" << 1)};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:1},{_id:0,a:2.3}]";
- }
-};
-
-/** Ordering of a missing value. */
-class MissingValue : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1), DOC("_id" << 1)};
- }
- string expectedResultSetString() {
- return "[{_id:1},{_id:0,a:1}]";
- }
-};
-
-/** Ordering of a null value. */
-class NullValue : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << 1), DOC("_id" << 1 << "a" << BSONNULL)};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:null},{_id:0,a:1}]";
- }
-};
-
-/**
- * Order by text score.
- */
-class TextScore : public CheckResultsBase {
- std::deque<Document> inputData() {
- MutableDocument first;
- first["_id"] = Value(0);
- first.setTextScore(10);
- MutableDocument second;
- second["_id"] = Value(1);
- second.setTextScore(20);
- return {first.freeze(), second.freeze()};
- }
-
- string expectedResultSetString() {
- return "[{_id:1},{_id:0}]";
- }
-
- BSONObj sortSpec() {
- return BSON("$computed0" << metaTextScore);
- }
-};
-
-/**
- * Order by random value in metadata.
- */
-class RandMeta : public CheckResultsBase {
- std::deque<Document> inputData() {
- MutableDocument first;
- first["_id"] = Value(0);
- first.setRandMetaField(0.01);
- MutableDocument second;
- second["_id"] = Value(1);
- second.setRandMetaField(0.02);
- return {first.freeze(), second.freeze()};
- }
-
- string expectedResultSetString() {
- return "[{_id:1},{_id:0}]";
- }
-
- BSONObj sortSpec() {
- return BSON("$computed0" << BSON("$meta"
- << "randVal"));
- }
-};
-
-/** A nested path that is missing within an array yields an empty array sort key, which sorts first. */
-class MissingObjectWithinArray : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1)),
- DOC("_id" << 1 << "a" << DOC_ARRAY(DOC("b" << 1)))};
- }
- string expectedResultSetString() {
- return "[{_id:0,a:[1]},{_id:1,a:[{b:1}]}]";
- }
- BSONObj sortSpec() {
- return BSON("a.b" << 1);
- }
-};
-
-/** Compare nested values from within an array. */
-class ExtractArrayValues : public CheckResultsBase {
- std::deque<Document> inputData() {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(DOC("b" << 1) << DOC("b" << 2))),
- DOC("_id" << 1 << "a" << DOC_ARRAY(DOC("b" << 1) << DOC("b" << 1)))};
- }
- string expectedResultSetString() {
- return "[{_id:1,a:[{b:1},{b:1}]},{_id:0,a:[{b:1},{b:2}]}]";
- }
- BSONObj sortSpec() {
- return BSON("a.b" << 1);
- }
-};
-
-/** Dependent field paths. */
-class Dependencies : public Base {
-public:
- void run() {
- createSort(BSON("a" << 1 << "b.c" << -1));
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, sort()->getDependencies(&dependencies));
- ASSERT_EQUALS(2U, dependencies.fields.size());
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
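-/** Verify that $sort reports each non-empty prefix of its sort pattern as an output sort. */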
-class OutputSort : public Base {
-public:
- void run() {
- createSort(BSON("a" << 1 << "b.c" << -1));
- BSONObjSet outputSort = sort()->getOutputSorts();
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1 << "b.c" << -1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 2U);
- }
-};
-
-} // namespace DocumentSourceSort
-
-namespace DocumentSourceUnwind {
-
-using mongo::DocumentSourceUnwind;
-using mongo::DocumentSourceMock;
-
-class CheckResultsBase : public Mock::Base {
-public:
- virtual ~CheckResultsBase() {}
-
- void run() {
- // Once with the simple syntax.
- createSimpleUnwind();
- assertResultsMatch(expectedResultSet(false, false));
-
- // Once with the full syntax.
- createUnwind(false, false);
- assertResultsMatch(expectedResultSet(false, false));
-
- // Once with the preserveNullAndEmptyArrays parameter.
- createUnwind(true, false);
- assertResultsMatch(expectedResultSet(true, false));
-
- // Once with the includeArrayIndex parameter.
- createUnwind(false, true);
- assertResultsMatch(expectedResultSet(false, true));
-
- // Once with both the preserveNullAndEmptyArrays and includeArrayIndex parameters.
- createUnwind(true, true);
- assertResultsMatch(expectedResultSet(true, true));
- }
-
-protected:
- virtual string unwindFieldPath() const {
- return "$a";
- }
-
- virtual string indexPath() const {
- return "index";
- }
-
- virtual std::deque<Document> inputData() {
- return {};
- }
-
- /**
- * Returns a json string representing the expected results for a normal $unwind without any
- * options.
- */
- virtual string expectedResultSetString() const {
- return "[]";
- }
-
- /**
- * Returns a json string representing the expected results for a $unwind with the
- * preserveNullAndEmptyArrays parameter set.
- */
- virtual string expectedPreservedResultSetString() const {
- return expectedResultSetString();
- }
-
- /**
- * Returns a json string representing the expected results for a $unwind with the
- * includeArrayIndex parameter set.
- */
- virtual string expectedIndexedResultSetString() const {
- return "[]";
- }
-
- /**
- * Returns a json string representing the expected results for a $unwind with both the
- * preserveNullAndEmptyArrays and the includeArrayIndex parameters set.
- */
- virtual string expectedPreservedIndexedResultSetString() const {
- return expectedIndexedResultSetString();
- }
-
-private:
- /**
- * Initializes '_unwind' using the simple '{$unwind: '$path'}' syntax.
- */
- void createSimpleUnwind() {
- auto specObj = BSON("$unwind" << unwindFieldPath());
- _unwind = static_cast<DocumentSourceUnwind*>(
- DocumentSourceUnwind::createFromBson(specObj.firstElement(), ctx()).get());
- checkBsonRepresentation(false, false);
- }
-
- /**
- * Initializes '_unwind' using the full '{$unwind: {path: '$path'}}' syntax.
- */
- void createUnwind(bool preserveNullAndEmptyArrays, bool includeArrayIndex) {
- auto specObj =
- DOC("$unwind" << DOC("path" << unwindFieldPath() << "preserveNullAndEmptyArrays"
- << preserveNullAndEmptyArrays
- << "includeArrayIndex"
- << (includeArrayIndex ? Value(indexPath()) : Value())));
- _unwind = static_cast<DocumentSourceUnwind*>(
- DocumentSourceUnwind::createFromBson(specObj.toBson().firstElement(), ctx()).get());
- checkBsonRepresentation(preserveNullAndEmptyArrays, includeArrayIndex);
- }
-
- /**
- * Extracts the documents from the $unwind stage, and asserts the actual results match the
- * expected results.
- *
- * '_unwind' must be initialized before calling this method.
- */
- void assertResultsMatch(BSONObj expectedResults) {
- auto source = DocumentSourceMock::create(inputData());
- _unwind->setSource(source.get());
- // Load the results from the DocumentSourceUnwind.
- vector<Document> resultSet;
- for (auto output = _unwind->getNext(); output.isAdvanced(); output = _unwind->getNext()) {
- // Get the current result.
- resultSet.push_back(output.releaseDocument());
- }
- // Verify the DocumentSourceUnwind is exhausted.
- assertEOF();
-
- // Convert results to BSON once they all have been retrieved (to detect any errors resulting
- // from incorrectly shared sub objects).
- BSONArrayBuilder bsonResultSet;
- for (vector<Document>::const_iterator i = resultSet.begin(); i != resultSet.end(); ++i) {
- bsonResultSet << *i;
- }
- // Check the result set.
- ASSERT_BSONOBJ_EQ(expectedResults, bsonResultSet.arr());
- }
-
- /**
- * Check that the BSON representation generated by the source matches the BSON it was
- * created with.
- */
- void checkBsonRepresentation(bool preserveNullAndEmptyArrays, bool includeArrayIndex) {
- vector<Value> arr;
- _unwind->serializeToArray(arr);
- BSONObj generatedSpec = Value(arr[0]).getDocument().toBson();
- ASSERT_BSONOBJ_EQ(expectedSerialization(preserveNullAndEmptyArrays, includeArrayIndex),
- generatedSpec);
- }
-
- BSONObj expectedSerialization(bool preserveNullAndEmptyArrays, bool includeArrayIndex) const {
- return DOC("$unwind" << DOC("path" << Value(unwindFieldPath())
- << "preserveNullAndEmptyArrays"
- << (preserveNullAndEmptyArrays ? Value(true) : Value())
- << "includeArrayIndex"
- << (includeArrayIndex ? Value(indexPath()) : Value())))
- .toBson();
- }
-
- /** Assert that iterator state accessors consistently report the source is exhausted. */
- void assertEOF() const {
- ASSERT(_unwind->getNext().isEOF());
- ASSERT(_unwind->getNext().isEOF());
- ASSERT(_unwind->getNext().isEOF());
- }
-
- BSONObj expectedResultSet(bool preserveNullAndEmptyArrays, bool includeArrayIndex) const {
- string expectedResultsString;
- if (preserveNullAndEmptyArrays) {
- if (includeArrayIndex) {
- expectedResultsString = expectedPreservedIndexedResultSetString();
- } else {
- expectedResultsString = expectedPreservedResultSetString();
- }
- } else {
- if (includeArrayIndex) {
- expectedResultsString = expectedIndexedResultSetString();
- } else {
- expectedResultsString = expectedResultSetString();
- }
- }
- // fromjson() cannot parse an array, so place the array within an object.
- BSONObj wrappedResult = fromjson(string("{'':") + expectedResultsString + "}");
- return wrappedResult[""].embeddedObject().getOwned();
- }
-
- intrusive_ptr<DocumentSourceUnwind> _unwind;
-};
-
-/** An empty collection produces no results. */
-class Empty : public CheckResultsBase {};
-
-/**
- * An empty array does not produce any results normally, but if preserveNullAndEmptyArrays is
- * passed, the document is preserved.
- */
-class EmptyArray : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << BSONArray())};
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, index: null}]";
- }
-};
-
-/**
- * A missing value does not produce any results normally, but if preserveNullAndEmptyArrays is
- * passed, the document is preserved.
- */
-class MissingValue : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0)};
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, index: null}]";
- }
-};
-
-/**
- * A null value does not produce any results normally, but if preserveNullAndEmptyArrays is passed,
- * the document is preserved.
- */
-class Null : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << BSONNULL)};
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: null}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: null, index: null}]";
- }
-};
-
-/**
- * An undefined value does not produce any results normally, but if preserveNullAndEmptyArrays is
- * passed, the document is preserved.
- */
-class Undefined : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << BSONUndefined)};
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: undefined}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: undefined, index: null}]";
- }
-};
-
-/** Unwind an array with one value. */
-class OneValue : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 1}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0}]";
- }
-};
-
-/** Unwind an array with two values. */
-class TwoValues : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 1}, {_id: 0, a: 2}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: 2, index: 1}]";
- }
-};
-
-/** Unwind an array with two values, one of which is null. */
-class ArrayWithNull : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << BSONNULL))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 1}, {_id: 0, a: null}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: null, index: 1}]";
- }
-};
-
-/** Unwind two documents with arrays. */
-class TwoDocuments : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2)),
- DOC("_id" << 1 << "a" << DOC_ARRAY(3 << 4))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 1, a: 3}, {_id: 1, a: 4}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: 2, index: 1},"
- " {_id: 1, a: 3, index: 0}, {_id: 1, a: 4, index: 1}]";
- }
-};
-
-/** Unwind an array in a nested document. */
-class NestedArray : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC("b" << DOC_ARRAY(1 << 2) << "c" << 3))};
- }
- string unwindFieldPath() const override {
- return "$a.b";
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: {b: 1, c: 3}}, {_id: 0, a: {b: 2, c: 3}}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: {b: 1, c: 3}, index: 0},"
- " {_id: 0, a: {b: 2, c: 3}, index: 1}]";
- }
-};
-
-/**
- * A nested path produces no results when there is no sub-document that matches the path, unless
- * preserveNullAndEmptyArrays is specified.
- */
-class NonObjectParent : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << 4)};
- }
- string unwindFieldPath() const override {
- return "$a.b";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 4}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 4, index: null}]";
- }
-};
-
-/** Unwind an array in a doubly nested document. */
-class DoubleNestedArray : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a"
- << DOC("b" << DOC("d" << DOC_ARRAY(1 << 2) << "e" << 4) << "c" << 3))};
- }
- string unwindFieldPath() const override {
- return "$a.b.d";
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: {b: {d: 1, e: 4}, c: 3}}, {_id: 0, a: {b: {d: 2, e: 4}, c: 3}}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: {b: {d: 1, e: 4}, c: 3}, index: 0}, "
- " {_id: 0, a: {b: {d: 2, e: 4}, c: 3}, index: 1}]";
- }
-};
-
-/** Unwind several documents in a row. */
-class SeveralDocuments : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2 << 3)),
- DOC("_id" << 1),
- DOC("_id" << 2),
- DOC("_id" << 3 << "a" << DOC_ARRAY(10 << 20)),
- DOC("_id" << 4 << "a" << DOC_ARRAY(30))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 0, a: 3},"
- " {_id: 3, a: 10}, {_id: 3, a: 20},"
- " {_id: 4, a: 30}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 0, a: 3},"
- " {_id: 1},"
- " {_id: 2},"
- " {_id: 3, a: 10}, {_id: 3, a: 20},"
- " {_id: 4, a: 30}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0},"
- " {_id: 0, a: 2, index: 1},"
- " {_id: 0, a: 3, index: 2},"
- " {_id: 3, a: 10, index: 0},"
- " {_id: 3, a: 20, index: 1},"
- " {_id: 4, a: 30, index: 0}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 1, index: 0},"
- " {_id: 0, a: 2, index: 1},"
- " {_id: 0, a: 3, index: 2},"
- " {_id: 1, index: null},"
- " {_id: 2, index: null},"
- " {_id: 3, a: 10, index: 0},"
- " {_id: 3, a: 20, index: 1},"
- " {_id: 4, a: 30, index: 0}]";
- }
-};
-
-/** Unwind several more documents in a row. */
-class SeveralMoreDocuments : public CheckResultsBase {
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << BSONNULL),
- DOC("_id" << 1),
- DOC("_id" << 2 << "a" << DOC_ARRAY("a"
- << "b")),
- DOC("_id" << 3),
- DOC("_id" << 4 << "a" << DOC_ARRAY(1 << 2 << 3)),
- DOC("_id" << 5 << "a" << DOC_ARRAY(4 << 5 << 6)),
- DOC("_id" << 6 << "a" << DOC_ARRAY(7 << 8 << 9)),
- DOC("_id" << 7 << "a" << BSONArray())};
- }
- string expectedResultSetString() const override {
- return "[{_id: 2, a: 'a'}, {_id: 2, a: 'b'},"
- " {_id: 4, a: 1}, {_id: 4, a: 2}, {_id: 4, a: 3},"
- " {_id: 5, a: 4}, {_id: 5, a: 5}, {_id: 5, a: 6},"
- " {_id: 6, a: 7}, {_id: 6, a: 8}, {_id: 6, a: 9}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: null},"
- " {_id: 1},"
- " {_id: 2, a: 'a'}, {_id: 2, a: 'b'},"
- " {_id: 3},"
- " {_id: 4, a: 1}, {_id: 4, a: 2}, {_id: 4, a: 3},"
- " {_id: 5, a: 4}, {_id: 5, a: 5}, {_id: 5, a: 6},"
- " {_id: 6, a: 7}, {_id: 6, a: 8}, {_id: 6, a: 9},"
- " {_id: 7}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 2, a: 'a', index: 0},"
- " {_id: 2, a: 'b', index: 1},"
- " {_id: 4, a: 1, index: 0},"
- " {_id: 4, a: 2, index: 1},"
- " {_id: 4, a: 3, index: 2},"
- " {_id: 5, a: 4, index: 0},"
- " {_id: 5, a: 5, index: 1},"
- " {_id: 5, a: 6, index: 2},"
- " {_id: 6, a: 7, index: 0},"
- " {_id: 6, a: 8, index: 1},"
- " {_id: 6, a: 9, index: 2}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: null, index: null},"
- " {_id: 1, index: null},"
- " {_id: 2, a: 'a', index: 0},"
- " {_id: 2, a: 'b', index: 1},"
- " {_id: 3, index: null},"
- " {_id: 4, a: 1, index: 0},"
- " {_id: 4, a: 2, index: 1},"
- " {_id: 4, a: 3, index: 2},"
- " {_id: 5, a: 4, index: 0},"
- " {_id: 5, a: 5, index: 1},"
- " {_id: 5, a: 6, index: 2},"
- " {_id: 6, a: 7, index: 0},"
- " {_id: 6, a: 8, index: 1},"
- " {_id: 6, a: 9, index: 2},"
- " {_id: 7, index: null}]";
- }
-};
-
-/**
- * Test the 'includeArrayIndex' option, where the specified path is part of a sub-object.
- */
-class IncludeArrayIndexSubObject : public CheckResultsBase {
- string indexPath() const override {
- return "b.index";
- }
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << DOC("x" << 100)),
- DOC("_id" << 1 << "a" << 1 << "b" << DOC("x" << 100)),
- DOC("_id" << 2 << "b" << DOC("x" << 100))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {x: 100}}, {_id: 1, a: 1, b: {x: 100}}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {x: 100}}, {_id: 1, a: 1, b: {x: 100}}, {_id: 2, b: {x: 100}}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {x: 100, index: 0}}, {_id: 1, a: 1, b: {x: 100, index: null}}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {x: 100, index: 0}},"
- " {_id: 1, a: 1, b: {x: 100, index: null}},"
- " {_id: 2, b: {x: 100, index: null}}]";
- }
-};
-
-/**
- * Test the 'includeArrayIndex' option, where the specified path overrides an existing field.
- */
-class IncludeArrayIndexOverrideExisting : public CheckResultsBase {
- string indexPath() const override {
- return "b";
- }
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << 100),
- DOC("_id" << 1 << "a" << 1 << "b" << 100),
- DOC("_id" << 2 << "b" << 100)};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}, {_id: 2, b: 100}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 0}, {_id: 1, a: 1, b: null}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 0}, {_id: 1, a: 1, b: null}, {_id: 2, b: null}]";
- }
-};
-
-/**
- * Test the 'includeArrayIndex' option, where the specified path overrides an existing nested field.
- */
-class IncludeArrayIndexOverrideExistingNested : public CheckResultsBase {
- string indexPath() const override {
- return "b.index";
- }
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << 100),
- DOC("_id" << 1 << "a" << 1 << "b" << 100),
- DOC("_id" << 2 << "b" << 100)};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}, {_id: 2, b: 100}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {index: 0}}, {_id: 1, a: 1, b: {index: null}}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0, b: {index: 0}},"
- " {_id: 1, a: 1, b: {index: null}},"
- " {_id: 2, b: {index: null}}]";
- }
-};
-
-/**
- * Test the 'includeArrayIndex' option, where the specified path overrides the field that was being
- * unwound.
- */
-class IncludeArrayIndexOverrideUnwindPath : public CheckResultsBase {
- string indexPath() const override {
- return "a";
- }
- std::deque<Document> inputData() override {
- return {
- DOC("_id" << 0 << "a" << DOC_ARRAY(5)), DOC("_id" << 1 << "a" << 1), DOC("_id" << 2)};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 5}, {_id: 1, a: 1}]";
- }
- string expectedPreservedResultSetString() const override {
- return "[{_id: 0, a: 5}, {_id: 1, a: 1}, {_id: 2}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0}, {_id: 1, a: null}]";
- }
- string expectedPreservedIndexedResultSetString() const override {
- return "[{_id: 0, a: 0}, {_id: 1, a: null}, {_id: 2, a: null}]";
- }
-};
-
-/**
- * Test the 'includeArrayIndex' option, where the specified path is a subfield of the field that was
- * being unwound.
- */
-class IncludeArrayIndexWithinUnwindPath : public CheckResultsBase {
- string indexPath() const override {
- return "a.index";
- }
- std::deque<Document> inputData() override {
- return {DOC("_id" << 0 << "a"
- << DOC_ARRAY(100 << DOC("b" << 1) << DOC("b" << 1 << "index" << -1)))};
- }
- string expectedResultSetString() const override {
- return "[{_id: 0, a: 100}, {_id: 0, a: {b: 1}}, {_id: 0, a: {b: 1, index: -1}}]";
- }
- string expectedIndexedResultSetString() const override {
- return "[{_id: 0, a: {index: 0}},"
- " {_id: 0, a: {b: 1, index: 1}},"
- " {_id: 0, a: {b: 1, index: 2}}]";
- }
-};
-
-/** Dependent field paths. */
-class Dependencies : public Mock::Base {
-public:
- void run() {
- auto unwind =
- DocumentSourceUnwind::create(ctx(), "x.y.z", false, boost::optional<string>("index"));
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, unwind->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(1U, dependencies.fields.count("x.y.z"));
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
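-/**
- * Verify that $unwind truncates the source's sort pattern at the unwound path: of
- * {a: 1, 'x.y': 1, b: 1}, only the prefix {a: 1} survives unwinding 'x.y'.
- */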
-class OutputSort : public Mock::Base {
-public:
- void run() {
- auto unwind = DocumentSourceUnwind::create(ctx(), "x.y", false, boost::none);
- auto source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "x.y" << 1 << "b" << 1)};
-
- unwind->setSource(source.get());
-
- BSONObjSet outputSort = unwind->getOutputSorts();
- ASSERT_EQUALS(1U, outputSort.size());
- ASSERT_EQUALS(1U, outputSort.count(BSON("a" << 1)));
- }
-};
-
-//
-// Error cases.
-//
-
-/**
- * Fixture to test error cases of the $unwind stage.
- */
-class InvalidUnwindSpec : public Mock::Base, public unittest::Test {
-public:
- intrusive_ptr<DocumentSource> createUnwind(BSONObj spec) {
- auto specElem = spec.firstElement();
- return DocumentSourceUnwind::createFromBson(specElem, ctx());
- }
-};
-
-TEST_F(InvalidUnwindSpec, NonObjectNonString) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << 1)), UserException, 15981);
-}
-
-TEST_F(InvalidUnwindSpec, NoPathSpecified) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSONObj())), UserException, 28812);
-}
-
-TEST_F(InvalidUnwindSpec, NonStringPath) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path" << 2))), UserException, 28808);
-}
-
-TEST_F(InvalidUnwindSpec, NonDollarPrefixedPath) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind"
- << "somePath")),
- UserException,
- 28818);
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "somePath"))),
- UserException,
- 28818);
-}
-
-TEST_F(InvalidUnwindSpec, NonBoolPreserveNullAndEmptyArrays) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "preserveNullAndEmptyArrays"
- << 2))),
- UserException,
- 28809);
-}
-
-TEST_F(InvalidUnwindSpec, NonStringIncludeArrayIndex) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "includeArrayIndex"
- << 2))),
- UserException,
- 28810);
-}
-
-TEST_F(InvalidUnwindSpec, EmptyStringIncludeArrayIndex) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "includeArrayIndex"
- << ""))),
- UserException,
- 28810);
-}
-
-TEST_F(InvalidUnwindSpec, DollarPrefixedIncludeArrayIndex) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "includeArrayIndex"
- << "$"))),
- UserException,
- 28822);
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "includeArrayIndex"
- << "$path"))),
- UserException,
- 28822);
-}
-
-TEST_F(InvalidUnwindSpec, UnrecognizedOption) {
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "preserveNullAndEmptyArrays"
- << true
- << "foo"
- << 3))),
- UserException,
- 28811);
- ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
- << "$x"
- << "foo"
- << 3))),
- UserException,
- 28811);
-}
-} // namespace DocumentSourceUnwind
-
-namespace DocumentSourceGeoNear {
-using mongo::DocumentSourceGeoNear;
-using mongo::DocumentSourceLimit;
-
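-// $geoNear absorbs an immediately following $limit during optimizeAt(), keeping the smaller of
-// its current limit and the absorbed value, so a $limit larger than the current limit leaves it
-// unchanged.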
-class LimitCoalesce : public Mock::Base {
-public:
- void run() {
- intrusive_ptr<DocumentSourceGeoNear> geoNear = DocumentSourceGeoNear::create(ctx());
-
- Pipeline::SourceContainer container;
- container.push_back(geoNear);
-
- ASSERT_EQUALS(geoNear->getLimit(), DocumentSourceGeoNear::kDefaultLimit);
-
- container.push_back(DocumentSourceLimit::create(ctx(), 200));
- geoNear->optimizeAt(container.begin(), &container);
-
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(geoNear->getLimit(), DocumentSourceGeoNear::kDefaultLimit);
-
- container.push_back(DocumentSourceLimit::create(ctx(), 50));
- geoNear->optimizeAt(container.begin(), &container);
-
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(geoNear->getLimit(), 50);
-
- container.push_back(DocumentSourceLimit::create(ctx(), 30));
- geoNear->optimizeAt(container.begin(), &container);
-
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_EQUALS(geoNear->getLimit(), 30);
- }
-};
-
-class OutputSort : public Mock::Base {
-public:
- void run() {
- BSONObj queryObj = fromjson(
- "{geoNear: { near: {type: 'Point', coordinates: [0, 0]}, distanceField: 'dist', "
- "maxDistance: 2}}");
- intrusive_ptr<DocumentSource> geoNear =
- DocumentSourceGeoNear::createFromBson(queryObj.firstElement(), ctx());
-
- BSONObjSet outputSort = geoNear->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.count(BSON("dist" << -1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 1U);
- }
-};
-
-} // namespace DocumentSourceGeoNear
-
-namespace DocumentSourceMatch {
-using mongo::DocumentSourceMatch;
-
-using std::unique_ptr;
-
-// Helpers to make a DocumentSourceMatch from a query object or JSON string.
-intrusive_ptr<DocumentSourceMatch> makeMatch(const BSONObj& query) {
- intrusive_ptr<DocumentSource> uncasted = DocumentSourceMatch::createFromBson(
- BSON("$match" << query).firstElement(), new ExpressionContext());
- return dynamic_cast<DocumentSourceMatch*>(uncasted.get());
-}
-intrusive_ptr<DocumentSourceMatch> makeMatch(const string& queryJson) {
- return makeMatch(fromjson(queryJson));
-}
-
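-// redactSafePortion() returns the part of the predicate that can safely be evaluated before a
-// $redact stage; roughly, only conjunctions of exact-match-style predicates survive, while
-// anything whose semantics depend on missing or null fields is dropped. The cases below document
-// the current behavior rather than a formal specification.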
-class RedactSafePortion {
-public:
- void test(string input, string safePortion) {
- try {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch(input);
- ASSERT_BSONOBJ_EQ(match->redactSafePortion(), fromjson(safePortion));
- } catch (...) {
- unittest::log() << "Problem with redactSafePortion() of: " << input;
- throw;
- }
- }
-
- void run() {
- // Empty
- test("{}", "{}");
-
- // Basic allowed things
- test("{a:1}", "{a:1}");
-
- test("{a:'asdf'}", "{a:'asdf'}");
-
- test("{a:/asdf/i}", "{a:/asdf/i}");
-
- test("{a: {$regex: 'adsf'}}", "{a: {$regex: 'adsf'}}");
-
- test("{a: {$regex: 'adsf', $options: 'i'}}", "{a: {$regex: 'adsf', $options: 'i'}}");
-
- test("{a: {$mod: [1, 0]}}", "{a: {$mod: [1, 0]}}");
-
- test("{a: {$type: 1}}", "{a: {$type: 1}}");
-
- // Basic disallowed things
- test("{a: null}", "{}");
-
- test("{a: {}}", "{}");
-
- test("{a: []}", "{}");
-
- test("{'a.0': 1}", "{}");
-
- test("{'a.0.b': 1}", "{}");
-
- test("{a: {$ne: 1}}", "{}");
-
- test("{a: {$nin: [1, 2, 3]}}", "{}");
-
- test("{a: {$exists: true}}", // could be allowed but currently isn't
- "{}");
-
- test("{a: {$exists: false}}", // can never be allowed
- "{}");
-
- test("{a: {$size: 1}}", "{}");
-
- test("{$nor: [{a:1}]}", "{}");
-
- // Combinations
- test("{a:1, b: 'asdf'}", "{a:1, b: 'asdf'}");
-
- test("{a:1, b: null}", "{a:1}");
-
- test("{a:null, b: null}", "{}");
-
- // $elemMatch
-
- test("{a: {$elemMatch: {b: 1}}}", "{a: {$elemMatch: {b: 1}}}");
-
- test("{a: {$elemMatch: {b:null}}}", "{}");
-
- test("{a: {$elemMatch: {b:null, c:1}}}", "{a: {$elemMatch: {c: 1}}}");
-
- // explicit $and
- test("{$and:[{a: 1}]}", "{$and:[{a: 1}]}");
-
- test("{$and:[{a: 1}, {b: null}]}", "{$and:[{a: 1}]}");
-
- test("{$and:[{a: 1}, {b: null, c:1}]}", "{$and:[{a: 1}, {c:1}]}");
-
- test("{$and:[{a: null}, {b: null}]}", "{}");
-
- // explicit $or
- test("{$or:[{a: 1}]}", "{$or:[{a: 1}]}");
-
- test("{$or:[{a: 1}, {b: null}]}", "{}");
-
- test("{$or:[{a: 1}, {b: null, c:1}]}", "{$or:[{a: 1}, {c:1}]}");
-
- test("{$or:[{a: null}, {b: null}]}", "{}");
-
- test("{}", "{}");
-
- // $all and $in
- test("{a: {$all: [1, 0]}}", "{a: {$all: [1, 0]}}");
-
- test("{a: {$all: [1, 0, null]}}", "{a: {$all: [1, 0]}}");
-
- test("{a: {$all: [{$elemMatch: {b:1}}]}}", // could be allowed but currently isn't
- "{}");
-
- test("{a: {$in: [1, 0]}}", "{a: {$in: [1, 0]}}");
-
- test("{a: {$in: [1, 0, null]}}", "{}");
-
- {
- const char* comparisonOps[] = {"$gt", "$lt", "$gte", "$lte", NULL};
- for (int i = 0; comparisonOps[i]; i++) {
- const char* op = comparisonOps[i];
- test(string("{a: {") + op + ": 1}}", string("{a: {") + op + ": 1}}");
-
- // $elemMatch takes direct expressions ...
- test(string("{a: {$elemMatch: {") + op + ": 1}}}",
- string("{a: {$elemMatch: {") + op + ": 1}}}");
-
- // ... or top-level style full matches
- test(string("{a: {$elemMatch: {b: {") + op + ": 1}}}}",
- string("{a: {$elemMatch: {b: {") + op + ": 1}}}}");
-
- test(string("{a: {") + op + ": null}}", "{}");
-
- test(string("{a: {") + op + ": {}}}", "{}");
-
- test(string("{a: {") + op + ": []}}", "{}");
-
- test(string("{'a.0': {") + op + ": null}}", "{}");
-
- test(string("{'a.0.b': {") + op + ": null}}", "{}");
- }
- }
- }
-};
-
-class DependenciesOrExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{$or: [{a: 1}, {'x.y': {$gt: 4}}]}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.count("x.y"));
- ASSERT_EQUALS(2U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesTextExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{$text: {$search: 'hello'} }");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, match->getDependencies(&dependencies));
- ASSERT_EQUALS(true, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesGTEExpression {
-public:
- void run() {
- // Parses to {a: {$eq: {notAField: {$gte: 4}}}}.
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{a: {notAField: {$gte: 4}}}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesElemMatchExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{a: {$elemMatch: {c: {$gte: 4}}}}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a.c"));
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(2U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesElemMatchWithNoSubfield {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{a: {$elemMatch: {$gt: 1, $lt: 5}}}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesNotExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{b: {$not: {$gte: 4}}}}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("b"));
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesNorExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match =
- makeMatch("{$nor: [{'a.b': {$gte: 4}}, {'b.c': {$in: [1, 2]}}]}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a.b"));
- ASSERT_EQUALS(1U, dependencies.fields.count("b.c"));
- ASSERT_EQUALS(2U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesCommentExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{$comment: 'misleading?'}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(0U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
-class DependenciesCommentMatchExpression {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match = makeMatch("{a: 4, $comment: 'irrelevant'}");
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, match->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.size());
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
- }
-};
-
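-// Adjacent $match stages coalesce into a single stage during optimizeAt(); the queries are
-// combined under $and, and repeated coalescing nests the existing $and, as the assertions below
-// show.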
-class Coalesce {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMatch> match1 = makeMatch(BSON("a" << 1));
- intrusive_ptr<DocumentSourceMatch> match2 = makeMatch(BSON("b" << 1));
- intrusive_ptr<DocumentSourceMatch> match3 = makeMatch(BSON("c" << 1));
-
- Pipeline::SourceContainer container;
-
- // Check initial state
- ASSERT_BSONOBJ_EQ(match1->getQuery(), BSON("a" << 1));
- ASSERT_BSONOBJ_EQ(match2->getQuery(), BSON("b" << 1));
- ASSERT_BSONOBJ_EQ(match3->getQuery(), BSON("c" << 1));
-
- container.push_back(match1);
- container.push_back(match2);
- match1->optimizeAt(container.begin(), &container);
-
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_BSONOBJ_EQ(match1->getQuery(), fromjson("{'$and': [{a:1}, {b:1}]}"));
-
- container.push_back(match3);
- match1->optimizeAt(container.begin(), &container);
- ASSERT_EQUALS(container.size(), 1U);
- ASSERT_BSONOBJ_EQ(match1->getQuery(),
- fromjson("{'$and': [{'$and': [{a:1}, {b:1}]},"
- "{c:1}]}"));
- }
-};
-
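-// getObjectForMatch() extracts from a Document only the top-level fields needed to evaluate the
-// given dotted paths; a path like 'b.c' pulls in the entire top-level field 'b', and fields
-// missing from the input are simply omitted from the result.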
-TEST(ObjectForMatch, ShouldExtractTopLevelFieldIfDottedFieldNeeded) {
- Document input(fromjson("{a: 1, b: {c: 1, d: 1}}"));
- BSONObj expected = fromjson("{b: {c: 1, d: 1}}");
- ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b.c"}));
-}
-
-TEST(ObjectForMatch, ShouldExtractEntireArray) {
- Document input(fromjson("{a: [1, 2, 3], b: 1}"));
- BSONObj expected = fromjson("{a: [1, 2, 3]}");
- ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"a"}));
-}
-
-TEST(ObjectForMatch, ShouldOnlyAddPrefixedFieldOnceIfTwoDottedSubfields) {
- Document input(fromjson("{a: 1, b: {c: 1, f: {d: {e: 1}}}}"));
- BSONObj expected = fromjson("{b: {c: 1, f: {d: {e: 1}}}}");
- ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b.f", "b.f.d.e"}));
-}
-
-TEST(ObjectForMatch, MissingFieldShouldNotAppearInResult) {
- Document input(fromjson("{a: 1}"));
- BSONObj expected;
- ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"b", "c"}));
-}
-
-TEST(ObjectForMatch, ShouldSerializeNothingIfNothingIsNeeded) {
- Document input(fromjson("{a: 1, b: {c: 1}}"));
- BSONObj expected;
- ASSERT_BSONOBJ_EQ(expected,
- DocumentSourceMatch::getObjectForMatch(input, std::set<std::string>{}));
-}
-
-TEST(ObjectForMatch, ShouldExtractEntireArrayFromPrefixOfDottedField) {
- Document input(fromjson("{a: [{b: 1}, {b: 2}], c: 1}"));
- BSONObj expected = fromjson("{a: [{b: 1}, {b: 2}]}");
- ASSERT_BSONOBJ_EQ(expected, DocumentSourceMatch::getObjectForMatch(input, {"a.b"}));
-}
-
-} // namespace DocumentSourceMatch
-
-namespace DocumentSourceLookUp {
-using mongo::DocumentSourceLookUp;
-
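-// $lookup overwrites its 'as' field on every document, so an incoming sort pattern is truncated
-// at the first component that equals, or is a prefix of, the 'as' path; only the components
-// before it survive in getOutputSorts().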
-class OutputSortTruncatesOnEquality : public Mock::Base {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
- intrusive_ptr<DocumentSource> lookup =
- DocumentSourceLookUp::createFromBson(BSON("$lookup" << BSON("from"
- << "a"
- << "localField"
- << "b"
- << "foreignField"
- << "c"
- << "as"
- << "d.e"))
- .firstElement(),
- ctx());
- lookup->setSource(source.get());
-
- BSONObjSet outputSort = lookup->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 1U);
- }
-};
-
-class OutputSortTruncatesOnPrefix : public Mock::Base {
-public:
- void run() {
- intrusive_ptr<DocumentSourceMock> source = DocumentSourceMock::create();
- source->sorts = {BSON("a" << 1 << "d.e" << 1 << "c" << 1)};
- intrusive_ptr<DocumentSource> lookup =
- DocumentSourceLookUp::createFromBson(BSON("$lookup" << BSON("from"
- << "a"
- << "localField"
- << "b"
- << "foreignField"
- << "c"
- << "as"
- << "d"))
- .firstElement(),
- ctx());
- lookup->setSource(source.get());
-
- BSONObjSet outputSort = lookup->getOutputSorts();
-
- ASSERT_EQUALS(outputSort.count(BSON("a" << 1)), 1U);
- ASSERT_EQUALS(outputSort.size(), 1U);
- }
-};
-} // namespace DocumentSourceLookUp
-
-namespace DocumentSourceSortByCount {
-using mongo::DocumentSourceSortByCount;
-using mongo::DocumentSourceGroup;
-using mongo::DocumentSourceSort;
-using std::vector;
-using boost::intrusive_ptr;
-
-/**
- * Fixture to test that $sortByCount returns a DocumentSourceGroup and DocumentSourceSort.
- */
-class SortByCountReturnsGroupAndSort : public Mock::Base, public unittest::Test {
-public:
- void testCreateFromBsonResult(BSONObj sortByCountSpec, Value expectedGroupExplain) {
- vector<intrusive_ptr<DocumentSource>> result =
- DocumentSourceSortByCount::createFromBson(sortByCountSpec.firstElement(), ctx());
-
- ASSERT_EQUALS(result.size(), 2UL);
-
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
- ASSERT(groupStage);
-
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
- ASSERT(sortStage);
-
- // Serialize the DocumentSourceGroup and DocumentSourceSort from $sortByCount so that we can
- // check the explain output to make sure $group and $sort have the correct fields.
- const bool explain = true;
- vector<Value> explainedStages;
- groupStage->serializeToArray(explainedStages, explain);
- sortStage->serializeToArray(explainedStages, explain);
- ASSERT_EQUALS(explainedStages.size(), 2UL);
-
- auto groupExplain = explainedStages[0];
- ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
-
- auto sortExplain = explainedStages[1];
- auto expectedSortExplain = Value{Document{{"sortKey", Document{{"count", -1}}}}};
- ASSERT_VALUE_EQ(sortExplain["$sort"], expectedSortExplain);
- }
-};
-
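-// For example, {$sortByCount: '$x'} is expected to desugar to the two-stage pipeline
-//   [{$group: {_id: '$x', count: {$sum: 1}}}, {$sort: {count: -1}}]
-// which is what testCreateFromBsonResult() verifies via the serialized explain output.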
-TEST_F(SortByCountReturnsGroupAndSort, ExpressionFieldPathSpec) {
- BSONObj spec = BSON("$sortByCount"
- << "$x");
- Value expectedGroupExplain =
- Value{Document{{"_id", "$x"}, {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(SortByCountReturnsGroupAndSort, ExpressionInObjectSpec) {
- BSONObj spec = BSON("$sortByCount" << BSON("$floor"
- << "$x"));
- Value expectedGroupExplain =
- Value{Document{{"_id", Document{{"$floor", Value{BSON_ARRAY("$x")}}}},
- {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
- testCreateFromBsonResult(spec, expectedGroupExplain);
-
- spec = BSON("$sortByCount" << BSON("$eq" << BSON_ARRAY("$x" << 15)));
- expectedGroupExplain =
- Value{Document{{"_id", Document{{"$eq", Value{BSON_ARRAY("$x" << BSON("$const" << 15))}}}},
- {"count", Document{{"$sum", Document{{"$const", 1}}}}}}};
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-/**
- * Fixture to test error cases of the $sortByCount stage.
- */
-class InvalidSortByCountSpec : public Mock::Base, public unittest::Test {
-public:
- vector<intrusive_ptr<DocumentSource>> createSortByCount(BSONObj sortByCountSpec) {
- auto specElem = sortByCountSpec.firstElement();
- return DocumentSourceSortByCount::createFromBson(specElem, ctx());
- }
-};
-
-TEST_F(InvalidSortByCountSpec, NonObjectNonStringSpec) {
- BSONObj spec = BSON("$sortByCount" << 1);
- ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40149);
-
- spec = BSON("$sortByCount" << BSONNULL);
- ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40149);
-}
-
-TEST_F(InvalidSortByCountSpec, NonExpressionInObjectSpec) {
- BSONObj spec = BSON("$sortByCount" << BSON("field1"
- << "$x"));
- ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40147);
-}
-
-TEST_F(InvalidSortByCountSpec, NonFieldPathStringSpec) {
- BSONObj spec = BSON("$sortByCount"
- << "test");
- ASSERT_THROWS_CODE(createSortByCount(spec), UserException, 40148);
-}
-} // namespace DocumentSourceSortByCount
-
-namespace DocumentSourceCount {
-using mongo::DocumentSourceCount;
-using mongo::DocumentSourceGroup;
-using mongo::DocumentSourceProject;
-using std::vector;
-using boost::intrusive_ptr;
-
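-// {$count: 'myCount'} is expected to desugar to the two-stage pipeline
-//   [{$group: {_id: null, myCount: {$sum: 1}}}, {$project: {_id: 0, myCount: 1}}]
-// which the fixture below verifies via the serialized explain output.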
-class CountReturnsGroupAndProjectStages : public Mock::Base, public unittest::Test {
-public:
- void testCreateFromBsonResult(BSONObj countSpec) {
- vector<intrusive_ptr<DocumentSource>> result =
- DocumentSourceCount::createFromBson(countSpec.firstElement(), ctx());
-
- ASSERT_EQUALS(result.size(), 2UL);
-
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
- ASSERT(groupStage);
-
- // Project stages are actually implemented as SingleDocumentTransformations.
- const auto* projectStage =
- dynamic_cast<DocumentSourceSingleDocumentTransformation*>(result[1].get());
- ASSERT(projectStage);
-
- const bool explain = true;
- vector<Value> explainedStages;
- groupStage->serializeToArray(explainedStages, explain);
- projectStage->serializeToArray(explainedStages, explain);
- ASSERT_EQUALS(explainedStages.size(), 2UL);
-
- StringData countName = countSpec.firstElement().valueStringData();
- Value expectedGroupExplain =
- Value{Document{{"_id", Document{{"$const", BSONNULL}}},
- {countName, Document{{"$sum", Document{{"$const", 1}}}}}}};
- auto groupExplain = explainedStages[0];
- ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
-
- Value expectedProjectExplain = Value{Document{{"_id", false}, {countName, true}}};
- auto projectExplain = explainedStages[1];
- ASSERT_VALUE_EQ(projectExplain["$project"], expectedProjectExplain);
- }
-};
-
-TEST_F(CountReturnsGroupAndProjectStages, ValidStringSpec) {
- BSONObj spec = BSON("$count"
- << "myCount");
- testCreateFromBsonResult(spec);
-
- spec = BSON("$count"
- << "quantity");
- testCreateFromBsonResult(spec);
-}
-
-class InvalidCountSpec : public Mock::Base, public unittest::Test {
-public:
- vector<intrusive_ptr<DocumentSource>> createCount(BSONObj countSpec) {
- auto specElem = countSpec.firstElement();
- return DocumentSourceCount::createFromBson(specElem, ctx());
- }
-};
-
-TEST_F(InvalidCountSpec, NonStringSpec) {
- BSONObj spec = BSON("$count" << 1);
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40156);
-
- spec = BSON("$count" << BSON("field1"
- << "test"));
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40156);
-}
-
-TEST_F(InvalidCountSpec, EmptyStringSpec) {
- BSONObj spec = BSON("$count"
- << "");
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40157);
-}
-
-TEST_F(InvalidCountSpec, FieldPathSpec) {
- BSONObj spec = BSON("$count"
- << "$x");
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40158);
-}
-
-TEST_F(InvalidCountSpec, EmbeddedNullByteSpec) {
- BSONObj spec = BSON("$count"
- << "te\0st"_sd);
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40159);
-}
-
-TEST_F(InvalidCountSpec, PeriodInStringSpec) {
- BSONObj spec = BSON("$count"
- << "test.string");
- ASSERT_THROWS_CODE(createCount(spec), UserException, 40160);
-}
-} // namespace DocumentSourceCount
-
-namespace DocumentSourceBucket {
-using mongo::DocumentSourceBucket;
-using mongo::DocumentSourceGroup;
-using mongo::DocumentSourceSort;
-using mongo::DocumentSourceMock;
-using std::vector;
-using boost::intrusive_ptr;
-
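-// $bucket desugars to a $group whose _id is a $switch that assigns each document to the bucket
-// whose boundaries contain its groupBy value, followed by a $sort on _id. For example,
-//   {$bucket: {groupBy: '$x', boundaries: [0, 2], default: 'other'}}
-// groups on a $switch with one branch for [0, 2) and default 'other', and adds a default 'count'
-// accumulator of {$sum: 1} when no output is specified.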
-class BucketReturnsGroupAndSort : public Mock::Base, public unittest::Test {
-public:
- void testCreateFromBsonResult(BSONObj bucketSpec, Value expectedGroupExplain) {
- vector<intrusive_ptr<DocumentSource>> result =
- DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), ctx());
-
- ASSERT_EQUALS(result.size(), 2UL);
-
- const auto* groupStage = dynamic_cast<DocumentSourceGroup*>(result[0].get());
- ASSERT(groupStage);
-
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(result[1].get());
- ASSERT(sortStage);
-
- // Serialize the DocumentSourceGroup and DocumentSourceSort from $bucket so that we can
- // check the explain output to make sure $group and $sort have the correct fields.
- const bool explain = true;
- vector<Value> explainedStages;
- groupStage->serializeToArray(explainedStages, explain);
- sortStage->serializeToArray(explainedStages, explain);
- ASSERT_EQUALS(explainedStages.size(), 2UL);
-
- auto groupExplain = explainedStages[0];
- ASSERT_VALUE_EQ(groupExplain["$group"], expectedGroupExplain);
-
- auto sortExplain = explainedStages[1];
-
- auto expectedSortExplain = Value{Document{{"sortKey", Document{{"_id", 1}}}}};
- ASSERT_VALUE_EQ(sortExplain["$sort"], expectedSortExplain);
- }
-};
-
-TEST_F(BucketReturnsGroupAndSort, BucketUsesDefaultOutputWhenNoOutputSpecified) {
- const auto spec =
- fromjson("{$bucket : {groupBy :'$x', boundaries : [ 0, 2 ], default : 'other'}}");
- auto expectedGroupExplain =
- Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : "
- "0}]}, {$lt : ['$x', {$const : 2}]}]}, then : {$const : 0}}], default : "
- "{$const : 'other'}}}, count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenOutputSpecified) {
- const auto spec = fromjson(
- "{$bucket : {groupBy : '$x', boundaries : [0, 2], output : { number : {$sum : 1}}}}");
- auto expectedGroupExplain = Value(fromjson(
- "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
- "['$x', {$const : 2}]}]}, then : {$const : 0}}]}}, number : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenNoDefaultSpecified) {
- const auto spec = fromjson("{$bucket : { groupBy : '$x', boundaries : [0, 2]}}");
- auto expectedGroupExplain = Value(fromjson(
- "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
- "['$x', {$const : 2}]}]}, then : {$const : 0}}]}}, count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenBoundariesAreSameCanonicalType) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1.5]}}");
- auto expectedGroupExplain = Value(fromjson(
- "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
- "['$x', {$const : 1.5}]}]}, then : {$const : 0}}]}},count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenBoundariesAreConstantExpressions) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, {$add : [4, 5]}]}}");
- auto expectedGroupExplain = Value(fromjson(
- "{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : 0}]}, {$lt : "
- "['$x', {$const : 9}]}]}, then : {$const : 0}}]}}, count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWhenDefaultIsConstantExpression) {
- const auto spec =
- fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1], default: {$add : [4, 5]}}}");
- auto expectedGroupExplain =
- Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const :"
- "0}]}, {$lt : ['$x', {$const : 1}]}]}, then : {$const : 0}}], default : "
- "{$const : 9}}}, count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-TEST_F(BucketReturnsGroupAndSort, BucketSucceedsWithMultipleBoundaryValues) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2]}}");
- auto expectedGroupExplain =
- Value(fromjson("{_id : {$switch : {branches : [{case : {$and : [{$gte : ['$x', {$const : "
- "0}]}, {$lt : ['$x', {$const : 1}]}]}, then : {$const : 0}}, {case : {$and "
- ": [{$gte : ['$x', {$const : 1}]}, {$lt : ['$x', {$const : 2}]}]}, then : "
- "{$const : 1}}]}}, count : {$sum : {$const : 1}}}"));
-
- testCreateFromBsonResult(spec, expectedGroupExplain);
-}
-
-class InvalidBucketSpec : public Mock::Base, public unittest::Test {
-public:
- vector<intrusive_ptr<DocumentSource>> createBucket(BSONObj bucketSpec) {
- auto sources = DocumentSourceBucket::createFromBson(bucketSpec.firstElement(), ctx());
- for (auto&& source : sources) {
- source->injectExpressionContext(ctx());
- }
- return sources;
- }
-};
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonObject) {
- auto spec = fromjson("{$bucket : 1}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40201);
-
- spec = fromjson("{$bucket : 'test'}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40201);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithUnknownField) {
- const auto spec =
- fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2], unknown : 'field'}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40197);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNoGroupBy) {
- const auto spec = fromjson("{$bucket : {boundaries : [0, 1, 2]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40198);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNoBoundaries) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x'}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40198);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonExpressionGroupBy) {
- auto spec = fromjson("{$bucket : {groupBy : {test : 'obj'}, boundaries : [0, 1, 2]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
-
- spec = fromjson("{$bucket : {groupBy : 'test', boundaries : [0, 1, 2]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
-
- spec = fromjson("{$bucket : {groupBy : 1, boundaries : [0, 1, 2]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40202);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonArrayBoundaries) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : 'test'}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
-
- spec = fromjson("{$bucket : {groupBy : '$x', boundaries : 1}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
-
- spec = fromjson("{$bucket : {groupBy : '$x', boundaries : {test : 'obj'}}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40200);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNotEnoughBoundaries) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40192);
-
- spec = fromjson("{$bucket : {groupBy : '$x', boundaries : []}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40192);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonConstantValueBoundaries) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : ['$x', '$y', '$z']}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40191);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithMixedTypesBoundaries) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 'test']}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40193);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonUniqueBoundaries) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 1, 2, 3]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
-
- spec = fromjson("{$bucket : {groupBy : '$x', boundaries : ['a', 'b', 'b', 'c']}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonSortedBoundaries) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [4, 5, 3, 6]}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40194);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWithNonConstantExpressionDefault) {
- const auto spec =
- fromjson("{$bucket : {groupBy : '$x', boundaries : [0, 1, 2], default : '$x'}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40195);
-}
-
-TEST_F(InvalidBucketSpec, BucketFailsWhenDefaultIsInBoundariesRange) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 4], default : 3}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40199);
-
- spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 4], default : 1}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40199);
-}
-
-TEST_F(InvalidBucketSpec, GroupFailsForBucketWithInvalidOutputField) {
- auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : 'test'}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40196);
-
- spec = fromjson(
- "{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : {number : 'test'}}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40234);
-
- spec = fromjson(
- "{$bucket : {groupBy : '$x', boundaries : [1, 2, 3], output : {'test.test' : {$sum : "
- "1}}}}");
- ASSERT_THROWS_CODE(createBucket(spec), UserException, 40235);
-}
-
-TEST_F(InvalidBucketSpec, SwitchFailsForBucketWhenNoDefaultSpecified) {
- const auto spec = fromjson("{$bucket : {groupBy : '$x', boundaries : [1, 2, 3]}}");
- vector<intrusive_ptr<DocumentSource>> bucketStages = createBucket(spec);
-
- ASSERT_EQUALS(bucketStages.size(), 2UL);
-
- auto* groupStage = dynamic_cast<DocumentSourceGroup*>(bucketStages[0].get());
- ASSERT(groupStage);
-
- const auto* sortStage = dynamic_cast<DocumentSourceSort*>(bucketStages[1].get());
- ASSERT(sortStage);
-
- auto doc = DOC("x" << 4);
- auto source = DocumentSourceMock::create(doc);
- groupStage->setSource(source.get());
- ASSERT_THROWS_CODE(groupStage->getNext(), UserException, 40066);
-}
-} // namespace DocumentSourceBucket
-
-namespace DocumentSourceBucketAuto {
-using mongo::DocumentSourceBucketAuto;
-using mongo::DocumentSourceMock;
-using std::vector;
-using std::deque;
-using boost::intrusive_ptr;
-
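-// $bucketAuto sorts the incoming documents by the groupBy value and splits them into the
-// requested number of approximately equal-sized buckets. Identical groupBy values are never
-// split across buckets: when a tentative boundary falls inside a run of duplicates, the whole
-// run is absorbed into the lower bucket, so fewer buckets than requested may be produced.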
-class BucketAutoTests : public Mock::Base, public unittest::Test {
-public:
- intrusive_ptr<DocumentSource> createBucketAuto(BSONObj bucketAutoSpec) {
- return DocumentSourceBucketAuto::createFromBson(bucketAutoSpec.firstElement(), ctx());
- }
-
- vector<Document> getResults(BSONObj bucketAutoSpec, deque<Document> docs) {
- auto bucketAutoStage = createBucketAuto(bucketAutoSpec);
- assertBucketAutoType(bucketAutoStage);
-
- auto source = DocumentSourceMock::create(docs);
- bucketAutoStage->setSource(source.get());
-
- vector<Document> results;
- for (auto next = bucketAutoStage->getNext(); next.isAdvanced();
- next = bucketAutoStage->getNext()) {
- results.push_back(next.releaseDocument());
- }
-
- return results;
- }
-
- void testSerialize(BSONObj bucketAutoSpec, BSONObj expectedObj) {
- auto bucketAutoStage = createBucketAuto(bucketAutoSpec);
- assertBucketAutoType(bucketAutoStage);
-
- const bool explain = true;
- vector<Value> explainedStages;
- bucketAutoStage->serializeToArray(explainedStages, explain);
- ASSERT_EQUALS(explainedStages.size(), 1UL);
-
- Value expectedExplain = Value(expectedObj);
-
- auto bucketAutoExplain = explainedStages[0];
- ASSERT_VALUE_EQ(bucketAutoExplain["$bucketAuto"], expectedExplain);
- }
-
-private:
- void assertBucketAutoType(intrusive_ptr<DocumentSource> documentSource) {
- const auto* bucketAutoStage = dynamic_cast<DocumentSourceBucketAuto*>(documentSource.get());
- ASSERT(bucketAutoStage);
- }
-};
-
-TEST_F(BucketAutoTests, ReturnsNoBucketsWhenSourceIsEmpty) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
- auto results = getResults(bucketAutoSpec, {});
- ASSERT_EQUALS(results.size(), 0UL);
-}
-
-TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWhenAllUniqueValues) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
-
- // Values are 1, 2, 3, 4
- auto intDocs = {Document{{"x", 4}}, Document{{"x", 1}}, Document{{"x", 3}}, Document{{"x", 2}}};
- auto results = getResults(bucketAutoSpec, intDocs);
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 4}, count : 4}")));
-
- // Values are 'a', 'b', 'c', 'd'
- auto stringDocs = {
- Document{{"x", "d"}}, Document{{"x", "b"}}, Document{{"x", "a"}}, Document{{"x", "c"}}};
- results = getResults(bucketAutoSpec, stringDocs);
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 'a', max : 'd'}, count : 4}")));
-}
-
-TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWithNonUniqueValues) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
-
- // Values are 1, 2, 7, 7, 7
- auto docs = {Document{{"x", 7}},
- Document{{"x", 1}},
- Document{{"x", 7}},
- Document{{"x", 2}},
- Document{{"x", 7}}};
- auto results = getResults(bucketAutoSpec, docs);
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 7}, count : 5}")));
-}
-
-TEST_F(BucketAutoTests, Returns1Of1RequestedBucketWhen1ValueInSource) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets: 1}}");
- auto intDocs = {Document{{"x", 1}}};
- auto results = getResults(bucketAutoSpec, intDocs);
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 1}, count : 1}")));
-
- auto stringDocs = {Document{{"x", "a"}}};
- results = getResults(bucketAutoSpec, stringDocs);
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 'a', max : 'a'}, count : 1}")));
-}
-
-TEST_F(BucketAutoTests, Returns2Of2RequestedBucketsWhenSmallestValueHasManyDuplicates) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
-
- // Values are 1, 1, 1, 1, 2
- auto docs = {Document{{"x", 1}},
- Document{{"x", 1}},
- Document{{"x", 1}},
- Document{{"x", 2}},
- Document{{"x", 1}}};
- auto results = getResults(bucketAutoSpec, docs);
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 2}, count : 4}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
-}
-
-TEST_F(BucketAutoTests, Returns2Of2RequestedBucketsWhenLargestValueHasManyDuplicates) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
-
- // Values are 0, 1, 2, 3, 4, 5, 5, 5, 5
- auto docs = {Document{{"x", 5}},
- Document{{"x", 0}},
- Document{{"x", 2}},
- Document{{"x", 3}},
- Document{{"x", 5}},
- Document{{"x", 1}},
- Document{{"x", 5}},
- Document{{"x", 4}},
- Document{{"x", 5}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 5}, count : 5}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 5, max : 5}, count : 4}")));
-}
-
-TEST_F(BucketAutoTests, Returns3Of3RequestedBucketsWhenAllUniqueValues) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
-
- // Values are 0, 1, 2, 3, 4, 5, 6, 7
- auto docs = {Document{{"x", 2}},
- Document{{"x", 4}},
- Document{{"x", 1}},
- Document{{"x", 7}},
- Document{{"x", 0}},
- Document{{"x", 5}},
- Document{{"x", 3}},
- Document{{"x", 6}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 3UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 3}, count : 3}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 3, max : 6}, count : 3}")));
- ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 6, max : 7}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, Returns2Of3RequestedBucketsWhenLargestValueHasManyDuplicates) {
-    // In this case, two buckets will be made because the approximate bucket size calculated will
-    // be 7/3, which rounds to 2. Therefore, the boundaries will be calculated so that values 0
-    // and 1 fall into the first bucket, and all of the documents with value 2 fall into the
-    // second.
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
-
- // Values are 0, 1, 2, 2, 2, 2, 2
- auto docs = {Document{{"x", 2}},
- Document{{"x", 0}},
- Document{{"x", 2}},
- Document{{"x", 2}},
- Document{{"x", 1}},
- Document{{"x", 2}},
- Document{{"x", 2}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 2}, count : 2}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 2, max : 2}, count : 5}")));
-}
-
-TEST_F(BucketAutoTests, Returns1Of3RequestedBucketsWhenLargestValueHasManyDuplicates) {
- // In this case, one bucket will be made because the approximate bucket size calculated will be
- // 8/3, which rounds to 3. Therefore, the boundaries will be calculated so that values 0, 1, and
- // 2 fall into the first bucket. Since 2 is repeated many times, all of the 2 values will be
- // pulled into the first bucket.
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
-
- // Values are 0, 1, 2, 2, 2, 2, 2, 2
- auto docs = {Document{{"x", 2}},
- Document{{"x", 2}},
- Document{{"x", 0}},
- Document{{"x", 2}},
- Document{{"x", 2}},
- Document{{"x", 2}},
- Document{{"x", 1}},
- Document{{"x", 2}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 2}, count : 8}")));
-}
-
-TEST_F(BucketAutoTests, Returns3Of3RequestedBucketsWhen3ValuesInSource) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 3}}");
- auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 3UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 1}, count : 1}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 1, max : 2}, count : 1}")));
- ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
-}
-
-TEST_F(BucketAutoTests, Returns3Of10RequestedBucketsWhen3ValuesInSource) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 10}}");
- auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 3UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 1}, count : 1}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 1, max : 2}, count : 1}")));
- ASSERT_DOCUMENT_EQ(results[2], Document(fromjson("{_id : {min : 2, max : 2}, count : 1}")));
-}
-
-TEST_F(BucketAutoTests, EvaluatesAccumulatorsInOutputField) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output : {avg : {$avg : '$x'}}}}");
- auto docs = {Document{{"x", 0}}, Document{{"x", 2}}, Document{{"x", 4}}, Document{{"x", 6}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 4}, avg : 1}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 4, max : 6}, avg : 5}")));
-}
-
-TEST_F(BucketAutoTests, EvaluatesNonFieldPathExpressionInGroupByField) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : {$add : ['$x', 1]}, buckets : 2}}");
- auto docs = {Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 2}}, Document{{"x", 3}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 1, max : 3}, count : 2}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 3, max : 4}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, RespectsCanonicalTypeOrderingOfValues) {
- auto bucketAutoSpec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
- auto docs = {Document{{"x", "a"}},
- Document{{"x", 1}},
- Document{{"x", "b"}},
- Document{{"x", 2}},
- Document{{"x", 0.0}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0.0, max : 'a'}, count : 3}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 'a', max : 'b'}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, SourceNameIsBucketAuto) {
- auto bucketAuto = createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}"));
- ASSERT_EQUALS(std::string(bucketAuto->getSourceName()), "$bucketAuto");
-}
-
-TEST_F(BucketAutoTests, ShouldAddDependenciesOfGroupByFieldAndComputedFields) {
- auto bucketAuto =
- createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output: {field1 : "
- "{$sum : '$a'}, field2 : {$avg : '$b'}}}}"));
-
- DepsTracker dependencies;
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
- ASSERT_EQUALS(3U, dependencies.fields.size());
-
- // Dependency from 'groupBy'
- ASSERT_EQUALS(1U, dependencies.fields.count("x"));
-
- // Dependencies from 'output'
- ASSERT_EQUALS(1U, dependencies.fields.count("a"));
- ASSERT_EQUALS(1U, dependencies.fields.count("b"));
-
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(false, dependencies.getNeedTextScore());
-}
-
-TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromGroupByField) {
- auto bucketAuto =
- createBucketAuto(fromjson("{$bucketAuto : {groupBy : {$meta: 'textScore'}, buckets : 2}}"));
-
- DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
- ASSERT_EQUALS(0U, dependencies.fields.size());
-
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(true, dependencies.getNeedTextScore());
-}
-
-TEST_F(BucketAutoTests, ShouldNeedTextScoreInDependenciesFromOutputField) {
- auto bucketAuto =
- createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output: {avg : "
- "{$avg : {$meta : 'textScore'}}}}}"));
-
- DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::EXHAUSTIVE_ALL, bucketAuto->getDependencies(&dependencies));
- ASSERT_EQUALS(1U, dependencies.fields.size());
-
- // Dependency from 'groupBy'
- ASSERT_EQUALS(1U, dependencies.fields.count("x"));
-
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(true, dependencies.getNeedTextScore());
-}
-
-TEST_F(BucketAutoTests, SerializesDefaultAccumulatorIfOutputFieldIsNotSpecified) {
- BSONObj spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2}}");
- BSONObj expected =
- fromjson("{groupBy : '$x', buckets : 2, output : {count : {$sum : {$const : 1}}}}");
-
- testSerialize(spec, expected);
-}
-
-TEST_F(BucketAutoTests, SerializesOutputFieldIfSpecified) {
- BSONObj spec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, output : {field : {$avg : '$x'}}}}");
- BSONObj expected = fromjson("{groupBy : '$x', buckets : 2, output : {field : {$avg : '$x'}}}");
-
- testSerialize(spec, expected);
-}
-
-TEST_F(BucketAutoTests, SerializesGranularityFieldIfSpecified) {
- BSONObj spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
- BSONObj expected = fromjson(
- "{groupBy : '$x', buckets : 2, granularity : 'R5', output : {count : {$sum : {$const : "
- "1}}}}");
-
- testSerialize(spec, expected);
-}
-
-TEST_F(BucketAutoTests, ShouldBeAbleToReParseSerializedStage) {
- auto bucketAuto =
- createBucketAuto(fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity: 'R5', "
- "output : {field : {$avg : '$x'}}}}"));
- vector<Value> serialization;
- bucketAuto->serializeToArray(serialization);
- ASSERT_EQUALS(serialization.size(), 1UL);
- ASSERT_EQUALS(serialization[0].getType(), BSONType::Object);
-
- ASSERT_EQUALS(serialization[0].getDocument().size(), 1UL);
- ASSERT_EQUALS(serialization[0].getDocument()["$bucketAuto"].getType(), BSONType::Object);
-
- auto serializedBson = serialization[0].getDocument().toBson();
- auto roundTripped = createBucketAuto(serializedBson);
-
- vector<Value> newSerialization;
- roundTripped->serializeToArray(newSerialization);
-
- ASSERT_EQUALS(newSerialization.size(), 1UL);
- ASSERT_VALUE_EQ(newSerialization[0], serialization[0]);
-}
-
-TEST_F(BucketAutoTests, ReturnsNoBucketsWhenNoBucketsAreSpecifiedInCreate) {
- auto docs = {Document{{"x", 1}}};
- auto mock = DocumentSourceMock::create(docs);
- auto bucketAuto = DocumentSourceBucketAuto::create(ctx());
-
- bucketAuto->setSource(mock.get());
- ASSERT(bucketAuto->getNext().isEOF());
-}
-
-TEST_F(BucketAutoTests, FailsWithInvalidNumberOfBuckets) {
- auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 'test'}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40241);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2147483648}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40242);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1.5}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40242);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 0}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40243);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : -1}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40243);
-}
-
-TEST_F(BucketAutoTests, FailsWithNonExpressionGroupBy) {
- auto spec = fromjson("{$bucketAuto : {groupBy : 'test', buckets : 1}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40239);
-
- spec = fromjson("{$bucketAuto : {groupBy : {test : 'test'}, buckets : 1}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40239);
-}
-
-TEST_F(BucketAutoTests, FailsWithNonObjectArgument) {
- auto spec = fromjson("{$bucketAuto : 'test'}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40240);
-
- spec = fromjson("{$bucketAuto : [1, 2, 3]}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40240);
-}
-
-TEST_F(BucketAutoTests, FailsWithNonObjectOutput) {
- auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : 'test'}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : [1, 2, 3]}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : 1}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40244);
-}
-
-TEST_F(BucketAutoTests, FailsWhenGroupByMissing) {
- auto spec = fromjson("{$bucketAuto : {buckets : 1}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40246);
-}
-
-TEST_F(BucketAutoTests, FailsWhenBucketsMissing) {
- auto spec = fromjson("{$bucketAuto : {groupBy : '$x'}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40246);
-}
-
-TEST_F(BucketAutoTests, FailsWithUnknownField) {
- auto spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, field : 'test'}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40245);
-}
-
-TEST_F(BucketAutoTests, FailsWithInvalidExpressionToAccumulator) {
- auto spec = fromjson(
- "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {avg : {$avg : ['$x', 1]}}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40237);
-
- spec = fromjson(
- "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {test : {$avg : '$x', $sum : "
- "'$x'}}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40238);
-}
-
-TEST_F(BucketAutoTests, FailsWithNonAccumulatorObjectOutputField) {
- auto spec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : {field : 'test'}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
-
- spec = fromjson("{$bucketAuto : {groupBy : '$x', buckets : 1, output : {field : 1}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
-
- spec = fromjson(
- "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {test : {field : 'test'}}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40234);
-}
-
-TEST_F(BucketAutoTests, FailsWithInvalidOutputFieldName) {
- auto spec = fromjson(
- "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {'field.test' : {$avg : '$x'}}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40235);
-
- spec = fromjson(
- "{$bucketAuto : {groupBy : '$x', buckets : 1, output : {'$field' : {$avg : '$x'}}}}");
- ASSERT_THROWS_CODE(createBucketAuto(spec), UserException, 40236);
-}
-
-TEST_F(BucketAutoTests, FailsWhenBufferingTooManyDocuments) {
- std::deque<Document> inputs;
- auto largeStr = std::string(1000, 'b');
- auto inputDoc = Document{{"a", largeStr}};
- ASSERT_GTE(inputDoc.getApproximateSize(), 1000UL);
- inputs.push_back(inputDoc);
- inputs.push_back(Document{{"a", largeStr}});
- auto mock = DocumentSourceMock::create(inputs);
-
- const uint64_t maxMemoryUsageBytes = 1000;
- const int numBuckets = 1;
- auto bucketAuto = DocumentSourceBucketAuto::create(ctx(), numBuckets, maxMemoryUsageBytes);
- bucketAuto->setSource(mock.get());
- ASSERT_THROWS_CODE(bucketAuto->getNext(), UserException, 16819);
-}
-
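-// The granularity tests below use the 'R5' preferred-number (Renard) series, whose values per
-// decade are 1.0, 1.6, 2.5, 4.0, and 6.3 (scaled by powers of ten: ..., 0.63, 1.0, 1.6, ...,
-// 16, 25, 40, 63, ...). The first minimum boundary is rounded down to a series value, each
-// maximum is rounded up, and zero is left unrounded.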
-TEST_F(BucketAutoTests, ShouldRoundUpMaximumBoundariesWithGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- // Values are 0, 15, 24, 30, 50
- auto docs = {Document{{"x", 24}},
- Document{{"x", 15}},
- Document{{"x", 30}},
- Document{{"x", 50}},
- Document{{"x", 0}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 25}, count : 3}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 25, max : 63}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, ShouldRoundDownFirstMinimumBoundaryWithGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- // Values are 1, 15, 24, 30, 50
- auto docs = {Document{{"x", 24}},
- Document{{"x", 15}},
- Document{{"x", 30}},
- Document{{"x", 50}},
- Document{{"x", 1}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0.63, max : 25}, count : 3}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 25, max : 63}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, ShouldAbsorbAllValuesSmallerThanAdjustedBoundaryWithGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {Document{{"x", 0}},
- Document{{"x", 5}},
- Document{{"x", 10}},
- Document{{"x", 15}},
- Document{{"x", 30}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 16}, count : 4}")));
- ASSERT_DOCUMENT_EQ(results[1], Document(fromjson("{_id : {min : 16, max : 40}, count : 1}")));
-}
-
-TEST_F(BucketAutoTests, ShouldBeAbleToAbsorbAllValuesIntoOneBucketWithGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {Document{{"x", 0}},
- Document{{"x", 5}},
- Document{{"x", 10}},
- Document{{"x", 14}},
- Document{{"x", 15}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 1UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 16}, count : 5}")));
-}
-
-TEST_F(BucketAutoTests, ShouldNotRoundZeroInFirstBucketWithGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {Document{{"x", 0}}, Document{{"x", 0}}, Document{{"x", 1}}, Document{{"x", 1}}};
- auto results = getResults(bucketAutoSpec, docs);
-
- ASSERT_EQUALS(results.size(), 2UL);
- ASSERT_DOCUMENT_EQ(results[0], Document(fromjson("{_id : {min : 0, max : 0.63}, count : 2}")));
- ASSERT_DOCUMENT_EQ(results[1],
- Document(fromjson("{_id : {min : 0.63, max : 1.6}, count : 2}")));
-}
-
-TEST_F(BucketAutoTests, ShouldFailOnNaNWhenGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {Document{{"x", 0}},
- Document{{"x", std::nan("NaN")}},
- Document{{"x", 1}},
- Document{{"x", 1}}};
- ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40259);
-}
-
-TEST_F(BucketAutoTests, ShouldFailOnNonNumericValuesWhenGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {
- Document{{"x", 0}}, Document{{"x", "test"}}, Document{{"x", 1}}, Document{{"x", 1}}};
- ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40258);
-}
-
-TEST_F(BucketAutoTests, ShouldFailOnNegativeNumbersWhenGranularitySpecified) {
- auto bucketAutoSpec =
- fromjson("{$bucketAuto : {groupBy : '$x', buckets : 2, granularity : 'R5'}}");
-
- auto docs = {Document{{"x", 0}}, Document{{"x", -1}}, Document{{"x", 1}}, Document{{"x", 2}}};
- ASSERT_THROWS_CODE(getResults(bucketAutoSpec, docs), UserException, 40260);
-}
-} // namespace DocumentSourceBucketAuto
-
-namespace DocumentSourceAddFields {
-
-using mongo::DocumentSourceMock;
-using mongo::DocumentSourceAddFields;
-
-//
-// DocumentSourceAddFields delegates most of its responsibilities to ParsedAddFields, which
-// derives from ParsedAggregationProjection. Most of the functional tests exercise
-// ParsedAddFields directly; the tests here are simpler integration tests.
-//
-
-/**
- * Class which provides useful helpers to test the functionality of the $addFields stage.
- */
-class AddFieldsTest : public Mock::Base, public unittest::Test {
-public:
- AddFieldsTest() : _mock(DocumentSourceMock::create()) {}
-
-protected:
- /**
- * Creates the $addFields stage, which can be accessed via addFields().
- */
- void createAddFields(const BSONObj& fieldsToAdd) {
- BSONObj spec = BSON("$addFields" << fieldsToAdd);
- BSONElement specElement = spec.firstElement();
- _addFields = DocumentSourceAddFields::createFromBson(specElement, ctx());
- addFields()->setSource(_mock.get());
- }
-
- DocumentSource* addFields() {
- return _addFields.get();
- }
-
- DocumentSourceMock* source() {
- return _mock.get();
- }
-
- /**
- * Assert that iterator state accessors consistently report the source is exhausted.
- */
- void assertExhausted() const {
- ASSERT(_addFields->getNext().isEOF());
- ASSERT(_addFields->getNext().isEOF());
- ASSERT(_addFields->getNext().isEOF());
- }
-
-private:
- intrusive_ptr<DocumentSource> _addFields;
- intrusive_ptr<DocumentSourceMock> _mock;
-};
-
-// Verify that the addFields stage keeps existing fields in order when replacing fields, and adds
-// new fields at the end of the document.
-TEST_F(AddFieldsTest, KeepsUnspecifiedFieldsReplacesFieldsAndAddsNewFields) {
- createAddFields(BSON("e" << 2 << "b" << BSON("c" << 3)));
- source()->queue.push_back(Document{{"a", 1}, {"b", Document{{"c", 1}}}, {"d", 1}});
- auto next = addFields()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- Document expected = Document{{"a", 1}, {"b", Document{{"c", 3}}}, {"d", 1}, {"e", 2}};
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
-}
-
-// Verify that the addFields stage optimizes expressions passed as input to added fields.
-TEST_F(AddFieldsTest, OptimizesInnerExpressions) {
- createAddFields(BSON("a" << BSON("$and" << BSON_ARRAY(BSON("$const" << true)))));
- addFields()->optimize();
- // The $and should have been replaced with its only argument.
- vector<Value> serializedArray;
- addFields()->serializeToArray(serializedArray);
- ASSERT_BSONOBJ_EQ(serializedArray[0].getDocument().toBson(),
- fromjson("{$addFields: {a: {$const: true}}}"));
-}
-
-// Verify that the addFields stage requires a valid object specification.
-TEST_F(AddFieldsTest, ShouldErrorOnNonObjectSpec) {
- // Can't use createAddFields() helper because we want to give a non-object spec.
- BSONObj spec = BSON("$addFields"
- << "foo");
- BSONElement specElement = spec.firstElement();
- ASSERT_THROWS_CODE(
- DocumentSourceAddFields::createFromBson(specElement, ctx()), UserException, 40272);
-}
-
-// Verify that multiple documents can be processed in a row with the addFields stage.
-TEST_F(AddFieldsTest, ProcessesMultipleDocuments) {
- createAddFields(BSON("a" << 10));
- source()->queue.push_back(Document{{"a", 1}, {"b", 2}});
- source()->queue.push_back(Document{{"c", 3}, {"d", 4}});
-
- auto next = addFields()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- Document expected = Document{{"a", 10}, {"b", 2}};
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
-
- next = addFields()->getNext();
- ASSERT_TRUE(next.isAdvanced());
- expected = Document{{"c", 3}, {"d", 4}, {"a", 10}};
- ASSERT_DOCUMENT_EQ(next.releaseDocument(), expected);
-
- assertExhausted();
-}
-
-// Verify that the addFields stage correctly reports its dependencies.
-TEST_F(AddFieldsTest, AddsDependenciesOfIncludedAndComputedFields) {
- createAddFields(
- fromjson("{a: true, x: '$b', y: {$and: ['$c','$d']}, z: {$meta: 'textScore'}}"));
- DepsTracker dependencies(DepsTracker::MetadataAvailable::kTextScore);
- ASSERT_EQUALS(DocumentSource::SEE_NEXT, addFields()->getDependencies(&dependencies));
- ASSERT_EQUALS(3U, dependencies.fields.size());
-
- // No implicit _id dependency.
- ASSERT_EQUALS(0U, dependencies.fields.count("_id"));
-
- // Replaced field is not dependent.
- ASSERT_EQUALS(0U, dependencies.fields.count("a"));
-
- // Field path expression dependency.
- ASSERT_EQUALS(1U, dependencies.fields.count("b"));
-
- // Nested expression dependencies.
- ASSERT_EQUALS(1U, dependencies.fields.count("c"));
- ASSERT_EQUALS(1U, dependencies.fields.count("d"));
- ASSERT_EQUALS(false, dependencies.needWholeDocument);
- ASSERT_EQUALS(true, dependencies.getNeedTextScore());
-}
-} // namespace DocumentSourceAddFields
-
-class All : public Suite {
-public:
- All() : Suite("documentsource") {}
- void setupTests() {
- add<DocumentSourceClass::Deps>();
-
- add<DocumentSourceLimit::DisposeSource>();
- add<DocumentSourceLimit::CombineLimit>();
- add<DocumentSourceLimit::DisposeSourceCascade>();
- add<DocumentSourceLimit::Dependencies>();
-
- add<DocumentSourceGroup::NonObject>();
- add<DocumentSourceGroup::EmptySpec>();
- add<DocumentSourceGroup::IdEmptyObject>();
- add<DocumentSourceGroup::IdObjectExpression>();
- add<DocumentSourceGroup::IdInvalidObjectExpression>();
- add<DocumentSourceGroup::TwoIdSpecs>();
- add<DocumentSourceGroup::IdEmptyString>();
- add<DocumentSourceGroup::IdStringConstant>();
- add<DocumentSourceGroup::IdFieldPath>();
- add<DocumentSourceGroup::IdInvalidFieldPath>();
- add<DocumentSourceGroup::IdNumericConstant>();
- add<DocumentSourceGroup::IdArrayConstant>();
- add<DocumentSourceGroup::IdRegularExpression>();
- add<DocumentSourceGroup::DollarAggregateFieldName>();
- add<DocumentSourceGroup::NonObjectAggregateSpec>();
- add<DocumentSourceGroup::EmptyObjectAggregateSpec>();
- add<DocumentSourceGroup::BadAccumulator>();
- add<DocumentSourceGroup::SumArray>();
- add<DocumentSourceGroup::MultipleAccumulatorsForAField>();
- add<DocumentSourceGroup::DuplicateAggregateFieldNames>();
- add<DocumentSourceGroup::AggregateObjectExpression>();
- add<DocumentSourceGroup::AggregateOperatorExpression>();
- add<DocumentSourceGroup::EmptyCollection>();
- add<DocumentSourceGroup::SingleDocument>();
- add<DocumentSourceGroup::TwoValuesSingleKey>();
- add<DocumentSourceGroup::TwoValuesTwoKeys>();
- add<DocumentSourceGroup::FourValuesTwoKeys>();
- add<DocumentSourceGroup::FourValuesTwoKeysTwoAccumulators>();
- add<DocumentSourceGroup::GroupNullUndefinedIds>();
- add<DocumentSourceGroup::ComplexId>();
- add<DocumentSourceGroup::UndefinedAccumulatorValue>();
- add<DocumentSourceGroup::RouterMerger>();
- add<DocumentSourceGroup::Dependencies>();
- add<DocumentSourceGroup::StringConstantIdAndAccumulatorExpressions>();
- add<DocumentSourceGroup::ArrayConstantAccumulatorExpression>();
-#if 0
- // Disabled tests until SERVER-23318 is implemented.
- add<DocumentSourceGroup::StreamingOptimization>();
- add<DocumentSourceGroup::StreamingWithMultipleIdFields>();
- add<DocumentSourceGroup::NoOptimizationIfMissingDoubleSort>();
- add<DocumentSourceGroup::NoOptimizationWithRawRoot>();
- add<DocumentSourceGroup::NoOptimizationIfUsingExpressions>();
- add<DocumentSourceGroup::StreamingWithMultipleLevels>();
- add<DocumentSourceGroup::StreamingWithConstant>();
- add<DocumentSourceGroup::StreamingWithEmptyId>();
- add<DocumentSourceGroup::StreamingWithRootSubfield>();
- add<DocumentSourceGroup::StreamingWithConstantAndFieldPath>();
- add<DocumentSourceGroup::StreamingWithFieldRepeated>();
-#endif
-
- add<DocumentSourceSort::Empty>();
- add<DocumentSourceSort::SingleValue>();
- add<DocumentSourceSort::TwoValues>();
- add<DocumentSourceSort::NonObjectSpec>();
- add<DocumentSourceSort::EmptyObjectSpec>();
- add<DocumentSourceSort::NonNumberDirectionSpec>();
- add<DocumentSourceSort::InvalidNumberDirectionSpec>();
- add<DocumentSourceSort::DescendingOrder>();
- add<DocumentSourceSort::DottedSortField>();
- add<DocumentSourceSort::CompoundSortSpec>();
- add<DocumentSourceSort::CompoundSortSpecAlternateOrder>();
- add<DocumentSourceSort::CompoundSortSpecAlternateOrderSecondField>();
- add<DocumentSourceSort::InconsistentTypeSort>();
- add<DocumentSourceSort::MixedNumericSort>();
- add<DocumentSourceSort::MissingValue>();
- add<DocumentSourceSort::NullValue>();
- add<DocumentSourceSort::TextScore>();
- add<DocumentSourceSort::RandMeta>();
- add<DocumentSourceSort::MissingObjectWithinArray>();
- add<DocumentSourceSort::ExtractArrayValues>();
- add<DocumentSourceSort::Dependencies>();
- add<DocumentSourceSort::OutputSort>();
-
- add<DocumentSourceUnwind::Empty>();
- add<DocumentSourceUnwind::EmptyArray>();
- add<DocumentSourceUnwind::MissingValue>();
- add<DocumentSourceUnwind::Null>();
- add<DocumentSourceUnwind::Undefined>();
- add<DocumentSourceUnwind::OneValue>();
- add<DocumentSourceUnwind::TwoValues>();
- add<DocumentSourceUnwind::ArrayWithNull>();
- add<DocumentSourceUnwind::TwoDocuments>();
- add<DocumentSourceUnwind::NestedArray>();
- add<DocumentSourceUnwind::NonObjectParent>();
- add<DocumentSourceUnwind::DoubleNestedArray>();
- add<DocumentSourceUnwind::SeveralDocuments>();
- add<DocumentSourceUnwind::SeveralMoreDocuments>();
- add<DocumentSourceUnwind::Dependencies>();
- add<DocumentSourceUnwind::OutputSort>();
- add<DocumentSourceUnwind::IncludeArrayIndexSubObject>();
- add<DocumentSourceUnwind::IncludeArrayIndexOverrideExisting>();
- add<DocumentSourceUnwind::IncludeArrayIndexOverrideExistingNested>();
- add<DocumentSourceUnwind::IncludeArrayIndexOverrideUnwindPath>();
- add<DocumentSourceUnwind::IncludeArrayIndexWithinUnwindPath>();
-
- add<DocumentSourceGeoNear::LimitCoalesce>();
- add<DocumentSourceGeoNear::OutputSort>();
-
- add<DocumentSourceLookUp::OutputSortTruncatesOnEquality>();
- add<DocumentSourceLookUp::OutputSortTruncatesOnPrefix>();
-
- add<DocumentSourceMatch::RedactSafePortion>();
- add<DocumentSourceMatch::Coalesce>();
- add<DocumentSourceMatch::DependenciesOrExpression>();
- add<DocumentSourceMatch::DependenciesGTEExpression>();
- add<DocumentSourceMatch::DependenciesElemMatchExpression>();
- add<DocumentSourceMatch::DependenciesElemMatchWithNoSubfield>();
- add<DocumentSourceMatch::DependenciesNotExpression>();
- add<DocumentSourceMatch::DependenciesNorExpression>();
- add<DocumentSourceMatch::DependenciesCommentExpression>();
- add<DocumentSourceMatch::DependenciesCommentMatchExpression>();
- }
-};
-
-SuiteInstance<All> myall;
-
-} // namespace DocumentSourceTests
+} // namespace
+} // namespace mongo
diff --git a/src/mongo/db/pipeline/document_source_unwind_test.cpp b/src/mongo/db/pipeline/document_source_unwind_test.cpp
new file mode 100644
index 00000000000..5bb251a64b0
--- /dev/null
+++ b/src/mongo/db/pipeline/document_source_unwind_test.cpp
@@ -0,0 +1,811 @@
+/**
+ * Copyright (C) 2016 MongoDB Inc.
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License, version 3,
+ * as published by the Free Software Foundation.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <http://www.gnu.org/licenses/>.
+ *
+ * As a special exception, the copyright holders give permission to link the
+ * code of portions of this program with the OpenSSL library under certain
+ * conditions as described in each individual source file and distribute
+ * linked combinations including the program with the OpenSSL library. You
+ * must comply with the GNU Affero General Public License in all respects
+ * for all of the code used other than as permitted herein. If you modify
+ * file(s) with this exception, you may extend this exception to your
+ * version of the file(s), but you are not obligated to do so. If you do not
+ * wish to do so, delete this exception statement from your version. If you
+ * delete this exception statement from all source files in the program,
+ * then also delete it in the license file.
+ */
+
+#include "mongo/platform/basic.h"
+
+#include <boost/intrusive_ptr.hpp>
+#include <deque>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "mongo/bson/bsonmisc.h"
+#include "mongo/bson/bsonobj.h"
+#include "mongo/bson/json.h"
+#include "mongo/db/pipeline/aggregation_context_fixture.h"
+#include "mongo/db/pipeline/dependencies.h"
+#include "mongo/db/pipeline/document_source.h"
+#include "mongo/db/pipeline/document_value_test_util.h"
+#include "mongo/db/pipeline/expression_context.h"
+#include "mongo/db/pipeline/value_comparator.h"
+#include "mongo/db/query/query_test_service_context.h"
+#include "mongo/db/service_context.h"
+#include "mongo/dbtests/dbtests.h"
+#include "mongo/stdx/memory.h"
+#include "mongo/unittest/unittest.h"
+
+namespace mongo {
+namespace {
+using boost::intrusive_ptr;
+using std::deque;
+using std::string;
+using std::unique_ptr;
+using std::vector;
+
+static const char* const ns = "unittests.document_source_unwind_tests";
+
+/**
+ * Fixture for testing execution of the $unwind stage. Note this cannot inherit from
+ * AggregationContextFixture, since that inherits from unittest::Test, and this fixture is still
+ * being used for old-style tests manually added to the suite below.
+ */
+class CheckResultsBase {
+public:
+ CheckResultsBase()
+ : _queryServiceContext(stdx::make_unique<QueryTestServiceContext>()),
+ _opCtx(_queryServiceContext->makeOperationContext()),
+ _ctx(new ExpressionContext(_opCtx.get(), AggregationRequest(NamespaceString(ns), {}))) {}
+
+ virtual ~CheckResultsBase() {}
+
+ void run() {
+ // Once with the simple syntax.
+ createSimpleUnwind();
+ assertResultsMatch(expectedResultSet(false, false));
+
+ // Once with the full syntax.
+ createUnwind(false, false);
+ assertResultsMatch(expectedResultSet(false, false));
+
+ // Once with the preserveNullAndEmptyArrays parameter.
+ createUnwind(true, false);
+ assertResultsMatch(expectedResultSet(true, false));
+
+ // Once with the includeArrayIndex parameter.
+ createUnwind(false, true);
+ assertResultsMatch(expectedResultSet(false, true));
+
+ // Once with both the preserveNullAndEmptyArrays and includeArrayIndex parameters.
+ createUnwind(true, true);
+ assertResultsMatch(expectedResultSet(true, true));
+ }
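+
+    // To illustrate (example only): given an input document {_id: 0, a: [1, 2]},
+    // all five variants above produce {_id: 0, a: 1} and {_id: 0, a: 2}, and the
+    // two includeArrayIndex variants additionally report 'index' values of 0 and 1
+    // (see the TwoValues case below).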
+
+protected:
+ virtual string unwindFieldPath() const {
+ return "$a";
+ }
+
+ virtual string indexPath() const {
+ return "index";
+ }
+
+ virtual deque<Document> inputData() {
+ return {};
+ }
+
+ /**
+ * Returns a json string representing the expected results for a normal $unwind without any
+ * options.
+ */
+ virtual string expectedResultSetString() const {
+ return "[]";
+ }
+
+ /**
+ * Returns a json string representing the expected results for a $unwind with the
+ * preserveNullAndEmptyArrays parameter set.
+ */
+ virtual string expectedPreservedResultSetString() const {
+ return expectedResultSetString();
+ }
+
+ /**
+ * Returns a json string representing the expected results for a $unwind with the
+ * includeArrayIndex parameter set.
+ */
+ virtual string expectedIndexedResultSetString() const {
+ return "[]";
+ }
+
+ /**
+ * Returns a json string representing the expected results for a $unwind with both the
+ * preserveNullAndEmptyArrays and the includeArrayIndex parameters set.
+ */
+ virtual string expectedPreservedIndexedResultSetString() const {
+ return expectedIndexedResultSetString();
+ }
+
+ intrusive_ptr<ExpressionContext> ctx() const {
+ return _ctx;
+ }
+
+private:
+ /**
+ * Initializes '_unwind' using the simple '{$unwind: '$path'}' syntax.
+ */
+ void createSimpleUnwind() {
+ auto specObj = BSON("$unwind" << unwindFieldPath());
+ _unwind = static_cast<DocumentSourceUnwind*>(
+ DocumentSourceUnwind::createFromBson(specObj.firstElement(), ctx()).get());
+ checkBsonRepresentation(false, false);
+ }
+
+ /**
+ * Initializes '_unwind' using the full '{$unwind: {path: '$path'}}' syntax.
+ */
+ void createUnwind(bool preserveNullAndEmptyArrays, bool includeArrayIndex) {
+ auto specObj =
+ DOC("$unwind" << DOC("path" << unwindFieldPath() << "preserveNullAndEmptyArrays"
+ << preserveNullAndEmptyArrays
+ << "includeArrayIndex"
+ << (includeArrayIndex ? Value(indexPath()) : Value())));
+ _unwind = static_cast<DocumentSourceUnwind*>(
+ DocumentSourceUnwind::createFromBson(specObj.toBson().firstElement(), ctx()).get());
+ checkBsonRepresentation(preserveNullAndEmptyArrays, includeArrayIndex);
+ }
+
+ /**
+ * Extracts the documents from the $unwind stage, and asserts the actual results match the
+ * expected results.
+ *
+ * '_unwind' must be initialized before calling this method.
+ */
+ void assertResultsMatch(BSONObj expectedResults) {
+ auto source = DocumentSourceMock::create(inputData());
+ _unwind->setSource(source.get());
+ // Load the results from the DocumentSourceUnwind.
+ vector<Document> resultSet;
+ for (auto output = _unwind->getNext(); output.isAdvanced(); output = _unwind->getNext()) {
+ // Get the current result.
+ resultSet.push_back(output.releaseDocument());
+ }
+ // Verify the DocumentSourceUnwind is exhausted.
+ assertEOF();
+
+ // Convert results to BSON once they all have been retrieved (to detect any errors resulting
+ // from incorrectly shared sub objects).
+ BSONArrayBuilder bsonResultSet;
+ for (vector<Document>::const_iterator i = resultSet.begin(); i != resultSet.end(); ++i) {
+ bsonResultSet << *i;
+ }
+ // Check the result set.
+ ASSERT_BSONOBJ_EQ(expectedResults, bsonResultSet.arr());
+ }
+
+ /**
+ * Check that the BSON representation generated by the source matches the BSON it was
+ * created with.
+ */
+ void checkBsonRepresentation(bool preserveNullAndEmptyArrays, bool includeArrayIndex) {
+ vector<Value> arr;
+ _unwind->serializeToArray(arr);
+ BSONObj generatedSpec = Value(arr[0]).getDocument().toBson();
+ ASSERT_BSONOBJ_EQ(expectedSerialization(preserveNullAndEmptyArrays, includeArrayIndex),
+ generatedSpec);
+ }
+
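+    // Builds the expected serialized form of the stage. For example (illustrative
+    // only), with both options set and the default paths this evaluates to
+    // {$unwind: {path: '$a', preserveNullAndEmptyArrays: true, includeArrayIndex: 'index'}}.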
+ BSONObj expectedSerialization(bool preserveNullAndEmptyArrays, bool includeArrayIndex) const {
+ return DOC("$unwind" << DOC("path" << Value(unwindFieldPath())
+ << "preserveNullAndEmptyArrays"
+ << (preserveNullAndEmptyArrays ? Value(true) : Value())
+ << "includeArrayIndex"
+ << (includeArrayIndex ? Value(indexPath()) : Value())))
+ .toBson();
+ }
+
+ /** Assert that iterator state accessors consistently report the source is exhausted. */
+ void assertEOF() const {
+ ASSERT(_unwind->getNext().isEOF());
+ ASSERT(_unwind->getNext().isEOF());
+ ASSERT(_unwind->getNext().isEOF());
+ }
+
+ BSONObj expectedResultSet(bool preserveNullAndEmptyArrays, bool includeArrayIndex) const {
+ string expectedResultsString;
+ if (preserveNullAndEmptyArrays) {
+ if (includeArrayIndex) {
+ expectedResultsString = expectedPreservedIndexedResultSetString();
+ } else {
+ expectedResultsString = expectedPreservedResultSetString();
+ }
+ } else {
+ if (includeArrayIndex) {
+ expectedResultsString = expectedIndexedResultSetString();
+ } else {
+ expectedResultsString = expectedResultSetString();
+ }
+ }
+ // fromjson() cannot parse an array, so place the array within an object.
+ BSONObj wrappedResult = fromjson(string("{'':") + expectedResultsString + "}");
+ return wrappedResult[""].embeddedObject().getOwned();
+ }
+
+ unique_ptr<QueryTestServiceContext> _queryServiceContext;
+ ServiceContext::UniqueOperationContext _opCtx;
+ intrusive_ptr<ExpressionContext> _ctx;
+ intrusive_ptr<DocumentSourceUnwind> _unwind;
+};
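+
+// Each case below overrides inputData() and the relevant expected*ResultSetString()
+// hooks. For example (illustrative), OneValue feeds {_id: 0, a: [1]} through the
+// fixture and expects "[{_id: 0, a: 1}]" without options and
+// "[{_id: 0, a: 1, index: 0}]" when includeArrayIndex is set.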
+
+/** An empty collection produces no results. */
+class Empty : public CheckResultsBase {};
+
+/**
+ * An empty array does not produce any results normally, but if preserveNullAndEmptyArrays is
+ * passed, the document is preserved.
+ */
+class EmptyArray : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << BSONArray())};
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, index: null}]";
+ }
+};
+
+/**
+ * A missing value does not produce any results normally, but if preserveNullAndEmptyArrays is
+ * passed, the document is preserved.
+ */
+class MissingValue : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0)};
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, index: null}]";
+ }
+};
+
+/**
+ * A null value does not produce any results normally, but if preserveNullAndEmptyArrays is passed,
+ * the document is preserved.
+ */
+class Null : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << BSONNULL)};
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: null}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: null, index: null}]";
+ }
+};
+
+/**
+ * An undefined value does not produce any results normally, but if preserveNullAndEmptyArrays is
+ * passed, the document is preserved.
+ */
+class Undefined : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << BSONUndefined)};
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: undefined}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: undefined, index: null}]";
+ }
+};
+
+/** Unwind an array with one value. */
+class OneValue : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(1))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 1}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0}]";
+ }
+};
+
+/** Unwind an array with two values. */
+class TwoValues : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 1}, {_id: 0, a: 2}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: 2, index: 1}]";
+ }
+};
+
+/** Unwind an array with two values, one of which is null. */
+class ArrayWithNull : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << BSONNULL))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 1}, {_id: 0, a: null}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: null, index: 1}]";
+ }
+};
+
+/** Unwind two documents with arrays. */
+class TwoDocuments : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2)),
+ DOC("_id" << 1 << "a" << DOC_ARRAY(3 << 4))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 1, a: 3}, {_id: 1, a: 4}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0}, {_id: 0, a: 2, index: 1},"
+ " {_id: 1, a: 3, index: 0}, {_id: 1, a: 4, index: 1}]";
+ }
+};
+
+/** Unwind an array in a nested document. */
+class NestedArray : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC("b" << DOC_ARRAY(1 << 2) << "c" << 3))};
+ }
+ string unwindFieldPath() const override {
+ return "$a.b";
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: {b: 1, c: 3}}, {_id: 0, a: {b: 2, c: 3}}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: {b: 1, c: 3}, index: 0},"
+ " {_id: 0, a: {b: 2, c: 3}, index: 1}]";
+ }
+};
+
+/**
+ * A nested path produces no results when there is no sub-document that matches the path, unless
+ * preserveNullAndEmptyArrays is specified.
+ */
+class NonObjectParent : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << 4)};
+ }
+ string unwindFieldPath() const override {
+ return "$a.b";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 4}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 4, index: null}]";
+ }
+};
+
+/** Unwind an array in a doubly nested document. */
+class DoubleNestedArray : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a"
+ << DOC("b" << DOC("d" << DOC_ARRAY(1 << 2) << "e" << 4) << "c" << 3))};
+ }
+ string unwindFieldPath() const override {
+ return "$a.b.d";
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: {b: {d: 1, e: 4}, c: 3}}, {_id: 0, a: {b: {d: 2, e: 4}, c: 3}}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: {b: {d: 1, e: 4}, c: 3}, index: 0}, "
+ " {_id: 0, a: {b: {d: 2, e: 4}, c: 3}, index: 1}]";
+ }
+};
+
+/** Unwind several documents in a row. */
+class SeveralDocuments : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(1 << 2 << 3)),
+ DOC("_id" << 1),
+ DOC("_id" << 2),
+ DOC("_id" << 3 << "a" << DOC_ARRAY(10 << 20)),
+ DOC("_id" << 4 << "a" << DOC_ARRAY(30))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 0, a: 3},"
+ " {_id: 3, a: 10}, {_id: 3, a: 20},"
+ " {_id: 4, a: 30}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 1}, {_id: 0, a: 2}, {_id: 0, a: 3},"
+ " {_id: 1},"
+ " {_id: 2},"
+ " {_id: 3, a: 10}, {_id: 3, a: 20},"
+ " {_id: 4, a: 30}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0},"
+ " {_id: 0, a: 2, index: 1},"
+ " {_id: 0, a: 3, index: 2},"
+ " {_id: 3, a: 10, index: 0},"
+ " {_id: 3, a: 20, index: 1},"
+ " {_id: 4, a: 30, index: 0}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 1, index: 0},"
+ " {_id: 0, a: 2, index: 1},"
+ " {_id: 0, a: 3, index: 2},"
+ " {_id: 1, index: null},"
+ " {_id: 2, index: null},"
+ " {_id: 3, a: 10, index: 0},"
+ " {_id: 3, a: 20, index: 1},"
+ " {_id: 4, a: 30, index: 0}]";
+ }
+};
+
+/** Unwind several more documents in a row. */
+class SeveralMoreDocuments : public CheckResultsBase {
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << BSONNULL),
+ DOC("_id" << 1),
+ DOC("_id" << 2 << "a" << DOC_ARRAY("a"
+ << "b")),
+ DOC("_id" << 3),
+ DOC("_id" << 4 << "a" << DOC_ARRAY(1 << 2 << 3)),
+ DOC("_id" << 5 << "a" << DOC_ARRAY(4 << 5 << 6)),
+ DOC("_id" << 6 << "a" << DOC_ARRAY(7 << 8 << 9)),
+ DOC("_id" << 7 << "a" << BSONArray())};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 2, a: 'a'}, {_id: 2, a: 'b'},"
+ " {_id: 4, a: 1}, {_id: 4, a: 2}, {_id: 4, a: 3},"
+ " {_id: 5, a: 4}, {_id: 5, a: 5}, {_id: 5, a: 6},"
+ " {_id: 6, a: 7}, {_id: 6, a: 8}, {_id: 6, a: 9}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: null},"
+ " {_id: 1},"
+ " {_id: 2, a: 'a'}, {_id: 2, a: 'b'},"
+ " {_id: 3},"
+ " {_id: 4, a: 1}, {_id: 4, a: 2}, {_id: 4, a: 3},"
+ " {_id: 5, a: 4}, {_id: 5, a: 5}, {_id: 5, a: 6},"
+ " {_id: 6, a: 7}, {_id: 6, a: 8}, {_id: 6, a: 9},"
+ " {_id: 7}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 2, a: 'a', index: 0},"
+ " {_id: 2, a: 'b', index: 1},"
+ " {_id: 4, a: 1, index: 0},"
+ " {_id: 4, a: 2, index: 1},"
+ " {_id: 4, a: 3, index: 2},"
+ " {_id: 5, a: 4, index: 0},"
+ " {_id: 5, a: 5, index: 1},"
+ " {_id: 5, a: 6, index: 2},"
+ " {_id: 6, a: 7, index: 0},"
+ " {_id: 6, a: 8, index: 1},"
+ " {_id: 6, a: 9, index: 2}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: null, index: null},"
+ " {_id: 1, index: null},"
+ " {_id: 2, a: 'a', index: 0},"
+ " {_id: 2, a: 'b', index: 1},"
+ " {_id: 3, index: null},"
+ " {_id: 4, a: 1, index: 0},"
+ " {_id: 4, a: 2, index: 1},"
+ " {_id: 4, a: 3, index: 2},"
+ " {_id: 5, a: 4, index: 0},"
+ " {_id: 5, a: 5, index: 1},"
+ " {_id: 5, a: 6, index: 2},"
+ " {_id: 6, a: 7, index: 0},"
+ " {_id: 6, a: 8, index: 1},"
+ " {_id: 6, a: 9, index: 2},"
+ " {_id: 7, index: null}]";
+ }
+};
+
+/**
+ * Test the 'includeArrayIndex' option, where the specified path is part of a sub-object.
+ */
+class IncludeArrayIndexSubObject : public CheckResultsBase {
+ string indexPath() const override {
+ return "b.index";
+ }
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << DOC("x" << 100)),
+ DOC("_id" << 1 << "a" << 1 << "b" << DOC("x" << 100)),
+ DOC("_id" << 2 << "b" << DOC("x" << 100))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {x: 100}}, {_id: 1, a: 1, b: {x: 100}}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {x: 100}}, {_id: 1, a: 1, b: {x: 100}}, {_id: 2, b: {x: 100}}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {x: 100, index: 0}}, {_id: 1, a: 1, b: {x: 100, index: null}}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {x: 100, index: 0}},"
+ " {_id: 1, a: 1, b: {x: 100, index: null}},"
+ " {_id: 2, b: {x: 100, index: null}}]";
+ }
+};
+
+/**
+ * Test the 'includeArrayIndex' option, where the specified path overrides an existing field.
+ */
+class IncludeArrayIndexOverrideExisting : public CheckResultsBase {
+ string indexPath() const override {
+ return "b";
+ }
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << 100),
+ DOC("_id" << 1 << "a" << 1 << "b" << 100),
+ DOC("_id" << 2 << "b" << 100)};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}, {_id: 2, b: 100}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 0}, {_id: 1, a: 1, b: null}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 0}, {_id: 1, a: 1, b: null}, {_id: 2, b: null}]";
+ }
+};
+
+/**
+ * Test the 'includeArrayIndex' option, where the specified path overrides an existing nested field.
+ */
+class IncludeArrayIndexOverrideExistingNested : public CheckResultsBase {
+ string indexPath() const override {
+ return "b.index";
+ }
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a" << DOC_ARRAY(0) << "b" << 100),
+ DOC("_id" << 1 << "a" << 1 << "b" << 100),
+ DOC("_id" << 2 << "b" << 100)};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: 100}, {_id: 1, a: 1, b: 100}, {_id: 2, b: 100}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {index: 0}}, {_id: 1, a: 1, b: {index: null}}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0, b: {index: 0}},"
+ " {_id: 1, a: 1, b: {index: null}},"
+ " {_id: 2, b: {index: null}}]";
+ }
+};
+
+/**
+ * Test the 'includeArrayIndex' option, where the specified path overrides the field that was being
+ * unwound.
+ */
+class IncludeArrayIndexOverrideUnwindPath : public CheckResultsBase {
+ string indexPath() const override {
+ return "a";
+ }
+ deque<Document> inputData() override {
+ return {
+ DOC("_id" << 0 << "a" << DOC_ARRAY(5)), DOC("_id" << 1 << "a" << 1), DOC("_id" << 2)};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 5}, {_id: 1, a: 1}]";
+ }
+ string expectedPreservedResultSetString() const override {
+ return "[{_id: 0, a: 5}, {_id: 1, a: 1}, {_id: 2}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0}, {_id: 1, a: null}]";
+ }
+ string expectedPreservedIndexedResultSetString() const override {
+ return "[{_id: 0, a: 0}, {_id: 1, a: null}, {_id: 2, a: null}]";
+ }
+};
+
+/**
+ * Test the 'includeArrayIndex' option, where the specified path is a subfield of the field that was
+ * being unwound.
+ */
+class IncludeArrayIndexWithinUnwindPath : public CheckResultsBase {
+ string indexPath() const override {
+ return "a.index";
+ }
+ deque<Document> inputData() override {
+ return {DOC("_id" << 0 << "a"
+ << DOC_ARRAY(100 << DOC("b" << 1) << DOC("b" << 1 << "index" << -1)))};
+ }
+ string expectedResultSetString() const override {
+ return "[{_id: 0, a: 100}, {_id: 0, a: {b: 1}}, {_id: 0, a: {b: 1, index: -1}}]";
+ }
+ string expectedIndexedResultSetString() const override {
+ return "[{_id: 0, a: {index: 0}},"
+ " {_id: 0, a: {b: 1, index: 1}},"
+ " {_id: 0, a: {b: 1, index: 2}}]";
+ }
+};
+
+/**
+ * New-style fixture for testing the $unwind stage. Provides access to an ExpressionContext which
+ * can be used to construct DocumentSourceUnwind.
+ */
+class UnwindStageTest : public AggregationContextFixture {
+public:
+ intrusive_ptr<DocumentSource> createUnwind(BSONObj spec) {
+ auto specElem = spec.firstElement();
+ return DocumentSourceUnwind::createFromBson(specElem, getExpCtx());
+ }
+};
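+
+// Note: the dependency and output-sort tests below construct the stage directly via
+// DocumentSourceUnwind::create(), while the error cases go through createUnwind() to
+// exercise the BSON parsing path in DocumentSourceUnwind::createFromBson().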
+
+TEST_F(UnwindStageTest, AddsUnwoundPathToDependencies) {
+ auto unwind =
+ DocumentSourceUnwind::create(getExpCtx(), "x.y.z", false, boost::optional<string>("index"));
+ DepsTracker dependencies;
+ ASSERT_EQUALS(DocumentSource::SEE_NEXT, unwind->getDependencies(&dependencies));
+ ASSERT_EQUALS(1U, dependencies.fields.size());
+ ASSERT_EQUALS(1U, dependencies.fields.count("x.y.z"));
+ ASSERT_EQUALS(false, dependencies.needWholeDocument);
+ ASSERT_EQUALS(false, dependencies.getNeedTextScore());
+}
+
+TEST_F(UnwindStageTest, TruncatesOutputSortAtUnwoundPath) {
+ auto unwind = DocumentSourceUnwind::create(getExpCtx(), "x.y", false, boost::none);
+ auto source = DocumentSourceMock::create();
+ source->sorts = {BSON("a" << 1 << "x.y" << 1 << "b" << 1)};
+
+ unwind->setSource(source.get());
+
+ BSONObjSet outputSort = unwind->getOutputSorts();
+ ASSERT_EQUALS(1U, outputSort.size());
+ ASSERT_EQUALS(1U, outputSort.count(BSON("a" << 1)));
+}
+
+//
+// Error cases.
+//
+
+TEST_F(UnwindStageTest, ShouldRejectNonObjectNonString) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << 1)), UserException, 15981);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectSpecWithoutPath) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSONObj())), UserException, 28812);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectNonStringPath) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path" << 2))), UserException, 28808);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectNonDollarPrefixedPath) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind"
+ << "somePath")),
+ UserException,
+ 28818);
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "somePath"))),
+ UserException,
+ 28818);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectNonBoolPreserveNullAndEmptyArrays) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "preserveNullAndEmptyArrays"
+ << 2))),
+ UserException,
+ 28809);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectNonStringIncludeArrayIndex) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "includeArrayIndex"
+ << 2))),
+ UserException,
+ 28810);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectEmptyStringIncludeArrayIndex) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "includeArrayIndex"
+ << ""))),
+ UserException,
+ 28810);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectDollarPrefixedIncludeArrayIndex) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "includeArrayIndex"
+ << "$"))),
+ UserException,
+ 28822);
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "includeArrayIndex"
+ << "$path"))),
+ UserException,
+ 28822);
+}
+
+TEST_F(UnwindStageTest, ShouldRejectUnrecognizedOption) {
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "preserveNullAndEmptyArrays"
+ << true
+ << "foo"
+ << 3))),
+ UserException,
+ 28811);
+ ASSERT_THROWS_CODE(createUnwind(BSON("$unwind" << BSON("path"
+ << "$x"
+ << "foo"
+ << 3))),
+ UserException,
+ 28811);
+}
+
+class All : public Suite {
+public:
+ All() : Suite("DocumentSourceUnwindTests") {}
+ void setupTests() {
+ add<Empty>();
+ add<EmptyArray>();
+ add<MissingValue>();
+ add<Null>();
+ add<Undefined>();
+ add<OneValue>();
+ add<TwoValues>();
+ add<ArrayWithNull>();
+ add<TwoDocuments>();
+ add<NestedArray>();
+ add<NonObjectParent>();
+ add<DoubleNestedArray>();
+ add<SeveralDocuments>();
+ add<SeveralMoreDocuments>();
+ add<IncludeArrayIndexSubObject>();
+ add<IncludeArrayIndexOverrideExisting>();
+ add<IncludeArrayIndexOverrideExistingNested>();
+ add<IncludeArrayIndexOverrideUnwindPath>();
+ add<IncludeArrayIndexWithinUnwindPath>();
+ }
+};
+
+SuiteInstance<All> myall;
+
+} // namespace
+} // namespace mongo