-rw-r--r--  src/mongo/db/exec/bucket_unpacker.cpp                                                         |  16
-rw-r--r--  src/mongo/db/exec/bucket_unpacker.h                                                           |   5
-rw-r--r--  src/mongo/db/exec/bucket_unpacker_test.cpp                                                    | 104
-rw-r--r--  src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp                              |   9
-rw-r--r--  src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp | 227
5 files changed, 0 insertions(+), 361 deletions(-)
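Note: as the removed hunks below show, this change deletes the includeBucketIdAndRowIndex option from BucketSpec and from the internal unpack-bucket stage (DocumentSourceInternalUnpackBucket). When the option was set, BucketUnpacker::getNext() and BucketUnpacker::extractSingleMeasurement() did not return each materialized measurement directly but wrapped it as

    {bucketId: <_id of the source bucket>, rowIndex: <row position within the bucket>, rowData: <the materialized measurement>}

With the option gone, both methods always return the bare measurement, the stage no longer parses or serializes the flag, and the tests covering the wrapped output shape are removed.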
diff --git a/src/mongo/db/exec/bucket_unpacker.cpp b/src/mongo/db/exec/bucket_unpacker.cpp
index 947cc02cad1..829e490213b 100644
--- a/src/mongo/db/exec/bucket_unpacker.cpp
+++ b/src/mongo/db/exec/bucket_unpacker.cpp
@@ -206,15 +206,6 @@ Document BucketUnpacker::getNext() {
measurement.addField(name, Value{_computedMetaProjections[name]});
}
- if (_spec.includeBucketIdAndRowIndex) {
- MutableDocument nestedMeasurement{};
- nestedMeasurement.addField("bucketId", Value{_bucket[timeseries::kBucketIdFieldName]});
- int rowIndex;
- uassertStatusOK(NumberParser()(currentIdx, &rowIndex));
- nestedMeasurement.addField("rowIndex", Value{rowIndex});
- nestedMeasurement.addField("rowData", measurement.freezeToValue());
- return nestedMeasurement.freeze();
- }
return measurement.freeze();
}
@@ -250,13 +241,6 @@ Document BucketUnpacker::extractSingleMeasurement(int j) {
measurement.addField(name, Value{_computedMetaProjections[name]});
}
- if (_spec.includeBucketIdAndRowIndex) {
- MutableDocument nestedMeasurement{};
- nestedMeasurement.addField("bucketId", Value{_bucket[timeseries::kBucketIdFieldName]});
- nestedMeasurement.addField("rowIndex", Value{j});
- nestedMeasurement.addField("rowData", measurement.freezeToValue());
- return nestedMeasurement.freeze();
- }
return measurement.freeze();
}
} // namespace mongo
diff --git a/src/mongo/db/exec/bucket_unpacker.h b/src/mongo/db/exec/bucket_unpacker.h
index b4094bf41bc..fd159092efa 100644
--- a/src/mongo/db/exec/bucket_unpacker.h
+++ b/src/mongo/db/exec/bucket_unpacker.h
@@ -54,11 +54,6 @@ struct BucketSpec {
// Vector of computed meta field projection names. Added at the end of materialized
// measurements.
std::vector<std::string> computedMetaProjFields;
-
- // An includeBucketIdAndRowIndex flag to indicate that materialized measurements will
- // contain the bucketId of the bucket that measurement was extracted from along with its row
- // position.
- bool includeBucketIdAndRowIndex = false;
};
/**
diff --git a/src/mongo/db/exec/bucket_unpacker_test.cpp b/src/mongo/db/exec/bucket_unpacker_test.cpp
index b3697d81303..311f626854e 100644
--- a/src/mongo/db/exec/bucket_unpacker_test.cpp
+++ b/src/mongo/db/exec/bucket_unpacker_test.cpp
@@ -505,62 +505,6 @@ TEST_F(BucketUnpackerTest, ExtractSingleMeasurement) {
ASSERT_DOCUMENT_EQ(next, expected);
}
-TEST_F(BucketUnpackerTest, ExtractSingleMeasurementIncludeBucketIdRowIndex) {
- std::set<std::string> fields{
- "_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
- auto spec = BucketSpec{
- kUserDefinedTimeName.toString(), kUserDefinedMetaName.toString(), std::move(fields)};
- spec.includeBucketIdAndRowIndex = true;
- auto unpacker = BucketUnpacker{std::move(spec), BucketUnpacker::Behavior::kInclude};
-
- auto d1 = dateFromISOString("2020-02-17T00:00:00.000Z").getValue();
- auto d2 = dateFromISOString("2020-02-17T01:00:00.000Z").getValue();
- auto d3 = dateFromISOString("2020-02-17T02:00:00.000Z").getValue();
- auto bucket = BSON("meta" << BSON("m1" << 999 << "m2" << 9999) << "data"
- << BSON("_id" << BSON("0" << 1 << "1" << 2 << "2" << 3) << "time"
- << BSON("0" << d1 << "1" << d2 << "2" << d3) << "a"
- << BSON("0" << 1 << "1" << 2 << "2" << 3) << "b"
- << BSON("1" << 1 << "2" << 2))
- << "_id" << 0);
-
- unpacker.reset(std::move(bucket));
-
- auto next = unpacker.extractSingleMeasurement(0);
- auto expected = Document{
- {"bucketId", 0},
- {"rowIndex", 0},
- {"rowData",
- Document{
- {"myMeta", Document{{"m1", 999}, {"m2", 9999}}}, {"_id", 1}, {"time", d1}, {"a", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(2);
- expected = Document{{"bucketId", 0},
- {"rowIndex", 2},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 3},
- {"time", d3},
- {"a", 3},
- {"b", 2}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(1);
- expected = Document{{"bucketId", 0},
- {"rowIndex", 1},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 2},
- {"time", d2},
- {"a", 2},
- {"b", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the middle element again?
- next = unpacker.extractSingleMeasurement(1);
- ASSERT_DOCUMENT_EQ(next, expected);
-}
-
TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparse) {
std::set<std::string> fields{
"_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
@@ -598,54 +542,6 @@ TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparse) {
ASSERT_DOCUMENT_EQ(next, expected);
}
-TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparseIncludeBucketIdRowIndex) {
- std::set<std::string> fields{
- "_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
- auto spec = BucketSpec{
- kUserDefinedTimeName.toString(), kUserDefinedMetaName.toString(), std::move(fields)};
- spec.includeBucketIdAndRowIndex = true;
- auto unpacker = BucketUnpacker{std::move(spec), BucketUnpacker::Behavior::kInclude};
-
- auto d1 = dateFromISOString("2020-02-17T00:00:00.000Z").getValue();
- auto d2 = dateFromISOString("2020-02-17T01:00:00.000Z").getValue();
- auto bucket = BSON("meta" << BSON("m1" << 999 << "m2" << 9999) << "data"
- << BSON("_id" << BSON("0" << 1 << "1" << 2) << "time"
- << BSON("0" << d1 << "1" << d2) << "a" << BSON("0" << 1)
- << "b" << BSON("1" << 1))
- << "_id" << 0);
-
- unpacker.reset(std::move(bucket));
- auto next = unpacker.extractSingleMeasurement(1);
- auto expected = Document{{{"bucketId", 0},
- {"rowIndex", 1},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 2},
- {"time", d2},
- {"b", 1}}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the same element again?
- next = unpacker.extractSingleMeasurement(1);
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(0);
- expected = Document{
- {"bucketId", 0},
- {"rowIndex", 0},
- {"rowData",
- Document{
- {"myMeta", Document{{"m1", 999}, {"m2", 9999}}}, {"_id", 1}, {"time", d1}, {"a", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the same element twice in a row?
- next = unpacker.extractSingleMeasurement(0);
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(0);
- ASSERT_DOCUMENT_EQ(next, expected);
-}
-
TEST_F(BucketUnpackerTest, ComputeMeasurementCountLowerBoundsAreCorrect) {
// The last table entry is a sentinel for an upper bound on the interval that covers measurement
// counts up to 16 MB.
diff --git a/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp b/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
index ff60684e3de..70d5feb04e3 100644
--- a/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
+++ b/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
@@ -311,12 +311,6 @@ boost::intrusive_ptr<DocumentSource> DocumentSourceInternalUnpackBucket::createF
field.find('.') == std::string::npos);
bucketSpec.computedMetaProjFields.emplace_back(field);
}
- } else if (fieldName == "includeBucketIdAndRowIndex") {
- uassert(5809600,
- str::stream() << "includeBucketIdAndRowIndex field must be a boolean, got: "
- << elem.type(),
- elem.type() == BSONType::Bool);
- bucketSpec.includeBucketIdAndRowIndex = elem.boolean();
} else {
uasserted(5346506,
str::stream()
@@ -399,9 +393,6 @@ void DocumentSourceInternalUnpackBucket::serializeToArray(
if (spec.metaField) {
out.addField(timeseries::kMetaFieldName, Value{*spec.metaField});
}
- if (spec.includeBucketIdAndRowIndex) {
- out.addField("includeBucketIdAndRowIndex", Value{true});
- }
out.addField(kBucketMaxSpanSeconds, Value{_bucketMaxSpanSeconds});
if (!spec.computedMetaProjFields.empty())
diff --git a/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp b/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
index 3cc062880d6..35667959188 100644
--- a/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
+++ b/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
@@ -90,182 +90,6 @@ TEST_F(InternalUnpackBucketExecTest, UnpackBasicIncludeAllMeasurementFields) {
ASSERT_TRUE(next.isEOF());
}
-TEST_F(InternalUnpackBucketExecTest, UnpackIncludeBucketIdRowIndexInvalidRowIndex) {
- auto expCtx = getExpCtx();
-
- auto spec = BSON(
- DocumentSourceInternalUnpackBucket::kStageNameInternal
- << BSON(DocumentSourceInternalUnpackBucket::kInclude
- << BSON_ARRAY("_id" << kUserDefinedTimeName << kUserDefinedMetaName << "a"
- << "b")
- << timeseries::kTimeFieldName << kUserDefinedTimeName << timeseries::kMetaFieldName
- << kUserDefinedMetaName << DocumentSourceInternalUnpackBucket::kBucketMaxSpanSeconds
- << 3600 << "includeBucketIdAndRowIndex" << true));
- auto unpack =
- DocumentSourceInternalUnpackBucket::createFromBsonInternal(spec.firstElement(), expCtx);
- // This source will produce two buckets with invalid timeField indices.
- auto source = DocumentSourceMock::createForTest(
- {"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'0':1, '1':2}, time: {'a':1,'b':2}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 0}",
- "{meta: {'m1': 9, 'm2': 9, 'm3': 9}, data: {_id: {'0':3, '1':4}, time: {'c':3, "
- "'d':4}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 1}"},
- expCtx);
- unpack->setSource(source.get());
- // The first result throws a FailedToParse exception.
- ASSERT_THROWS_CODE(unpack->getNext(), DBException, ErrorCodes::FailedToParse);
-}
-
-TEST_F(InternalUnpackBucketExecTest, UnpackBasicIncludeBucketIdRowIndexTrue) {
- auto expCtx = getExpCtx();
-
- auto spec = BSON(
- DocumentSourceInternalUnpackBucket::kStageNameInternal
- << BSON(DocumentSourceInternalUnpackBucket::kInclude
- << BSON_ARRAY("_id" << kUserDefinedTimeName << kUserDefinedMetaName << "a"
- << "b")
- << timeseries::kTimeFieldName << kUserDefinedTimeName << timeseries::kMetaFieldName
- << kUserDefinedMetaName << DocumentSourceInternalUnpackBucket::kBucketMaxSpanSeconds
- << 3600 << "includeBucketIdAndRowIndex" << true));
- auto unpack =
- DocumentSourceInternalUnpackBucket::createFromBsonInternal(spec.firstElement(), expCtx);
- // This source will produce two buckets.
- auto source = DocumentSourceMock::createForTest(
- {"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'0':1, '1':2}, time: {'0':1, '1':2}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 0}",
- "{meta: {'m1': 9, 'm2': 9, 'm3': 9}, data: {_id: {'0':3, '1':4}, time: {'0':3, '1':4}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 1}"},
- expCtx);
- unpack->setSource(source.get());
- // The first result exists and is as expected.
- auto next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 0, rowData: {time: 1, myMeta: "
- "{m1: 999, m2: 9999}, _id: 1, a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 1, rowData: {time: 2, myMeta: "
- "{m1: 999, m2: 9999}, _id: 2, a: 2, b: 1}}")));
-
- // Second bucket
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 0, rowData: {time: 3, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 3, a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 1, rowData: {time: 4, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 4, a: 2, b: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isEOF());
-}
-
-TEST_F(InternalUnpackBucketExecTest, UnpackBasicIncludeBucketIdRowIndexFalse) {
- auto expCtx = getExpCtx();
-
- auto spec = BSON(
- DocumentSourceInternalUnpackBucket::kStageNameInternal
- << BSON(DocumentSourceInternalUnpackBucket::kInclude
- << BSON_ARRAY("_id" << kUserDefinedTimeName << kUserDefinedMetaName << "a"
- << "b")
- << timeseries::kTimeFieldName << kUserDefinedTimeName << timeseries::kMetaFieldName
- << kUserDefinedMetaName << DocumentSourceInternalUnpackBucket::kBucketMaxSpanSeconds
- << 3600 << "includeBucketIdAndRowIndex" << false));
- auto unpack =
- DocumentSourceInternalUnpackBucket::createFromBsonInternal(spec.firstElement(), expCtx);
- // This source will produce two buckets.
- auto source = DocumentSourceMock::createForTest(
- {"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'0':1, '1':2}, time: {'0':1, '1':2}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 0}",
- "{meta: {'m1': 9, 'm2': 9, 'm3': 9}, data: {_id: {'0':3, '1':4}, time: {'0':3, '1':4}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 1}"},
- expCtx);
- unpack->setSource(source.get());
- // The first result exists and is as expected.
- auto next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{time: 1, myMeta: {m1: 999, m2: 9999}, _id: 1, a: 1}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{time: 2, myMeta: {m1: 999, "
- "m2: 9999}, _id: 2, a: 2, b: 1}")));
-
- // Second bucket
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(
- next.getDocument(),
- Document(fromjson("{time: 3, myMeta: {m1: 9, m2: 9, m3: 9}, _id: 3, a: 1}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{time: 4, myMeta: {m1: 9, m2: "
- "9, m3: 9}, _id: 4, a: 2, b: 1}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isEOF());
-}
-
-TEST_F(InternalUnpackBucketExecTest, UnpackExcludeASingleFieldIncludeBucketIdRowIndex) {
- auto expCtx = getExpCtx();
- auto spec = BSON(DocumentSourceInternalUnpackBucket::kStageNameInternal << BSON(
- DocumentSourceInternalUnpackBucket::kExclude
- << BSON_ARRAY("b") << timeseries::kTimeFieldName << kUserDefinedTimeName
- << timeseries::kMetaFieldName << kUserDefinedMetaName
- << DocumentSourceInternalUnpackBucket::kBucketMaxSpanSeconds << 3600
- << "includeBucketIdAndRowIndex" << true));
-
- auto unpack =
- DocumentSourceInternalUnpackBucket::createFromBsonInternal(spec.firstElement(), expCtx);
-
- auto source = DocumentSourceMock::createForTest(
- {"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'0':1, '1':2}, time: {'0':1, '1':2}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 0}",
- "{meta: {m1: 9, m2: 9, m3: 9}, data: {_id: {'0':3, '1':4}, time: {'0':3, '1':4}, "
- "a:{'0':1, '1':2}, b:{'1':1}}, _id: 1}"},
- expCtx);
- unpack->setSource(source.get());
- // The first result exists and is as expected.
- auto next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 0, rowData: {time: 1, myMeta: "
- "{m1: 999, m2: 9999}, _id: 1, a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 1, rowData: {time: 2, myMeta: "
- "{m1: 999, m2: 9999}, _id: 2, a: 2}}")));
-
- // Second bucket
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 0, rowData: {time: 3, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 3, a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 1, rowData: {time: 4, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 4, a: 2}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isEOF());
-}
-
TEST_F(InternalUnpackBucketExecTest, UnpackExcludeASingleField) {
auto expCtx = getExpCtx();
auto spec = BSON(DocumentSourceInternalUnpackBucket::kStageNameInternal << BSON(
@@ -533,57 +357,6 @@ TEST_F(InternalUnpackBucketExecTest, UnpackBasicIncludeWithDollarPrefix) {
ASSERT_TRUE(next.isEOF());
}
-TEST_F(InternalUnpackBucketExecTest, UnpackBasicIncludeWithDollarPrefixIncludeBucketIdRowIndex) {
- auto expCtx = getExpCtx();
-
- auto spec = BSON(
- DocumentSourceInternalUnpackBucket::kStageNameInternal
- << BSON(DocumentSourceInternalUnpackBucket::kInclude
- << BSON_ARRAY("_id" << kUserDefinedTimeName << kUserDefinedMetaName << "$a"
- << "b")
- << timeseries::kTimeFieldName << kUserDefinedTimeName << timeseries::kMetaFieldName
- << kUserDefinedMetaName << DocumentSourceInternalUnpackBucket::kBucketMaxSpanSeconds
- << 3600 << "includeBucketIdAndRowIndex" << true));
- auto unpack =
- DocumentSourceInternalUnpackBucket::createFromBsonInternal(spec.firstElement(), expCtx);
- // This source will produce two buckets.
- auto source = DocumentSourceMock::createForTest(
- {"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'0':1, '1':2}, time: {'0':1, '1':2}, "
- "$a:{'0':1, '1':2}, b:{'1':1}}, _id: 0}",
- "{meta: {m1: 9, m2: 9, m3: 9}, data: {_id: {'0':3, '1':4}, time: {'0':3, '1':4}, "
- "$a:{'0':1, '1':2}, b:{'1':1}}, _id: 1}"},
- expCtx);
- unpack->setSource(source.get());
- // The first result exists and is as expected.
- auto next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 0, rowData: {time: 1, myMeta: "
- "{m1: 999, m2: 9999}, _id: 1, $a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 0, rowIndex: 1, rowData: {time: 2, myMeta: "
- "{m1: 999, m2: 9999}, _id: 2, $a: 2, b: 1}}")));
-
- // Second bucket
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 0, rowData: {time: 3, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 3, $a: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isAdvanced());
- ASSERT_DOCUMENT_EQ(next.getDocument(),
- Document(fromjson("{bucketId: 1, rowIndex: 1, rowData: {time: 4, myMeta: "
- "{m1: 9, m2: 9, m3: 9}, _id: 4, $a: 2, b: 1}}")));
-
- next = unpack->getNext();
- ASSERT_TRUE(next.isEOF());
-}
-
TEST_F(InternalUnpackBucketExecTest, UnpackMetadataOnly) {
auto expCtx = getExpCtx();
auto spec = BSON(DocumentSourceInternalUnpackBucket::kStageNameInternal