author      Gregory Noma <gregory.noma@gmail.com>             2021-09-01 21:44:36 -0400
committer   Evergreen Agent <no-reply@evergreen.mongodb.com>  2021-09-02 16:05:57 +0000
commit      b30ecb0fdf8f5d707d28c2ad012f3420fb582b6c (patch)
tree        b886b7346efc9b5e42aa1f41e4b7a96e911fb6e9 /src/mongo/db/exec
parent      53c498736bef89572d34a5201c3af749f65e68a2 (diff)
download    mongo-b30ecb0fdf8f5d707d28c2ad012f3420fb582b6c.tar.gz
Revert "SERVER-58096 Add IncludeBucketIdAndRowIndex option to $_internalUnpackBucket"
This reverts commit c4bdb6ff31ec8773a0dda817949006aec31753c8.
Diffstat (limited to 'src/mongo/db/exec')
-rw-r--r--  src/mongo/db/exec/bucket_unpacker.cpp        16
-rw-r--r--  src/mongo/db/exec/bucket_unpacker.h           5
-rw-r--r--  src/mongo/db/exec/bucket_unpacker_test.cpp  104
3 files changed, 0 insertions, 125 deletions
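
For context, the reverted option changed the shape of each unpacked document: with the flag set, BucketUnpacker::getNext() and extractSingleMeasurement() did not return the materialized measurement directly but nested it under "rowData", alongside the source bucket's _id ("bucketId") and the row's position ("rowIndex"), as the removed hunks below show. A minimal standalone sketch of that output shape, using std::map in place of mongo::Document (FlatDoc, WrappedMeasurement, and makeWrappedMeasurement are illustrative names, not MongoDB APIs):

// Minimal sketch, not MongoDB code: models the nested output produced by the
// reverted includeBucketIdAndRowIndex path.
#include <iostream>
#include <map>
#include <string>

using FlatDoc = std::map<std::string, std::string>;  // stand-in for a measurement Document

struct WrappedMeasurement {
    std::string bucketId;  // _id of the bucket the row was extracted from
    int rowIndex;          // position of the row within the bucket
    FlatDoc rowData;       // the materialized measurement itself
};

// With the flag set, each unpacked row was returned as {bucketId, rowIndex, rowData};
// without it (the state after this revert), rowData is returned directly.
WrappedMeasurement makeWrappedMeasurement(const std::string& bucketId,
                                          int rowIndex,
                                          FlatDoc measurement) {
    return WrappedMeasurement{bucketId, rowIndex, std::move(measurement)};
}

int main() {
    FlatDoc row{{"time", "2020-02-17T00:00:00Z"}, {"a", "1"}};
    auto wrapped = makeWrappedMeasurement("0", 0, row);
    std::cout << "bucketId=" << wrapped.bucketId
              << " rowIndex=" << wrapped.rowIndex
              << " fields=" << wrapped.rowData.size() << "\n";
    return 0;
}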
diff --git a/src/mongo/db/exec/bucket_unpacker.cpp b/src/mongo/db/exec/bucket_unpacker.cpp
index 947cc02cad1..829e490213b 100644
--- a/src/mongo/db/exec/bucket_unpacker.cpp
+++ b/src/mongo/db/exec/bucket_unpacker.cpp
@@ -206,15 +206,6 @@ Document BucketUnpacker::getNext() {
measurement.addField(name, Value{_computedMetaProjections[name]});
}
- if (_spec.includeBucketIdAndRowIndex) {
- MutableDocument nestedMeasurement{};
- nestedMeasurement.addField("bucketId", Value{_bucket[timeseries::kBucketIdFieldName]});
- int rowIndex;
- uassertStatusOK(NumberParser()(currentIdx, &rowIndex));
- nestedMeasurement.addField("rowIndex", Value{rowIndex});
- nestedMeasurement.addField("rowData", measurement.freezeToValue());
- return nestedMeasurement.freeze();
- }
return measurement.freeze();
}
@@ -250,13 +241,6 @@ Document BucketUnpacker::extractSingleMeasurement(int j) {
measurement.addField(name, Value{_computedMetaProjections[name]});
}
- if (_spec.includeBucketIdAndRowIndex) {
- MutableDocument nestedMeasurement{};
- nestedMeasurement.addField("bucketId", Value{_bucket[timeseries::kBucketIdFieldName]});
- nestedMeasurement.addField("rowIndex", Value{j});
- nestedMeasurement.addField("rowData", measurement.freezeToValue());
- return nestedMeasurement.freeze();
- }
return measurement.freeze();
}
} // namespace mongo
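
The extractSingleMeasurement() hunk above and the test buckets removed further down rely on the bucket's columnar layout: each data field maps row-index keys ("0", "1", ...) to values, and a field may have no entry for some rows (the sparse case); getNext() turns such a string key into an int with NumberParser. A minimal standalone sketch of extracting one row from that layout, assuming integer row-index keys and integer values and using std::map instead of BSON (extractSingleMeasurementSketch is an illustrative name, not the real API):

// Minimal sketch, not MongoDB code: gathers one value per column for a given
// row index, skipping columns with no entry for that row (the sparse case).
#include <iostream>
#include <map>
#include <string>

using Column = std::map<int, int>;                 // rowIndex -> value
using BucketData = std::map<std::string, Column>;  // field name -> column
using RowDoc = std::map<std::string, int>;         // materialized row

RowDoc extractSingleMeasurementSketch(const BucketData& data, int rowIndex) {
    RowDoc row;
    for (const auto& [field, column] : data) {
        auto it = column.find(rowIndex);
        if (it != column.end()) {
            row[field] = it->second;  // only fields present at this row index
        }
    }
    return row;
}

int main() {
    // Mirrors the sparse test bucket below: "a" only has row 0, "b" only has row 1.
    BucketData data{{"_id", {{0, 1}, {1, 2}}},
                    {"a", {{0, 1}}},
                    {"b", {{1, 1}}}};
    RowDoc row1 = extractSingleMeasurementSketch(data, 1);
    for (const auto& [k, v] : row1) {
        std::cout << k << "=" << v << " ";  // prints: _id=2 b=1
    }
    std::cout << "\n";
    return 0;
}

For row index 1 of the sparse bucket, this yields a document containing _id and b but no a, which matches the shape of the expected Document in the removed sparse test.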
diff --git a/src/mongo/db/exec/bucket_unpacker.h b/src/mongo/db/exec/bucket_unpacker.h
index b4094bf41bc..fd159092efa 100644
--- a/src/mongo/db/exec/bucket_unpacker.h
+++ b/src/mongo/db/exec/bucket_unpacker.h
@@ -54,11 +54,6 @@ struct BucketSpec {
// Vector of computed meta field projection names. Added at the end of materialized
// measurements.
std::vector<std::string> computedMetaProjFields;
-
- // An includeBucketIdAndRowIndex flag to indicate that materialized measurements will
- // contain the bucketId of the bucket that measurement was extracted from along with its row
- // position.
- bool includeBucketIdAndRowIndex = false;
};
/**
diff --git a/src/mongo/db/exec/bucket_unpacker_test.cpp b/src/mongo/db/exec/bucket_unpacker_test.cpp
index b3697d81303..311f626854e 100644
--- a/src/mongo/db/exec/bucket_unpacker_test.cpp
+++ b/src/mongo/db/exec/bucket_unpacker_test.cpp
@@ -505,62 +505,6 @@ TEST_F(BucketUnpackerTest, ExtractSingleMeasurement) {
ASSERT_DOCUMENT_EQ(next, expected);
}
-TEST_F(BucketUnpackerTest, ExtractSingleMeasurementIncludeBucketIdRowIndex) {
- std::set<std::string> fields{
- "_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
- auto spec = BucketSpec{
- kUserDefinedTimeName.toString(), kUserDefinedMetaName.toString(), std::move(fields)};
- spec.includeBucketIdAndRowIndex = true;
- auto unpacker = BucketUnpacker{std::move(spec), BucketUnpacker::Behavior::kInclude};
-
- auto d1 = dateFromISOString("2020-02-17T00:00:00.000Z").getValue();
- auto d2 = dateFromISOString("2020-02-17T01:00:00.000Z").getValue();
- auto d3 = dateFromISOString("2020-02-17T02:00:00.000Z").getValue();
- auto bucket = BSON("meta" << BSON("m1" << 999 << "m2" << 9999) << "data"
- << BSON("_id" << BSON("0" << 1 << "1" << 2 << "2" << 3) << "time"
- << BSON("0" << d1 << "1" << d2 << "2" << d3) << "a"
- << BSON("0" << 1 << "1" << 2 << "2" << 3) << "b"
- << BSON("1" << 1 << "2" << 2))
- << "_id" << 0);
-
- unpacker.reset(std::move(bucket));
-
- auto next = unpacker.extractSingleMeasurement(0);
- auto expected = Document{
- {"bucketId", 0},
- {"rowIndex", 0},
- {"rowData",
- Document{
- {"myMeta", Document{{"m1", 999}, {"m2", 9999}}}, {"_id", 1}, {"time", d1}, {"a", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(2);
- expected = Document{{"bucketId", 0},
- {"rowIndex", 2},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 3},
- {"time", d3},
- {"a", 3},
- {"b", 2}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(1);
- expected = Document{{"bucketId", 0},
- {"rowIndex", 1},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 2},
- {"time", d2},
- {"a", 2},
- {"b", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the middle element again?
- next = unpacker.extractSingleMeasurement(1);
- ASSERT_DOCUMENT_EQ(next, expected);
-}
-
TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparse) {
std::set<std::string> fields{
"_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
@@ -598,54 +542,6 @@ TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparse) {
ASSERT_DOCUMENT_EQ(next, expected);
}
-TEST_F(BucketUnpackerTest, ExtractSingleMeasurementSparseIncludeBucketIdRowIndex) {
- std::set<std::string> fields{
- "_id", kUserDefinedMetaName.toString(), kUserDefinedTimeName.toString(), "a", "b"};
- auto spec = BucketSpec{
- kUserDefinedTimeName.toString(), kUserDefinedMetaName.toString(), std::move(fields)};
- spec.includeBucketIdAndRowIndex = true;
- auto unpacker = BucketUnpacker{std::move(spec), BucketUnpacker::Behavior::kInclude};
-
- auto d1 = dateFromISOString("2020-02-17T00:00:00.000Z").getValue();
- auto d2 = dateFromISOString("2020-02-17T01:00:00.000Z").getValue();
- auto bucket = BSON("meta" << BSON("m1" << 999 << "m2" << 9999) << "data"
- << BSON("_id" << BSON("0" << 1 << "1" << 2) << "time"
- << BSON("0" << d1 << "1" << d2) << "a" << BSON("0" << 1)
- << "b" << BSON("1" << 1))
- << "_id" << 0);
-
- unpacker.reset(std::move(bucket));
- auto next = unpacker.extractSingleMeasurement(1);
- auto expected = Document{{{"bucketId", 0},
- {"rowIndex", 1},
- {"rowData",
- Document{{"myMeta", Document{{"m1", 999}, {"m2", 9999}}},
- {"_id", 2},
- {"time", d2},
- {"b", 1}}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the same element again?
- next = unpacker.extractSingleMeasurement(1);
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(0);
- expected = Document{
- {"bucketId", 0},
- {"rowIndex", 0},
- {"rowData",
- Document{
- {"myMeta", Document{{"m1", 999}, {"m2", 9999}}}, {"_id", 1}, {"time", d1}, {"a", 1}}}};
- ASSERT_DOCUMENT_EQ(next, expected);
-
- // Can we extract the same element twice in a row?
- next = unpacker.extractSingleMeasurement(0);
- ASSERT_DOCUMENT_EQ(next, expected);
-
- next = unpacker.extractSingleMeasurement(0);
- ASSERT_DOCUMENT_EQ(next, expected);
-}
-
TEST_F(BucketUnpackerTest, ComputeMeasurementCountLowerBoundsAreCorrect) {
// The last table entry is a sentinel for an upper bound on the interval that covers measurement
// counts up to 16 MB.