summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorYoonsoo Kim <yoonsoo.kim@mongodb.com>2021-03-16 23:54:13 +0000
committerEvergreen Agent <no-reply@evergreen.mongodb.com>2021-03-18 16:15:42 +0000
commita97223f6f28f150eede362227db8b36048beeec4 (patch)
treec851b9640072650d820a877d911c2666766be8d7
parent926ee400907f538c8eb240d97cb292797cd60571 (diff)
downloadmongo-a97223f6f28f150eede362227db8b36048beeec4.tar.gz
SERVER-55061 Handle missing meta and null meta differently
-rw-r--r--jstests/core/timeseries/timeseries_metadata.js18
-rw-r--r--jstests/noPassthrough/timeseries_insert_ordered_false.js9
-rw-r--r--jstests/noPassthrough/timeseries_insert_ordered_true.js9
-rw-r--r--src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp15
-rw-r--r--src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp80
5 files changed, 95 insertions, 36 deletions
diff --git a/jstests/core/timeseries/timeseries_metadata.js b/jstests/core/timeseries/timeseries_metadata.js
index 95dcc35fbfa..26384adc4a8 100644
--- a/jstests/core/timeseries/timeseries_metadata.js
+++ b/jstests/core/timeseries/timeseries_metadata.js
@@ -118,10 +118,13 @@ runTest(
]);
runTest(
- // No metadata field in first bucket.
+ // Null metadata field in first bucket. Temporarily use null meta instead of missing meta
+ // to accommodate the new $_internalUnpackBucket behavior, in which null meta in a bucket
+ // is materialized as "null" meta.
+ // TODO SERVER-55213: Need to test both the missing meta case and the null meta case.
[
- {_id: 0, time: t[0], x: 0},
- {_id: 1, time: t[1], x: 10},
+ {_id: 0, time: t[0], meta: null, x: 0},
+ {_id: 1, time: t[1], meta: null, x: 10},
],
[
{_id: 2, time: t[2], meta: 123, x: 20},
@@ -134,10 +137,13 @@ runTest(
{_id: 0, time: t[0], meta: [1, 2, 3], x: 0},
{_id: 1, time: t[1], meta: [1, 2, 3], x: 10},
],
- // No metadata field in second bucket.
+ // Null metadata field in second bucket. Temporarily use null meta instead of missing meta
+ // to accommodate the new $_internalUnpackBucket behavior, in which null meta in a bucket
+ // is materialized as "null" meta.
+ // TODO SERVER-55213: Need to test both the missing meta case and the null meta case.
[
- {_id: 2, time: t[2], x: 20},
- {_id: 3, time: t[3], x: 30},
+ {_id: 2, time: t[2], meta: null, x: 20},
+ {_id: 3, time: t[3], meta: null, x: 30},
]);
runTest(
diff --git a/jstests/noPassthrough/timeseries_insert_ordered_false.js b/jstests/noPassthrough/timeseries_insert_ordered_false.js
index e38147f9467..1e1e95d7fc8 100644
--- a/jstests/noPassthrough/timeseries_insert_ordered_false.js
+++ b/jstests/noPassthrough/timeseries_insert_ordered_false.js
@@ -33,10 +33,13 @@ resetColl();
configureFailPoint(conn, 'failTimeseriesInsert', {metadata: 'fail'});
+// Temporarily use null meta instead of missing meta to accommodate the new $_internalUnpackBucket
+// behavior, in which null meta in a bucket is materialized as "null" meta.
+// TODO SERVER-55213: Remove 'meta: null'.
const docs = [
- {_id: 0, [timeFieldName]: ISODate()},
- {_id: 1, [timeFieldName]: ISODate()},
- {_id: 2, [timeFieldName]: ISODate()},
+ {_id: 0, meta: null, [timeFieldName]: ISODate()},
+ {_id: 1, meta: null, [timeFieldName]: ISODate()},
+ {_id: 2, meta: null, [timeFieldName]: ISODate()},
];
let res = assert.commandWorked(coll.insert(docs, {ordered: false}));
diff --git a/jstests/noPassthrough/timeseries_insert_ordered_true.js b/jstests/noPassthrough/timeseries_insert_ordered_true.js
index d444981cbb4..7ef189b5ffc 100644
--- a/jstests/noPassthrough/timeseries_insert_ordered_true.js
+++ b/jstests/noPassthrough/timeseries_insert_ordered_true.js
@@ -33,10 +33,13 @@ resetColl();
configureFailPoint(conn, 'failTimeseriesInsert', {metadata: 'fail'});
+// Temporarily use null meta instead of missing meta to accommodate the new $_internalUnpackBucket
+// behavior, in which null meta in a bucket is materialized as "null" meta.
+// TODO SERVER-55213: Remove 'meta: null'.
const docs = [
- {_id: 0, [timeFieldName]: ISODate()},
- {_id: 1, [timeFieldName]: ISODate()},
- {_id: 2, [timeFieldName]: ISODate()},
+ {_id: 0, meta: null, [timeFieldName]: ISODate()},
+ {_id: 1, meta: null, [timeFieldName]: ISODate()},
+ {_id: 2, meta: null, [timeFieldName]: ISODate()},
];
let res = assert.commandWorked(coll.insert(docs, {ordered: true}));
diff --git a/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp b/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
index b20be4cdd68..bb156781132 100644
--- a/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
+++ b/src/mongo/db/pipeline/document_source_internal_unpack_bucket.cpp
@@ -225,13 +225,13 @@ void BucketUnpacker::reset(BSONObj&& bucket) {
_metaValue = _bucket[kBucketMetaFieldName];
if (_spec.metaField) {
- // The spec indicates that there should be a metadata region. Missing metadata in this case
- // is expressed with null, so the field is expected to be present. We also disallow
- // undefined since the undefined BSON type is deprecated.
+ // The spec indicates that there might be a metadata region. Missing metadata in
+ // measurements is expressed with missing metadata in a bucket. But we disallow undefined
+ // since the undefined BSON type is deprecated.
uassert(5369600,
- "The $_internalUnpackBucket stage requires metadata to be present in a bucket if "
- "metaField parameter is provided",
- (_metaValue.type() != BSONType::Undefined) && _metaValue);
+ "The $_internalUnpackBucket stage allows metadata to be absent or otherwise, it "
+ "must not be the deprecated undefined bson type",
+ !_metaValue || _metaValue.type() != BSONType::Undefined);
} else {
// If the spec indicates that the time series collection has no metadata field, then we
// should not find a metadata region in the underlying bucket documents.
@@ -276,7 +276,8 @@ Document BucketUnpacker::getNext() {
measurement.addField(_spec.timeField, Value{timeElem});
}
- if (_includeMetaField && !_metaValue.isNull()) {
+ // Includes metaField when we're instructed to do so and metaField value exists.
+ if (_includeMetaField && _metaValue) {
measurement.addField(*_spec.metaField, Value{_metaValue});
}
diff --git a/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp b/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
index ae409de7e69..fcc63c7aaaa 100644
--- a/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
+++ b/src/mongo/db/pipeline/document_source_internal_unpack_bucket_test/unpack_bucket_exec_test.cpp
@@ -560,30 +560,33 @@ TEST_F(InternalUnpackBucketExecTest, BucketUnpackerThrowsOnUndefinedMetadata) {
ASSERT_THROWS_CODE(unpack->getNext(), AssertionException, 5369600);
}
-TEST_F(InternalUnpackBucketExecTest, BucketUnpackerThrowsOnMissingMetadataWhenExpectedInBuckets) {
+TEST_F(InternalUnpackBucketExecTest, BucketUnpackerThrowsWhenMetadataIsPresentUnexpectedly) {
auto expCtx = getExpCtx();
auto spec =
BSON("$_internalUnpackBucket"
<< BSON("exclude" << BSONArray() << DocumentSourceInternalUnpackBucket::kTimeFieldName
- << kUserDefinedTimeName
- << DocumentSourceInternalUnpackBucket::kMetaFieldName
- << kUserDefinedMetaName));
+ << kUserDefinedTimeName));
auto unpack = DocumentSourceInternalUnpackBucket::createFromBson(spec.firstElement(), expCtx);
auto source =
- DocumentSourceMock::createForTest({"{data: {_id: {'1':1, "
- "'0':2, '2': 3}, time: {'1':1, '0': 2, '2': 3}}}"},
+ DocumentSourceMock::createForTest({"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'1':1, "
+ "'0':2, '2': 3}, time: {'1':1, '0': 2, '2': 3}}}",
+ "{meta: null, data: {_id: {'1':4, "
+ "'0':5, '2':6}, time: {'1':4, '0': 5, '2': 6}}}"},
expCtx);
unpack->setSource(source.get());
- ASSERT_THROWS_CODE(unpack->getNext(), AssertionException, 5369600);
+
+ ASSERT_THROWS_CODE(unpack->getNext(), AssertionException, 5369601);
}
-TEST_F(InternalUnpackBucketExecTest, BucketUnpackerThrowsWhenMetadataIsPresentUnexpectedly) {
+TEST_F(InternalUnpackBucketExecTest, BucketUnpackerHandlesNullMetadata) {
auto expCtx = getExpCtx();
auto spec =
BSON("$_internalUnpackBucket"
<< BSON("exclude" << BSONArray() << DocumentSourceInternalUnpackBucket::kTimeFieldName
- << kUserDefinedTimeName));
+ << kUserDefinedTimeName
+ << DocumentSourceInternalUnpackBucket::kMetaFieldName
+ << kUserDefinedMetaName));
auto unpack = DocumentSourceInternalUnpackBucket::createFromBson(spec.firstElement(), expCtx);
auto source =
@@ -594,10 +597,39 @@ TEST_F(InternalUnpackBucketExecTest, BucketUnpackerThrowsWhenMetadataIsPresentUn
expCtx);
unpack->setSource(source.get());
- ASSERT_THROWS_CODE(unpack->getNext(), AssertionException, 5369601);
+ auto next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(),
+ Document(fromjson("{time: 1, myMeta: {m1: 999, m2: 9999}, _id: 1}")));
+
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(),
+ Document(fromjson("{time: 2, myMeta: {m1: 999, m2: 9999}, _id: 2}")));
+
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(),
+ Document(fromjson("{time: 3, myMeta: {m1: 999, m2: 9999}, _id: 3}")));
+
+ // Second bucket
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(), Document(fromjson("{time: 4, myMeta: null, _id: 4}")));
+
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(), Document(fromjson("{time: 5, myMeta: null, _id: 5}")));
+
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isAdvanced());
+ ASSERT_DOCUMENT_EQ(next.getDocument(), Document(fromjson("{time: 6, myMeta: null, _id: 6}")));
+
+ next = unpack->getNext();
+ ASSERT_TRUE(next.isEOF());
}
-TEST_F(InternalUnpackBucketExecTest, BucketUnpackerHandlesNullMetadata) {
+TEST_F(InternalUnpackBucketExecTest, BucketUnpackerHandlesMissingMetadata) {
auto expCtx = getExpCtx();
auto spec =
BSON("$_internalUnpackBucket"
@@ -607,12 +639,26 @@ TEST_F(InternalUnpackBucketExecTest, BucketUnpackerHandlesNullMetadata) {
<< kUserDefinedMetaName));
auto unpack = DocumentSourceInternalUnpackBucket::createFromBson(spec.firstElement(), expCtx);
- auto source =
- DocumentSourceMock::createForTest({"{meta: {'m1': 999, 'm2': 9999}, data: {_id: {'1':1, "
- "'0':2, '2': 3}, time: {'1':1, '0': 2, '2': 3}}}",
- "{meta: null, data: {_id: {'1':4, "
- "'0':5, '2':6}, time: {'1':4, '0': 5, '2': 6}}}"},
- expCtx);
+ auto source = DocumentSourceMock::createForTest(
+ {
+ R"(
+{
+ meta: {
+ 'm1': 999, 'm2': 9999
+ },
+ data: {
+ _id: {'1':1, '0':2, '2': 3},
+ time: {'1':1, '0': 2, '2': 3}
+ }
+})",
+ R"(
+{
+ data: {
+ _id: {'1':4, '0':5, '2':6},
+ time: {'1':4, '0': 5, '2': 6}
+ }
+})"},
+ expCtx);
unpack->setSource(source.get());
auto next = unpack->getNext();