summaryrefslogtreecommitdiff
path: root/jstests/core
diff options
context:
space:
mode:
Diffstat (limited to 'jstests/core')
-rw-r--r--jstests/core/apitest_db_profile_level.js32
-rw-r--r--jstests/core/arrayfind8.js10
-rw-r--r--jstests/core/bench_test2.js1
-rw-r--r--jstests/core/cover_null_queries.js16
-rw-r--r--jstests/core/field_name_empty.js39
-rw-r--r--jstests/core/profile_update.js27
-rw-r--r--jstests/core/timeseries/nondefault_collation.js12
-rw-r--r--jstests/core/timeseries/timeseries_index_partial.js25
-rw-r--r--jstests/core/timeseries/timeseries_index_use.js25
-rw-r--r--jstests/core/timeseries/timeseries_match_pushdown.js411
-rw-r--r--jstests/core/timeseries/timeseries_match_pushdown_with_project.js130
11 files changed, 671 insertions, 57 deletions
diff --git a/jstests/core/apitest_db_profile_level.js b/jstests/core/apitest_db_profile_level.js
index cc01b2914ce..f32b996f8ce 100644
--- a/jstests/core/apitest_db_profile_level.js
+++ b/jstests/core/apitest_db_profile_level.js
@@ -3,6 +3,7 @@
* @tags: [
* does_not_support_stepdowns,
* requires_profiling,
+ * requires_fcv_62,
* ]
*/
@@ -19,24 +20,23 @@ assert(db.setProfilingLevel, "setProfilingLevel");
// A test-specific database is used for profiler testing so as not to interfere with
// other tests that modify profiler level, when run in parallel.
-var profileLevelDB = db.getSiblingDB("apitest_db_profile_level");
+const profileLevelDB = db.getSiblingDB("apitest_db_profile_level");
-// Checks for the log that was expected to be created when profile level changed.
-function profilerChangeWasLogged({from, to} = {}) {
+// Checks the log for the expected change in profile level and applicable database.
+function profilerChangeWasLogged({from, to, db}) {
const globalLog = assert.commandWorked(profileLevelDB.adminCommand({getLog: 'global'}));
const fieldMatcher = {msg: "Profiler settings changed"};
- if (from && to) {
- const lines = [...findMatchingLogLines(globalLog.log, fieldMatcher)];
- return lines.find(line => line.match(new RegExp(/"from":{/.source + from.source)) &&
- line.match(new RegExp(/"to":{/.source + to.source)));
- } else {
- return findMatchingLogLine(globalLog.log, fieldMatcher);
- }
+ const lines = [...findMatchingLogLines(globalLog.log, fieldMatcher)];
+ const matches = lines.filter((line) => {
+ const attr = JSON.parse(line).attr;
+ return attr.from.level == from && attr.to.level == to && attr.db == db;
+ });
+ return matches.length ? matches : false;
}
profileLevelDB.getProfilingLevel();
-assert(!profilerChangeWasLogged({from: /"level":0/, to: /"level":-1/}),
+assert(!profilerChangeWasLogged({from: 0, to: -1, db: profileLevelDB}),
"Didn't expect anything to be logged");
assert.throws(() => {
@@ -45,27 +45,27 @@ assert.throws(() => {
profileLevelDB.setProfilingLevel(0);
assert(profileLevelDB.getProfilingLevel() == 0, "prof level 0");
-assert(profilerChangeWasLogged({from: /"level":0/, to: /"level":0/}),
+assert(profilerChangeWasLogged({from: 0, to: 0, db: profileLevelDB}),
"Didn't find expected log line");
profileLevelDB.setProfilingLevel(1);
assert(profileLevelDB.getProfilingLevel() == 1, "p1");
-assert(profilerChangeWasLogged({from: /"level":0/, to: /"level":1/}),
+assert(profilerChangeWasLogged({from: 0, to: 1, db: profileLevelDB}),
"Didn't find expected log line");
profileLevelDB.setProfilingLevel(2);
assert(profileLevelDB.getProfilingLevel() == 2, "p2");
-assert(profilerChangeWasLogged({from: /"level":1/, to: /"level":2/}),
+assert(profilerChangeWasLogged({from: 1, to: 2, db: profileLevelDB}),
"Didn't find expected log line");
profileLevelDB.setProfilingLevel(0);
assert(profileLevelDB.getProfilingLevel() == 0, "prof level 0");
-assert(profilerChangeWasLogged({from: /"level":2/, to: /"level":0/}),
+assert(profilerChangeWasLogged({from: 2, to: 0, db: profileLevelDB}),
"Didn't find expected log line");
assert.throws(() => {
profileLevelDB.setProfilingLevel(10);
});
// Check that didn't log an invalid profile level change.
-assert(!profilerChangeWasLogged({from: /"level":0/, to: /"level":10/}), "Didn't expect log line");
+assert(!profilerChangeWasLogged({from: 0, to: 10, db: profileLevelDB}), "Didn't expect log line");
})();
diff --git a/jstests/core/arrayfind8.js b/jstests/core/arrayfind8.js
index 87a3a8d701a..087410efab2 100644
--- a/jstests/core/arrayfind8.js
+++ b/jstests/core/arrayfind8.js
@@ -5,8 +5,9 @@
(function() {
"use strict";
-const coll = db.jstests_arrayfind8;
-coll.drop();
+const collNamePrefix = 'jstests_arrayfind8_';
+let collCount = 0;
+let coll;
// May be changed during the test.
let currentIndexSpec = {a: 1};
@@ -55,6 +56,8 @@ function checkMatch(bothMatch, elemMatch, nonElemMatch, standardQuery, elemMatch
* @param additionalConstraints - additional query parameters not generated from @param subQuery
*/
function checkQuery(subQuery, bothMatch, elemMatch, nonElemMatch, additionalConstraints) {
+ const collNameSuffix = collCount++;
+ coll = db.getCollection(collNamePrefix + 'noindex_' + collNameSuffix);
coll.drop();
additionalConstraints = additionalConstraints || {};
@@ -86,7 +89,8 @@ function checkQuery(subQuery, bothMatch, elemMatch, nonElemMatch, additionalCons
// Check matching and index bounds for a single key index.
- assert.eq(coll.drop(), true);
+ coll = db.getCollection(collNamePrefix + 'index_' + collNameSuffix);
+ coll.drop();
insertValueIfNotNull(bothMatch);
insertValueIfNotNull(elemMatch);
// The nonElemMatch document is not tested here, as it will often make the index multikey.
diff --git a/jstests/core/bench_test2.js b/jstests/core/bench_test2.js
index 4d03c3b5fef..d47e0020699 100644
--- a/jstests/core/bench_test2.js
+++ b/jstests/core/bench_test2.js
@@ -1,6 +1,7 @@
/**
* @tags: [
* uses_multiple_connections,
+ * uses_parallel_shell,
* ]
*/
(function() {
diff --git a/jstests/core/cover_null_queries.js b/jstests/core/cover_null_queries.js
index c17b9e71929..7054a16039d 100644
--- a/jstests/core/cover_null_queries.js
+++ b/jstests/core/cover_null_queries.js
@@ -11,6 +11,7 @@
load("jstests/aggregation/extras/utils.js"); // For arrayEq().
load("jstests/libs/analyze_plan.js"); // For getAggPlanStages() and getPlanStages().
+load("jstests/libs/clustered_collections/clustered_collection_util.js");
const coll = db.cover_null_queries;
coll.drop();
@@ -95,6 +96,15 @@ function validateCountAggCmdOutputAndPlan({filter, expectedStages, expectedCount
validateStages({cmdObj, expectedStages, isAgg: true});
}
+function getExpectedStagesIndexScanAndFetch(extraStages) {
+ const clustered = ClusteredCollectionUtil.areAllCollectionsClustered(db.getMongo());
+ const result = clustered ? {"CLUSTERED_IXSCAN": 1} : {"FETCH": 1, "IXSCAN": 1};
+ for (const stage in extraStages) {
+ result[stage] = extraStages[stage];
+ }
+ return result;
+}
+
assert.commandWorked(coll.createIndex({a: 1, _id: 1}));
// Verify count({a: null}) can be covered by an index. In the simplest case we can use two count
@@ -272,18 +282,18 @@ validateFindCmdOutputAndPlan({
validateSimpleCountCmdOutputAndPlan({
filter: {a: null, _id: 3},
expectedCount: 1,
- expectedStages: {"FETCH": 1, "IXSCAN": 1, "OR": 0, "COUNT_SCAN": 0}
+ expectedStages: getExpectedStagesIndexScanAndFetch({"OR": 0, "COUNT_SCAN": 0}),
});
validateCountAggCmdOutputAndPlan({
filter: {a: null, _id: 3},
expectedCount: 1,
- expectedStages: {"FETCH": 1, "IXSCAN": 1, "OR": 0, "COUNT_SCAN": 0},
+ expectedStages: getExpectedStagesIndexScanAndFetch({"OR": 0, "COUNT_SCAN": 0}),
});
validateFindCmdOutputAndPlan({
filter: {a: null, _id: 3},
projection: {_id: 1},
expectedOutput: [{_id: 3}],
- expectedStages: {"IXSCAN": 1, "FETCH": 1, "PROJECTION_SIMPLE": 1},
+ expectedStages: getExpectedStagesIndexScanAndFetch({"PROJECTION_SIMPLE": 1}),
});
// Verify that if the index is multikey and the query searches for null and empty array values, then
diff --git a/jstests/core/field_name_empty.js b/jstests/core/field_name_empty.js
index 3f3757ed5c2..260256de7c6 100644
--- a/jstests/core/field_name_empty.js
+++ b/jstests/core/field_name_empty.js
@@ -23,22 +23,53 @@ assert.commandWorked(coll.insertMany([
{_id: 7, x: {"": 3}},
{_id: 8, x: {"": [3]}},
{_id: 9, x: [{"": 3}]},
- {_id: 10, x: [{"": [3]}]}
+ {_id: 10, x: [{"": [3]}]},
+ {_id: 11, x: {"": [{"": 3}]}},
+ {_id: 12, x: {"": {y: 3}}},
+ {_id: 13, "": [1]},
]));
function runTest({filter, expected} = {}) {
const result = coll.find(filter).toArray();
- assertArrayEq({actual: result, expected: expected});
+ const explain = coll.explain("executionStats").find(filter).finish();
+ assertArrayEq({actual: result, expected: expected, extraErrorMsg: tojson(explain)});
}
-runTest({filter: {".": 1}, expected: [{_id: 1, "": {"": 1}}]});
+runTest({filter: {".": 1}, expected: [{_id: 1, "": {"": 1}}, {_id: 13, "": [1]}]});
runTest({filter: {"..": 1}, expected: [{_id: 2, "": {"": {"": 1}}}]});
runTest({filter: {"...": 1}, expected: [{_id: 3, "": {"": {"": {"": 1}}}}]});
-runTest({filter: {"": 1}, expected: [{_id: 0, "": 1}, {_id: 4, "": 1, a: 1}]});
+runTest({filter: {"": 1}, expected: [{_id: 0, "": 1}, {_id: 4, "": 1, a: 1}, {_id: 13, "": [1]}]});
runTest({filter: {"": 1, a: 1}, expected: [{_id: 4, "": 1, a: 1}]});
runTest({filter: {"": 1, a: 2}, expected: []});
runTest({
filter: {'x.': 3},
expected: [{_id: 6, x: [3]}, {_id: 7, x: {"": 3}}, {_id: 8, x: {"": [3]}}]
});
+runTest({filter: {'x..y': 3}, expected: [{_id: 12, x: {"": {y: 3}}}]});
+runTest({filter: {'x..': 3}, expected: [{_id: 8, x: {"": [3]}}, {_id: 10, x: [{"": [3]}]}]});
+runTest({
+ filter: {$and: [{'x.': 3}, {_id: {$lt: 8}}]},
+ expected: [{_id: 6, x: [3]}, {_id: 7, x: {"": 3}}]
+});
+runTest({
+ filter: {"x.": {$exists: false}},
+ expected: [
+ {_id: 0, "": 1},
+ {_id: 1, "": {"": 1}},
+ {_id: 2, "": {"": {"": 1}}},
+ {_id: 3, "": {"": {"": {"": 1}}}},
+ {_id: 4, "": 1, a: 1},
+ {_id: 5, x: 3},
+ {_id: 13, "": [1]},
+ ]
+});
+runTest({
+ filter: {x: {$elemMatch: {"": 3}}},
+ expected: [{_id: 9, x: [{"": 3}]}, {_id: 10, x: [{"": [3]}]}]
+});
+runTest({filter: {x: {$elemMatch: {"": {$type: "array"}}}}, expected: [{_id: 10, x: [{"": [3]}]}]});
+runTest({
+ filter: {"x.": {$elemMatch: {"": 3}}},
+ expected: [{_id: 9, x: [{"": 3}]}, {_id: 10, x: [{"": [3]}]}, {_id: 11, x: {"": [{"": 3}]}}]
+});
})();
diff --git a/jstests/core/profile_update.js b/jstests/core/profile_update.js
index deeeb8ee8a3..ebd0505e58d 100644
--- a/jstests/core/profile_update.js
+++ b/jstests/core/profile_update.js
@@ -100,12 +100,9 @@ assert.commandWorked(coll.update({_id: "new value", a: 4}, {$inc: {b: 1}}, {upse
profileObj = getLatestProfilerEntry(testDB);
const collectionIsClustered = ClusteredCollectionUtil.areAllCollectionsClustered(db.getMongo());
-// A clustered collection has no actual index on _id. While a bounded collection scan is in
-// principle an efficient option, the query planner only defaults to collection scan if no suitable
-// index is available.
-const expectedPlan = collectionIsClustered ? "IXSCAN { a: 1 }" : "IXSCAN { _id: 1 }";
-const expectedKeysExamined = collectionIsClustered ? 1 : 0;
-const expectedDocsExamined = expectedKeysExamined;
+const expectedPlan = collectionIsClustered ? "CLUSTERED_IXSCAN" : "IXSCAN { _id: 1 }";
+const expectedKeysExamined = 0;
+const expectedDocsExamined = collectionIsClustered ? 1 : 0;
const expectedKeysInserted = collectionIsClustered ? 1 : 2;
assert.eq(profileObj.command,
@@ -149,13 +146,17 @@ for (var i = 0; i < indices.length; i++) {
const profileObj = profiles[i];
const index = indices[i];
- // A clustered collection has no actual index on _id. While a bounded collection scan is in
- // principle an efficient option, the query planner only defaults to collection scan if no
- // suitable index is available.
- const expectedPlan = collectionIsClustered ? "IXSCAN { a: 1 }" : "IXSCAN { _id: 1 }";
- const expectedKeysExamined = collectionIsClustered ? 1 : 0;
- const expectedDocsExamined = expectedKeysExamined;
- const expectedKeysInserted = collectionIsClustered ? 1 : 2;
+ let expectedPlan = "IXSCAN { _id: 1 }";
+ let expectedKeysExamined = 0;
+ let expectedDocsExamined = 0;
+ let expectedKeysInserted = 2;
+
+ if (collectionIsClustered) {
+ expectedPlan = "CLUSTERED_IXSCAN";
+ expectedKeysExamined = 0;
+ expectedDocsExamined = (i + 1 == indices.length) ? 1 : 2;
+ expectedKeysInserted = 1;
+ }
assert.eq(
profileObj.command,
diff --git a/jstests/core/timeseries/nondefault_collation.js b/jstests/core/timeseries/nondefault_collation.js
index 307d40cbfc3..4e791a81dd3 100644
--- a/jstests/core/timeseries/nondefault_collation.js
+++ b/jstests/core/timeseries/nondefault_collation.js
@@ -44,7 +44,7 @@ const englishCollation = {
};
const simpleCollation = {
- collation: {locale: "simple"}
+ locale: "simple"
};
assert.commandWorked(db.createCollection(coll.getName(), {
@@ -127,17 +127,17 @@ assert.commandWorked(coll.insert(
// its metadata using simple collation. These tests confirm that queries on the indexed field using
// nondefault (simple) collation use the index. They also confirm that queries that don't involve
// strings but do use default collation, on indexed fields, also use the index.
-const nonDefaultCollationQuery = coll.find({meta: 2}, {collation: englishCollation}).explain();
+const nonDefaultCollationQuery = coll.find({meta: 2}).collation(englishCollation).explain();
assert(aggPlanHasStage(nonDefaultCollationQuery, "IXSCAN"), nonDefaultCollationQuery);
-const simpleNonDefaultCollationQuery = coll.find({meta: 2}, simpleCollation).explain();
+const simpleNonDefaultCollationQuery = coll.find({meta: 2}).collation(simpleCollation).explain();
assert(aggPlanHasStage(simpleNonDefaultCollationQuery, "IXSCAN"), simpleNonDefaultCollationQuery);
-const defaultCollationQuery = coll.find({meta: 1}, {collation: defaultCollation}).explain();
+const defaultCollationQuery = coll.find({meta: 1}).collation(defaultCollation).explain();
assert(aggPlanHasStage(defaultCollationQuery, "IXSCAN"), defaultCollationQuery);
// This test guarantees that the bucket's min/max matches the query's min/max regardless of
// collation.
-results = coll.find({value: {$gt: "4"}}, simpleCollation);
-assert.eq(4, results.itcount());
+results = coll.find({value: {$gt: "4"}}).collation(simpleCollation);
+assert.eq(1, results.itcount());
}());
diff --git a/jstests/core/timeseries/timeseries_index_partial.js b/jstests/core/timeseries/timeseries_index_partial.js
index 55ece446457..7d97209173e 100644
--- a/jstests/core/timeseries/timeseries_index_partial.js
+++ b/jstests/core/timeseries/timeseries_index_partial.js
@@ -97,10 +97,24 @@ assert.commandFailedWithCode(coll.createIndex({a: 1}, {partialFilterExpression:
// Test creating and using a partial index.
{
- // Make sure the query uses the {a: 1} index.
+ let ixscanInWinningPlan = 0;
+
+ // Make sure the {a: 1} index was considered for this query.
function checkPlan(predicate) {
const explain = coll.find(predicate).explain();
- const scan = getAggPlanStage(explain, 'IXSCAN');
+ let scan = getAggPlanStage(explain, 'IXSCAN');
+ // If scan is not present, check rejected plans
+ if (scan === null) {
+ const rejectedPlans = getRejectedPlans(getAggPlanStage(explain, "$cursor")["$cursor"]);
+ if (rejectedPlans.length === 1) {
+ const scans = getPlanStages(getRejectedPlan(rejectedPlans[0]), "IXSCAN");
+ if (scans.length === 1) {
+ scan = scans[0];
+ }
+ }
+ } else {
+ ixscanInWinningPlan++;
+ }
const indexes = buckets.getIndexes();
assert(scan,
"Expected an index scan for predicate: " + tojson(predicate) +
@@ -109,7 +123,7 @@ assert.commandFailedWithCode(coll.createIndex({a: 1}, {partialFilterExpression:
}
// Make sure the query results match a collection-scan plan.
function checkResults(predicate) {
- const result = coll.aggregate({$match: predicate}).toArray();
+ const result = coll.aggregate([{$match: predicate}], {hint: {a: 1}}).toArray();
const unindexed =
coll.aggregate([{$_internalInhibitOptimization: {}}, {$match: predicate}]).toArray();
assert.docEq(result, unindexed);
@@ -152,10 +166,6 @@ assert.commandFailedWithCode(coll.createIndex({a: 1}, {partialFilterExpression:
const t1 = ISODate('2000-01-01T00:00:01Z');
const t2 = ISODate('2000-01-01T00:00:02Z');
- // When the collection is sharded, there is an index on time that can win, instead of the
- // partial index. So only check the results in that case, not the plan.
- const check = FixtureHelpers.isSharded(buckets) ? checkResults : checkPlanAndResults;
-
assert.commandWorked(coll.dropIndex({a: 1}));
assert.commandWorked(
coll.createIndex({a: 1}, {partialFilterExpression: {[timeField]: {$lt: t1}}}));
@@ -184,6 +194,7 @@ assert.commandFailedWithCode(coll.createIndex({a: 1}, {partialFilterExpression:
assert.commandWorked(coll.dropIndex({a: 1}));
assert.sameMembers(coll.getIndexes(), extraIndexes);
assert.sameMembers(buckets.getIndexes(), extraBucketIndexes);
+ assert.gt(ixscanInWinningPlan, 0);
}
// Check that partialFilterExpression can use a mixture of metadata, time, and measurement fields,
diff --git a/jstests/core/timeseries/timeseries_index_use.js b/jstests/core/timeseries/timeseries_index_use.js
index b1106e4ead0..ebcea9d8e6b 100644
--- a/jstests/core/timeseries/timeseries_index_use.js
+++ b/jstests/core/timeseries/timeseries_index_use.js
@@ -58,8 +58,9 @@ const generateTest = (useHint) => {
/**
* Creates the index specified by the spec and options, then explains the query to ensure
- * that the created index is used. Runs the query and verifies that the expected number of
- * documents are matched. Finally, deletes the created index.
+ * that the created index is used or was considered by multi-planner.
+ * Runs the query and verifies that the expected number of documents are matched.
+ * Finally, deletes the created index.
*/
const testQueryUsesIndex = function(
filter, numMatches, indexSpec, indexOpts = {}, queryOpts = {}) {
@@ -75,9 +76,23 @@ const generateTest = (useHint) => {
assert.eq(numMatches, query.itcount());
const explain = query.explain();
- const ixscan = getAggPlanStage(explain, "IXSCAN");
- assert.neq(null, ixscan, tojson(explain));
- assert.eq("testIndexName", ixscan.indexName, tojson(ixscan));
+ if (useHint) {
+ const ixscan = getAggPlanStage(explain, "IXSCAN");
+ assert.neq(null, ixscan, tojson(explain));
+ assert.eq("testIndexName", ixscan.indexName, tojson(ixscan));
+ } else {
+ let ixscan = getAggPlanStage(explain, "IXSCAN");
+ // If ixscan is not present, check rejected plans
+ if (ixscan === null) {
+ const rejectedPlans =
+ getRejectedPlans(getAggPlanStage(explain, "$cursor")["$cursor"]);
+ assert.eq(1, rejectedPlans.length);
+ const ixscans = getPlanStages(getRejectedPlan(rejectedPlans[0]), "IXSCAN");
+ assert.eq(1, ixscans.length);
+ ixscan = ixscans[0];
+ }
+ assert.eq("testIndexName", ixscan.indexName, tojson(ixscan));
+ }
assert.commandWorked(coll.dropIndex("testIndexName"));
};
diff --git a/jstests/core/timeseries/timeseries_match_pushdown.js b/jstests/core/timeseries/timeseries_match_pushdown.js
new file mode 100644
index 00000000000..7fe13e878a6
--- /dev/null
+++ b/jstests/core/timeseries/timeseries_match_pushdown.js
@@ -0,0 +1,411 @@
+/**
+ * Tests that the $match stage followed by unpacking stage has been pushed down with correct
+ * predicates.
+ *
+ * @tags: [
+ * requires_timeseries,
+ * requires_fcv_62,
+ * does_not_support_stepdowns,
+ * directly_against_shardsvrs_incompatible,
+ * ]
+ */
+(function() {
+"use strict";
+
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStages
+
+const coll = db.timeseries_match_pushdown;
+coll.drop();
+
+const timeField = 'time';
+const metaField = 'meta';
+const measureField = 'a';
+assert.commandWorked(db.createCollection(coll.getName(), {timeseries: {timeField, metaField}}));
+
+// Insert documents into the collection. The bucketing is designed so that some buckets match the
+// query entirely, some buckets match the query partially, and some with no matches.
+assert.commandWorked(coll.insert([
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+]));
+const aTime = ISODate('2022-01-01T00:00:03');
+const bTime = ISODate('2022-01-01T00:00:07');
+const bMeta = 3;
+const aMeasure = 3;
+
+/**
+ * Runs a $match query with the specified 'eventFilter' or a 'pipeline'.
+ * Assert the 'wholeBucketFilter' is attached correctly to the unpacking stage, and has the expected
+ * result 'expectedDocs'.
+ */
+const runTest = function({pipeline, eventFilter, wholeBucketFilter, expectedDocs}) {
+ if (!pipeline) {
+ pipeline = [{$match: eventFilter}];
+ }
+ const explain = assert.commandWorked(coll.explain().aggregate(pipeline));
+ const unpackStages = getAggPlanStages(explain, '$_internalUnpackBucket');
+ assert.eq(1,
+ unpackStages.length,
+ "Should only have a single $_internalUnpackBucket stage: " + tojson(explain));
+ const unpackStage = unpackStages[0].$_internalUnpackBucket;
+ assert.docEq(unpackStage.eventFilter, eventFilter, "Incorrect eventFilter: " + tojson(explain));
+ if (wholeBucketFilter) {
+ assert.docEq(unpackStage.wholeBucketFilter,
+ wholeBucketFilter,
+ "Incorrect wholeBucketFilter: " + tojson(explain));
+ } else {
+ assert(!unpackStage.wholeBucketFilter, "Incorrect wholeBucketFilter: " + tojson(explain));
+ }
+
+ const docs = coll.aggregate([...pipeline, {$sort: {time: 1}}]).toArray();
+ assert.eq(docs.length, expectedDocs.length, "Incorrect docs: " + tojson(docs));
+ expectedDocs.forEach((doc, i) => {
+ assert.docEq(doc, expectedDocs[i], "Incorrect docs: " + tojson(docs));
+ });
+};
+
+const minTimeField = `control.min.${timeField}`;
+const maxTimeField = `control.max.${timeField}`;
+
+// $gt on time
+runTest({
+ eventFilter: {[timeField]: {$gt: aTime}},
+ wholeBucketFilter: {[minTimeField]: {$gt: aTime}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $gt on measurement
+runTest({
+ eventFilter: {[measureField]: {$gt: aMeasure}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $gt in $expr on time
+runTest({
+ pipeline: [{$match: {$expr: {$gt: [`$${timeField}`, {$const: aTime}]}}}],
+ eventFilter: {
+ $and: [
+ {[timeField]: {$_internalExprGt: aTime}},
+ {$expr: {$gt: [`$${timeField}`, {$const: aTime}]}},
+ ]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$_internalExprGt: aTime}},
+ {[minTimeField]: {$_internalExprGt: aTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $gte on time
+runTest({
+ eventFilter: {[timeField]: {$gte: aTime}},
+ wholeBucketFilter: {[minTimeField]: {$gte: aTime}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $gte on measurement
+runTest({
+ eventFilter: {[measureField]: {$gte: aMeasure}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $gte in $expr on time
+runTest({
+ pipeline: [{$match: {$expr: {$gte: [`$${timeField}`, {$const: aTime}]}}}],
+ eventFilter: {
+ $and: [
+ {[timeField]: {$_internalExprGte: aTime}},
+ {$expr: {$gte: [`$${timeField}`, {$const: aTime}]}},
+ ]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$_internalExprGte: aTime}},
+ {[minTimeField]: {$_internalExprGte: aTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $lt on time
+runTest({
+ eventFilter: {[timeField]: {$lt: aTime}},
+ wholeBucketFilter: {[maxTimeField]: {$lt: aTime}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ ],
+});
+
+// $lt on measurement
+runTest({
+ eventFilter: {[measureField]: {$lt: aMeasure}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ ],
+});
+
+// $lt in $expr on time
+runTest({
+ pipeline: [{$match: {$expr: {$lt: [`$${timeField}`, {$const: aTime}]}}}],
+ eventFilter: {
+ $and: [
+ {[timeField]: {$_internalExprLt: aTime}},
+ {$expr: {$lt: [`$${timeField}`, {$const: aTime}]}},
+ ]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[maxTimeField]: {$_internalExprLt: aTime}},
+ {[maxTimeField]: {$_internalExprLt: aTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ ],
+});
+
+// $lte on time
+runTest({
+ eventFilter: {[timeField]: {$lte: aTime}},
+ wholeBucketFilter: {[maxTimeField]: {$lte: aTime}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $lte in $expr on time
+runTest({
+ pipeline: [{$match: {$expr: {$lte: [`$${timeField}`, {$const: aTime}]}}}],
+ eventFilter: {
+ $and: [
+ {[timeField]: {$_internalExprLte: aTime}},
+ {$expr: {$lte: [`$${timeField}`, {$const: aTime}]}},
+ ]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[maxTimeField]: {$_internalExprLte: aTime}},
+ {[maxTimeField]: {$_internalExprLte: aTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $lte on measurement
+runTest({
+ eventFilter: {[measureField]: {$lte: aMeasure}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $eq on time
+runTest({
+ eventFilter: {[timeField]: {$eq: aTime}},
+ wholeBucketFilter: {$and: [{[minTimeField]: {$eq: aTime}}, {[maxTimeField]: {$eq: aTime}}]},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $eq in $expr on time
+runTest({
+ pipeline: [{$match: {$expr: {$eq: [`$${timeField}`, {$const: aTime}]}}}],
+ eventFilter: {
+ $and: [
+ {[timeField]: {$_internalExprEq: aTime}},
+ {$expr: {$eq: [`$${timeField}`, {$const: aTime}]}},
+ ]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$_internalExprEq: aTime}},
+ {[maxTimeField]: {$_internalExprEq: aTime}},
+ {[minTimeField]: {$_internalExprEq: aTime}},
+ {[maxTimeField]: {$_internalExprEq: aTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $eq on measurement
+runTest({
+ eventFilter: {[measureField]: {$eq: aMeasure}},
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ ],
+});
+
+// $and on time
+runTest({
+ eventFilter: {$and: [{[timeField]: {$gt: aTime}}, {[timeField]: {$lt: bTime}}]},
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$gt: aTime}},
+ {[maxTimeField]: {$lt: bTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ ],
+});
+
+// $or on time
+runTest({
+ eventFilter: {$or: [{[timeField]: {$lte: aTime}}, {[timeField]: {$gte: bTime}}]},
+ wholeBucketFilter: {
+ $or: [
+ {[maxTimeField]: {$lte: aTime}},
+ {[minTimeField]: {$gte: bTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// $match on time and meta
+runTest({
+ pipeline: [{$match: {$and: [{[timeField]: {$gt: aTime}}, {[metaField]: {$lte: bMeta}}]}}],
+ eventFilter: {[timeField]: {$gt: aTime}},
+ wholeBucketFilter: {
+ [minTimeField]: {$gt: aTime},
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:07'), [measureField]: 7, [metaField]: 3},
+ {[timeField]: ISODate('2022-01-01T00:00:08'), [measureField]: 8, [metaField]: 3},
+ ],
+});
+
+// $match on time or meta
+runTest({
+ eventFilter: {$or: [{[timeField]: {$lte: aTime}}, {[metaField]: {$gt: bMeta}}]},
+ wholeBucketFilter: {
+ $or: [
+ {[maxTimeField]: {$lte: aTime}},
+ {[metaField]: {$gt: bMeta}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:01'), [measureField]: 1, [metaField]: 0},
+ {[timeField]: ISODate('2022-01-01T00:00:02'), [measureField]: 2, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:03'), [measureField]: 3, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:09'), [measureField]: 9, [metaField]: 4},
+ ],
+});
+
+// double $match
+runTest({
+ pipeline: [{$match: {[timeField]: {$gt: aTime}}}, {$match: {[timeField]: {$lt: bTime}}}],
+ eventFilter: {$and: [{[timeField]: {$gt: aTime}}, {[timeField]: {$lt: bTime}}]},
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$gt: aTime}},
+ {[maxTimeField]: {$lt: bTime}},
+ ]
+ },
+ expectedDocs: [
+ {[timeField]: ISODate('2022-01-01T00:00:04'), [measureField]: 4, [metaField]: 1},
+ {[timeField]: ISODate('2022-01-01T00:00:05'), [measureField]: 5, [metaField]: 2},
+ {[timeField]: ISODate('2022-01-01T00:00:06'), [measureField]: 6, [metaField]: 3},
+ ],
+});
+
+// triple $match
+runTest({
+ pipeline: [
+ {$match: {[timeField]: {$gt: aTime}}},
+ {$match: {[timeField]: {$lt: bTime}}},
+ {$match: {[timeField]: {$lt: aTime}}},
+ ],
+ eventFilter: {
+ $and:
+ [{[timeField]: {$gt: aTime}}, {[timeField]: {$lt: bTime}}, {[timeField]: {$lt: aTime}}]
+ },
+ wholeBucketFilter: {
+ $and: [
+ {[minTimeField]: {$gt: aTime}},
+ {[maxTimeField]: {$lt: bTime}},
+ {[maxTimeField]: {$lt: aTime}},
+ ]
+ },
+ expectedDocs: [],
+});
+})();
diff --git a/jstests/core/timeseries/timeseries_match_pushdown_with_project.js b/jstests/core/timeseries/timeseries_match_pushdown_with_project.js
new file mode 100644
index 00000000000..f63a4be214d
--- /dev/null
+++ b/jstests/core/timeseries/timeseries_match_pushdown_with_project.js
@@ -0,0 +1,130 @@
+/**
+ * Tests that the unpacking stage has correct unpacking behaviour when $match is pushed into it.
+ *
+ * @tags: [
+ * requires_timeseries,
+ * requires_fcv_62,
+ * does_not_support_stepdowns,
+ * directly_against_shardsvrs_incompatible,
+ * ]
+ */
+(function() {
+"use strict";
+
+load("jstests/libs/analyze_plan.js"); // For getAggPlanStages
+
+const coll = db.timeseries_match_pushdown_with_project;
+coll.drop();
+
+const timeField = 'time';
+const metaField = 'meta';
+assert.commandWorked(db.createCollection(coll.getName(), {timeseries: {timeField, metaField}}));
+
+const aTime = ISODate('2022-01-01T00:00:00');
+assert.commandWorked(coll.insert([
+ {[timeField]: aTime, a: 1, b: 1, _id: 1},
+ {[timeField]: aTime, a: 2, b: 2, _id: 2},
+ {[timeField]: aTime, a: 3, b: 3, _id: 3},
+ {[timeField]: aTime, a: 4, b: 4, _id: 4},
+ {[timeField]: aTime, a: 5, b: 5, _id: 5},
+ {[timeField]: aTime, a: 6, b: 6, _id: 6},
+ {[timeField]: aTime, a: 7, b: 7, _id: 7},
+ {[timeField]: aTime, a: 8, b: 8, _id: 8},
+ {[timeField]: aTime, a: 9, b: 9, _id: 9},
+]));
+
+/**
+ * Runs a 'pipeline', asserts the bucket unpacking 'behaviour' (either include or exclude) is
+ * expected.
+ */
+const runTest = function({pipeline, behaviour, expectedDocs}) {
+ const explain = assert.commandWorked(coll.explain().aggregate(pipeline));
+ const unpackStages = getAggPlanStages(explain, '$_internalUnpackBucket');
+ assert.eq(1,
+ unpackStages.length,
+ "Should only have a single $_internalUnpackBucket stage: " + tojson(explain));
+ const unpackStage = unpackStages[0].$_internalUnpackBucket;
+ if (behaviour.include) {
+ assert(unpackStage.include,
+ "Unpacking stage must have 'include' behaviour: " + tojson(explain));
+ assert.sameMembers(behaviour.include, unpackStage.include);
+ }
+ if (behaviour.exclude) {
+ assert(unpackStage.exclude,
+ "Unpacking stage must have 'exclude' behaviour: " + tojson(explain));
+ assert.sameMembers(behaviour.exclude, unpackStage.exclude);
+ }
+
+ const docs = coll.aggregate([...pipeline, {$sort: {a: 1, b: 1, _id: 1}}]).toArray();
+ assert.eq(docs.length, expectedDocs.length, "Incorrect docs: " + tojson(docs));
+    expectedDocs.forEach((doc, i) => {
+        assert.docEq(doc, docs[i], "Incorrect docs: " + tojson(docs));
+    });
+};
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {b: 1}}],
+ behaviour: {include: ['_id', 'a', 'b']},
+ expectedDocs: [
+ {a: 6, b: 6, _id: 6},
+ {a: 7, b: 7, _id: 7},
+ {a: 8, b: 8, _id: 8},
+ {a: 9, b: 9, _id: 9},
+ ],
+});
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {_id: 0, b: 1}}],
+ behaviour: {include: ['a', 'b']},
+ expectedDocs: [
+ {a: 6, b: 6},
+ {a: 7, b: 7},
+ {a: 8, b: 8},
+ {a: 9, b: 9},
+ ],
+});
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {a: 1}}],
+ behaviour: {include: ['_id', 'a']},
+ expectedDocs: [
+ {a: 6, _id: 6},
+ {a: 7, _id: 7},
+ {a: 8, _id: 8},
+ {a: 9, _id: 9},
+ ],
+});
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {_id: 0, a: 1}}],
+ behaviour: {include: ['a']},
+ expectedDocs: [
+ {a: 6},
+ {a: 7},
+ {a: 8},
+ {a: 9},
+ ],
+});
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {a: 0}}],
+ behaviour: {exclude: []},
+ expectedDocs: [
+ {[timeField]: aTime, b: 6, _id: 6},
+ {[timeField]: aTime, b: 7, _id: 7},
+ {[timeField]: aTime, b: 8, _id: 8},
+ {[timeField]: aTime, b: 9, _id: 9},
+ ],
+});
+
+runTest({
+ pipeline: [{$match: {a: {$gt: 5}}}, {$project: {b: 0}}],
+ behaviour: {exclude: []},
+ expectedDocs: [
+ {[timeField]: aTime, a: 6, _id: 6},
+ {[timeField]: aTime, a: 7, _id: 7},
+ {[timeField]: aTime, a: 8, _id: 8},
+ {[timeField]: aTime, a: 9, _id: 9},
+ ],
+});
+})();