author     Mihai Andrei <mihai.andrei@mongodb.com>  2023-03-03 15:50:27 +0000
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>  2023-03-03 22:46:18 +0000
commit     5a27856c9c24e138738a3f54b5a78b9d5f1656b8 (patch)
tree       0177c8310f40ccf2c41d8f4c7349fd132e6978f2 /jstests
parent     96763fa1fef7faa2513afd2618d4d039ee70a6fe (diff)
download   mongo-5a27856c9c24e138738a3f54b5a78b9d5f1656b8.tar.gz
SERVER-74262 Check IET when constructing set of equalities during sort analysis
(cherry picked from commit db5b677ff84361abca83ff60f7c62ee7668df951)
Diffstat (limited to 'jstests')
-rw-r--r--  jstests/core/query/sbe_plan_cache_autoparameterize_ixscan.js  |  91
-rw-r--r--  jstests/libs/analyze_plan.js                                   |  37
-rw-r--r--  jstests/noPassthrough/sbe_plan_cache_size_metric.js            |  12
3 files changed, 121 insertions(+), 19 deletions(-)
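
The observable behavior guarded by this patch (and exercised by the new test below) is that an
auto-parameterized SBE index scan plan cached by one query keeps returning correct results when it
is reused with different constants. A minimal shell sketch of that pattern, not part of the patch
(collection name and values are illustrative):

    const c = db.sbe_autoparam_repro;  // hypothetical collection
    c.drop();
    assert.commandWorked(c.createIndex({a: 1}));
    assert.commandWorked(c.insertMany([{a: 1}, {a: 2}, {a: 3}]));
    // The first query populates the SBE plan cache with an auto-parameterized plan.
    c.find({a: {$gte: 2, $lte: 2}}).sort({a: -1}).toArray();
    // The second query differs only in its constants, so it should reuse that entry.
    c.find({a: {$gte: 1, $lte: 2}}).sort({a: -1}).toArray();
    assert.eq(1, c.getPlanCache().list().length);
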
diff --git a/jstests/core/query/sbe_plan_cache_autoparameterize_ixscan.js b/jstests/core/query/sbe_plan_cache_autoparameterize_ixscan.js
new file mode 100644
index 00000000000..4280d54a700
--- /dev/null
+++ b/jstests/core/query/sbe_plan_cache_autoparameterize_ixscan.js
@@ -0,0 +1,91 @@
+/**
+ * Tests that auto-parameterized index scan plans are correctly stored in the SBE plan cache, and
+ * that they can be correctly recovered from the cache with new parameter values.
+ *
+ * @tags: [
+ * not_allowed_with_security_token,
+ * assumes_read_concern_unchanged,
+ * assumes_read_preference_unchanged,
+ * assumes_unsharded_collection,
+ * does_not_support_stepdowns,
+ * # The SBE plan cache was enabled by default in 6.3.
+ * requires_fcv_63,
+ * # Plan cache state is node-local and will not get migrated alongside tenant data.
+ * tenant_migration_incompatible,
+ * cqf_incompatible,
+ * ]
+ */
+(function() {
+"use strict";
+
+load("jstests/libs/sbe_util.js"); // For 'checkSBEEnabled'.
+load("jstests/libs/analyze_plan.js"); // For 'getQueryHashFromExplain'.
+
+// This test is specifically verifying the behavior of the SBE plan cache, which is only enabled
+// when SBE is enabled.
+if (!checkSBEEnabled(db)) {
+    jsTestLog("Skipping test because SBE is not enabled");
+    return;
+}
+
+const coll = db[jsTestName()];
+coll.drop();
+
+// Set up the collection with an index and a set of documents.
+assert.commandWorked(coll.createIndex({a: 1}));
+assert.commandWorked(coll.insertMany([{_id: 1, a: 1}, {_id: 2, a: 2}, {_id: 3, a: 3}]));
+const filter1 = {
+    a: {$gte: 2, $lte: 2}
+};
+const filter2 = {
+    a: {$gte: 1, $lte: 2}
+};
+const sortPattern = {
+    a: -1
+};
+
+// Create a cache entry using 'filter1'.
+assert.eq(0, coll.getPlanCache().list().length, "Expected 0 cache entries");
+const filter1Result = coll.find(filter1).sort(sortPattern).toArray();
+const expectedFilter1Result = [{_id: 2, a: 2}];
+assert.eq(expectedFilter1Result, filter1Result);
+const cacheEntries = coll.getPlanCache().list();
+assert.eq(1, cacheEntries.length, cacheEntries);
+const cacheEntry = cacheEntries[0];
+
+// Verify that our cache entry is pinned and active.
+assert(cacheEntry.isPinned, cacheEntry);
+assert(cacheEntry.isActive, cacheEntry);
+
+// Capture the results for 'filter2' and verify that it used the same plan cache entry as 'filter1'.
+const cacheResults = coll.find(filter2).sort(sortPattern).toArray();
+const expectedFilter2Result = [{_id: 2, a: 2}, {_id: 1, a: 1}];
+assert.eq(cacheResults, expectedFilter2Result);
+
+// There should still be exactly one plan cache entry.
+assert.eq(1, coll.getPlanCache().list().length, cacheEntries);
+
+// The plan cache key and the query hashes of both queries should match.
+const explain = coll.find(filter2).sort(sortPattern).explain();
+const planCacheKey = cacheEntry.planCacheKey;
+assert.neq(null, planCacheKey, cacheEntry);
+assert.eq(planCacheKey, getPlanCacheKeyFromExplain(explain, db), explain);
+
+const queryHash = cacheEntry.queryHash;
+assert.neq(null, queryHash, cacheEntry);
+assert.eq(queryHash, getQueryHashFromExplain(explain, db), explain);
+
+// Clear the plan cache, and run 'filter2' again. This time, verify that we create a cache entry
+// with the same planCacheKey and queryHash as before.
+coll.getPlanCache().clear();
+assert.eq(0, coll.getPlanCache().list().length, "Expected 0 cache entries");
+const results = coll.find(filter2).sort(sortPattern).toArray();
+const newCacheEntries = coll.getPlanCache().list();
+assert.eq(1, newCacheEntries.length, "Expected 1 cache entry");
+const newCacheEntry = newCacheEntries[0];
+assert.eq(newCacheEntry.planCacheKey, planCacheKey, newCacheEntry);
+assert.eq(newCacheEntry.queryHash, queryHash, newCacheEntry);
+
+// The query should also return the same results as before.
+assert.eq(results, cacheResults);
+}());
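
The entries the test reads through coll.getPlanCache().list() can also be correlated with an
explain's queryHash via the $planCacheStats aggregation stage, the pattern used by the
noPassthrough test further down. A rough sketch, assuming the collection and filters from the test
above:

    const explainOut = coll.find(filter2).sort(sortPattern).explain();
    const hash = getQueryHashFromExplain(explainOut, db);
    const matching = coll.aggregate([{$planCacheStats: {}}, {$match: {queryHash: hash}}]).toArray();
    assert.eq(1, matching.length, matching);
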
diff --git a/jstests/libs/analyze_plan.js b/jstests/libs/analyze_plan.js
index b16d021fa71..625a9201036 100644
--- a/jstests/libs/analyze_plan.js
+++ b/jstests/libs/analyze_plan.js
@@ -524,17 +524,36 @@ function assertStagesForExplainOfCommand({coll, cmdObj, expectedStages, stagesNo
}
/**
- * Get the "planCacheKey" from the explain result.
+ * Utility to obtain a value from 'explainRes' using 'getValueCallback'.
+ */
+function getFieldValueFromExplain(explainRes, getValueCallback) {
+    assert(explainRes.hasOwnProperty("queryPlanner"), explainRes);
+    const plannerOutput = explainRes.queryPlanner;
+    const fieldValue = getValueCallback(plannerOutput);
+    assert.eq(typeof fieldValue, "string");
+    return fieldValue;
+}
+
+/**
+ * Get the 'planCacheKey' from 'explainRes'.
*/
function getPlanCacheKeyFromExplain(explainRes, db) {
-    const hash = FixtureHelpers.isMongos(db) &&
-            explainRes.queryPlanner.hasOwnProperty("winningPlan") &&
-            explainRes.queryPlanner.winningPlan.hasOwnProperty("shards")
-        ? explainRes.queryPlanner.winningPlan.shards[0].planCacheKey
-        : explainRes.queryPlanner.planCacheKey;
-    assert.eq(typeof hash, "string");
-
-    return hash;
+    return getFieldValueFromExplain(explainRes, function(plannerOutput) {
+        return FixtureHelpers.isMongos(db) && plannerOutput.hasOwnProperty("winningPlan") &&
+                plannerOutput.winningPlan.hasOwnProperty("shards")
+            ? plannerOutput.winningPlan.shards[0].planCacheKey
+            : plannerOutput.planCacheKey;
+    });
+}
+
+/**
+ * Get the 'queryHash' from 'explainRes'.
+ */
+function getQueryHashFromExplain(explainRes, db) {
+    return getFieldValueFromExplain(explainRes, function(plannerOutput) {
+        return FixtureHelpers.isMongos(db) ? plannerOutput.winningPlan.shards[0].queryHash
+                                           : plannerOutput.queryHash;
+    });
}
/**
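
Both new helpers take the raw explain output together with the test's db handle so that they work
under both mongod and mongos fixtures. A minimal usage sketch, assuming jstests/libs/analyze_plan.js
and its dependencies are already loaded:

    const explainRes = coll.find({a: 1}).explain();
    const planCacheKey = getPlanCacheKeyFromExplain(explainRes, db);
    const queryHash = getQueryHashFromExplain(explainRes, db);
    // Both values come back as strings; getFieldValueFromExplain asserts this internally.
    jsTestLog(`planCacheKey=${planCacheKey}, queryHash=${queryHash}`);
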
diff --git a/jstests/noPassthrough/sbe_plan_cache_size_metric.js b/jstests/noPassthrough/sbe_plan_cache_size_metric.js
index 1eb667754e0..102a9717270 100644
--- a/jstests/noPassthrough/sbe_plan_cache_size_metric.js
+++ b/jstests/noPassthrough/sbe_plan_cache_size_metric.js
@@ -16,8 +16,8 @@
(function() {
"use strict";
-load("jstests/libs/sbe_util.js"); // For checkSBEEnabled.
-load('jstests/libs/fixture_helpers.js'); // For FixtureHelpers.
+load("jstests/libs/sbe_util.js"); // For 'checkSBEEnabled()'.
+load("jstests/libs/analyze_plan.js"); // For 'getQueryHashFromExplain()'.
const conn = MongoRunner.runMongod();
assert.neq(conn, null, "mongod failed to start");
@@ -33,14 +33,6 @@ function getCacheEntriesByQueryHashKey(coll, queryHash) {
    return coll.aggregate([{$planCacheStats: {}}, {$match: {queryHash}}]).toArray();
}
-function getQueryHashFromExplain(explainRes) {
-    const hash = FixtureHelpers.isMongos(db)
-        ? explainRes.queryPlanner.winningPlan.shards[0].queryHash
-        : explainRes.queryPlanner.queryHash;
-    assert.eq(typeof (hash), "string");
-    return hash;
-}
-
function getPlanCacheSize() {
    return db.serverStatus().metrics.query.planCacheTotalSizeEstimateBytes;
}
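
A typical assertion built on this helper (a sketch only; the query shape and collection are
illustrative) records the metric before and after a query adds an entry to the SBE plan cache and
checks that the estimate grew:

    const sizeBefore = getPlanCacheSize();
    coll.find({a: 1}).sort({b: -1}).itcount();  // expected to cache a new SBE plan
    assert.gt(getPlanCacheSize(), sizeBefore);
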