author     Ben Shteinfeld <ben.shteinfeld@mongodb.com>    2022-08-25 21:05:13 +0000
committer  Evergreen Agent <no-reply@evergreen.mongodb.com>    2022-08-25 22:29:05 +0000
commit     7936a08758813938df3653ca63391b822612f238
tree       defb93784725319b596febd5480907affdb15b57 /src/mongo/db/commands
parent     42aef32217428c8d349ceb80ef62d077eafb835e
download   mongo-7936a08758813938df3653ca63391b822612f238.tar.gz
SERVER-62407 Translate find queries directly to ABT
Diffstat (limited to 'src/mongo/db/commands')
-rw-r--r--  src/mongo/db/commands/SConscript                 |    2
-rw-r--r--  src/mongo/db/commands/cqf/cqf_aggregate.cpp      |  529
-rw-r--r--  src/mongo/db/commands/cqf/cqf_aggregate.h        |   45
-rw-r--r--  src/mongo/db/commands/cqf/cqf_command_utils.cpp  |  752
-rw-r--r--  src/mongo/db/commands/cqf/cqf_command_utils.h    |   80
-rw-r--r--  src/mongo/db/commands/find_cmd.cpp               |   15
-rw-r--r--  src/mongo/db/commands/run_aggregate.cpp          |   12
7 files changed, 16 insertions(+), 1419 deletions(-)
diff --git a/src/mongo/db/commands/SConscript b/src/mongo/db/commands/SConscript
index 56fc590f96f..e83f7356c54 100644
--- a/src/mongo/db/commands/SConscript
+++ b/src/mongo/db/commands/SConscript
@@ -338,8 +338,6 @@ env.Library(
source=[
"analyze_cmd.cpp",
"count_cmd.cpp",
- "cqf/cqf_aggregate.cpp",
- "cqf/cqf_command_utils.cpp",
"create_command.cpp",
"create_indexes.cpp",
"current_op.cpp",
diff --git a/src/mongo/db/commands/cqf/cqf_aggregate.cpp b/src/mongo/db/commands/cqf/cqf_aggregate.cpp
deleted file mode 100644
index b7d1717b8f7..00000000000
--- a/src/mongo/db/commands/cqf/cqf_aggregate.cpp
+++ /dev/null
@@ -1,529 +0,0 @@
-/**
- * Copyright (C) 2022-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/commands/cqf/cqf_aggregate.h"
-
-#include "mongo/db/commands/cqf/cqf_command_utils.h"
-#include "mongo/db/curop.h"
-#include "mongo/db/exec/sbe/abt/abt_lower.h"
-#include "mongo/db/pipeline/abt/document_source_visitor.h"
-#include "mongo/db/pipeline/abt/match_expression_visitor.h"
-#include "mongo/db/query/ce/ce_histogram.h"
-#include "mongo/db/query/ce/ce_sampling.h"
-#include "mongo/db/query/ce/collection_statistics.h"
-#include "mongo/db/query/ce_mode_parameter.h"
-#include "mongo/db/query/optimizer/cascades/ce_heuristic.h"
-#include "mongo/db/query/optimizer/cascades/cost_derivation.h"
-#include "mongo/db/query/optimizer/explain.h"
-#include "mongo/db/query/optimizer/node.h"
-#include "mongo/db/query/optimizer/opt_phase_manager.h"
-#include "mongo/db/query/plan_executor_factory.h"
-#include "mongo/db/query/query_knobs_gen.h"
-#include "mongo/db/query/query_planner_params.h"
-#include "mongo/db/query/sbe_stage_builder.h"
-#include "mongo/db/query/yield_policy_callbacks_impl.h"
-#include "mongo/logv2/log.h"
-#include "mongo/logv2/log_attr.h"
-
-#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kQuery
-
-namespace mongo {
-
-using namespace optimizer;
-
-static opt::unordered_map<std::string, optimizer::IndexDefinition> buildIndexSpecsOptimizer(
- boost::intrusive_ptr<ExpressionContext> expCtx,
- OperationContext* opCtx,
- const CollectionPtr& collection,
- const boost::optional<BSONObj>& indexHint,
- const optimizer::ProjectionName& scanProjName,
- const DisableIndexOptions disableIndexOptions,
- bool& disableScan) {
- using namespace optimizer;
-
- if (disableIndexOptions == DisableIndexOptions::DisableAll) {
- return {};
- }
-
- std::string indexHintName;
- if (indexHint) {
- const BSONElement element = indexHint->firstElement();
- const StringData fieldName = element.fieldNameStringData();
- if (fieldName == "$natural"_sd) {
- if (!element.isNumber() || element.numberInt() != 1) {
- uasserted(6624255, "Unsupported hint option");
- }
- // Do not add indexes.
- return {};
- } else if (fieldName == "$hint"_sd && element.type() == BSONType::String) {
- indexHintName = element.valueStringData().toString();
- }
-
- disableScan = true;
- }
-
- const IndexCatalog& indexCatalog = *collection->getIndexCatalog();
- opt::unordered_map<std::string, IndexDefinition> result;
- auto indexIterator =
- indexCatalog.getIndexIterator(opCtx, IndexCatalog::InclusionPolicy::kReady);
-
- while (indexIterator->more()) {
- const IndexCatalogEntry& catalogEntry = *indexIterator->next();
-
- const IndexDescriptor& descriptor = *catalogEntry.descriptor();
- if (descriptor.hidden() || descriptor.isSparse() ||
- descriptor.getIndexType() != IndexType::INDEX_BTREE ||
- !descriptor.collation().isEmpty()) {
- uasserted(ErrorCodes::InternalErrorNotSupported, "Unsupported index type");
- }
-
- if (indexHint) {
- if (indexHintName.empty()) {
- if (!SimpleBSONObjComparator::kInstance.evaluate(descriptor.keyPattern() ==
- *indexHint)) {
- // Index key pattern does not match hint.
- continue;
- }
- } else if (indexHintName != descriptor.indexName()) {
- // Index name does not match hint.
- continue;
- }
- }
-
- const bool isMultiKey = catalogEntry.isMultikey(opCtx, collection);
- const MultikeyPaths& multiKeyPaths = catalogEntry.getMultikeyPaths(opCtx, collection);
- uassert(6624251, "Multikey paths cannot be empty.", !multiKeyPaths.empty());
-
- // SBE version is base 0.
- const int64_t version = static_cast<int>(descriptor.version()) - 1;
-
- uint32_t orderingBits = 0;
- {
- const Ordering ordering = catalogEntry.ordering();
- for (int i = 0; i < descriptor.getNumFields(); i++) {
- if ((ordering.get(i) == -1)) {
- orderingBits |= (1ull << i);
- }
- }
- }
-
- IndexCollationSpec indexCollationSpec;
- bool useIndex = true;
- size_t elementIdx = 0;
- for (const auto& element : descriptor.keyPattern()) {
- FieldPathType fieldPath;
- FieldPath path(element.fieldName());
-
- for (size_t i = 0; i < path.getPathLength(); i++) {
- const std::string& fieldName = path.getFieldName(i).toString();
- if (fieldName == "$**") {
- // TODO: For now disallow wildcard indexes.
- useIndex = false;
- break;
- }
- fieldPath.emplace_back(fieldName);
- }
- if (!useIndex) {
- break;
- }
-
- const int direction = element.numberInt();
- if (direction != -1 && direction != 1) {
- // Invalid value?
- useIndex = false;
- break;
- }
-
- const CollationOp collationOp =
- (direction == 1) ? CollationOp::Ascending : CollationOp::Descending;
-
- // Construct an ABT path for each index component (field path).
- const MultikeyComponents& elementMultiKeyInfo = multiKeyPaths[elementIdx];
- ABT abtPath = make<PathIdentity>();
- for (size_t i = fieldPath.size(); i-- > 0;) {
- if (isMultiKey && elementMultiKeyInfo.find(i) != elementMultiKeyInfo.cend()) {
- // This is a multikey element of the path.
- abtPath = make<PathTraverse>(std::move(abtPath), PathTraverse::kSingleLevel);
- }
- abtPath = make<PathGet>(fieldPath.at(i), std::move(abtPath));
- }
- indexCollationSpec.emplace_back(std::move(abtPath), collationOp);
- ++elementIdx;
- }
- if (!useIndex) {
- continue;
- }
-
- PartialSchemaRequirements partialIndexReqMap;
- if (descriptor.isPartial() &&
- disableIndexOptions != DisableIndexOptions::DisablePartialOnly) {
- auto expr = MatchExpressionParser::parseAndNormalize(
- descriptor.partialFilterExpression(),
- expCtx,
- ExtensionsCallbackNoop(),
- MatchExpressionParser::kBanAllSpecialFeatures);
-
- ABT exprABT = generateMatchExpression(expr.get(),
- false /*allowAggExpression*/,
- "" /*rootProjection*/,
- "" /*uniquePrefix*/);
- exprABT = make<EvalFilter>(std::move(exprABT), make<Variable>(scanProjName));
-
- // TODO: simplify expression.
-
- auto conversion = convertExprToPartialSchemaReq(exprABT, true /*isFilterContext*/);
- if (!conversion) {
- // TODO: should this conversion be always possible?
- continue;
- }
- tassert(6624257,
- "Should not be seeing a partial index filter where we need to over-approximate",
- !conversion->_retainPredicate);
-
- partialIndexReqMap = std::move(conversion->_reqMap);
- }
-
- // For now we assume distribution is Centralized.
- result.emplace(descriptor.indexName(),
- IndexDefinition(std::move(indexCollationSpec),
- version,
- orderingBits,
- isMultiKey,
- DistributionType::Centralized,
- std::move(partialIndexReqMap)));
- }
-
- return result;
-}
-
-static QueryHints getHintsFromQueryKnobs() {
- QueryHints hints;
-
- hints._disableScan = internalCascadesOptimizerDisableScan.load();
- hints._disableIndexes = internalCascadesOptimizerDisableIndexes.load()
- ? DisableIndexOptions::DisableAll
- : DisableIndexOptions::Enabled;
- hints._disableHashJoinRIDIntersect =
- internalCascadesOptimizerDisableHashJoinRIDIntersect.load();
- hints._disableMergeJoinRIDIntersect =
- internalCascadesOptimizerDisableMergeJoinRIDIntersect.load();
- hints._disableGroupByAndUnionRIDIntersect =
- internalCascadesOptimizerDisableGroupByAndUnionRIDIntersect.load();
- hints._keepRejectedPlans = internalCascadesOptimizerKeepRejectedPlans.load();
- hints._disableBranchAndBound = internalCascadesOptimizerDisableBranchAndBound.load();
- hints._fastIndexNullHandling = internalCascadesOptimizerFastIndexNullHandling.load();
-
- return hints;
-}
-
-static std::unique_ptr<PlanExecutor, PlanExecutor::Deleter> optimizeAndCreateExecutor(
- OptPhaseManager& phaseManager,
- ABT abtTree,
- OperationContext* opCtx,
- boost::intrusive_ptr<ExpressionContext> expCtx,
- const NamespaceString& nss,
- const CollectionPtr& collection) {
-
- const bool optimizationResult = phaseManager.optimize(abtTree);
- uassert(6624252, "Optimization failed", optimizationResult);
-
- {
- const auto& memo = phaseManager.getMemo();
- const auto& memoStats = memo.getStats();
- OPTIMIZER_DEBUG_LOG(6264800,
- 5,
- "Optimizer stats",
- "memoGroups"_attr = memo.getGroupCount(),
- "memoLogicalNodes"_attr = memo.getLogicalNodeCount(),
- "memoPhysNodes"_attr = memo.getPhysicalNodeCount(),
- "memoIntegrations"_attr = memoStats._numIntegrations,
- "physPlansExplored"_attr = memoStats._physPlanExplorationCount,
- "physMemoChecks"_attr = memoStats._physMemoCheckCount);
- }
-
- {
- const std::string explain = ExplainGenerator::explainV2(
- make<MemoPhysicalDelegatorNode>(phaseManager.getPhysicalNodeId()),
- true /*displayPhysicalProperties*/,
- &phaseManager.getMemo());
- OPTIMIZER_DEBUG_LOG(6264801, 5, "Optimized ABT", "explain"_attr = explain);
- }
-
- auto env = VariableEnvironment::build(abtTree);
- SlotVarMap slotMap;
- sbe::value::SlotIdGenerator ids;
- SBENodeLowering g{env,
- slotMap,
- ids,
- phaseManager.getMetadata(),
- phaseManager.getNodeToGroupPropsMap(),
- phaseManager.getRIDProjections()};
- auto sbePlan = g.optimize(abtTree);
-
- uassert(6624253, "Lowering failed: did not produce a plan.", sbePlan != nullptr);
- uassert(6624254, "Lowering failed: did not produce any output slots.", !slotMap.empty());
-
- {
- sbe::DebugPrinter p;
- OPTIMIZER_DEBUG_LOG(6264802, 5, "Lowered SBE plan", "plan"_attr = p.print(*sbePlan.get()));
- }
-
- stage_builder::PlanStageData data{std::make_unique<sbe::RuntimeEnvironment>()};
- data.outputs.set(stage_builder::PlanStageSlots::kResult, slotMap.begin()->second);
-
- sbePlan->attachToOperationContext(opCtx);
- if (expCtx->explain || expCtx->mayDbProfile) {
- sbePlan->markShouldCollectTimingInfo();
- }
-
- auto yieldPolicy =
- std::make_unique<PlanYieldPolicySBE>(PlanYieldPolicy::YieldPolicy::YIELD_AUTO,
- opCtx->getServiceContext()->getFastClockSource(),
- internalQueryExecYieldIterations.load(),
- Milliseconds{internalQueryExecYieldPeriodMS.load()},
- nullptr,
- std::make_unique<YieldPolicyCallbacksImpl>(nss));
-
- sbePlan->prepare(data.ctx);
- auto planExec = uassertStatusOK(plan_executor_factory::make(
- opCtx,
- nullptr /*cq*/,
- nullptr /*solution*/,
- {std::move(sbePlan), std::move(data)},
- std::make_unique<ABTPrinter>(std::move(abtTree), phaseManager.getNodeToGroupPropsMap()),
- MultipleCollectionAccessor(collection),
- QueryPlannerParams::Options::DEFAULT,
- nss,
- std::move(yieldPolicy)));
- return planExec;
-}
-
-static void populateAdditionalScanDefs(OperationContext* opCtx,
- boost::intrusive_ptr<ExpressionContext> expCtx,
- const Pipeline& pipeline,
- const boost::optional<BSONObj>& indexHint,
- const size_t numberOfPartitions,
- PrefixId& prefixId,
- opt::unordered_map<std::string, ScanDefinition>& scanDefs,
- const DisableIndexOptions disableIndexOptions,
- bool& disableScan) {
- for (const auto& involvedNss : pipeline.getInvolvedCollections()) {
- // TODO handle views?
- AutoGetCollectionForReadCommandMaybeLockFree ctx(
- opCtx, involvedNss, AutoGetCollectionViewMode::kViewsForbidden);
- const CollectionPtr& collection = ctx ? ctx.getCollection() : CollectionPtr::null;
- const bool collectionExists = collection != nullptr;
- const std::string uuidStr =
- collectionExists ? collection->uuid().toString() : "<missing_uuid>";
-
- const std::string collNameStr = involvedNss.coll().toString();
- // TODO: We cannot add the uuidStr suffix because the pipeline translation does not have
- // access to the metadata so it generates a scan over just the collection name.
- const std::string scanDefName = collNameStr;
-
- opt::unordered_map<std::string, optimizer::IndexDefinition> indexDefs;
- const ProjectionName& scanProjName = prefixId.getNextId("scan");
- if (collectionExists) {
- // TODO: add locks on used indexes?
- indexDefs = buildIndexSpecsOptimizer(expCtx,
- opCtx,
- collection,
- indexHint,
- scanProjName,
- disableIndexOptions,
- disableScan);
- }
-
- // For now handle only local parallelism (no over-the-network exchanges).
- DistributionAndPaths distribution{(numberOfPartitions == 1)
- ? DistributionType::Centralized
- : DistributionType::UnknownPartitioning};
-
- const CEType collectionCE = collectionExists ? collection->numRecords(opCtx) : -1.0;
- scanDefs[scanDefName] =
- ScanDefinition({{"type", "mongod"},
- {"database", involvedNss.db().toString()},
- {"uuid", uuidStr},
- {ScanNode::kDefaultCollectionNameSpec, collNameStr}},
- std::move(indexDefs),
- std::move(distribution),
- collectionExists,
- collectionCE);
- }
-}
-
-std::unique_ptr<PlanExecutor, PlanExecutor::Deleter> getSBEExecutorViaCascadesOptimizer(
- OperationContext* opCtx,
- boost::intrusive_ptr<ExpressionContext> expCtx,
- const NamespaceString& nss,
- const CollectionPtr& collection,
- const boost::optional<BSONObj>& indexHint,
- const Pipeline& pipeline) {
- const bool collectionExists = collection != nullptr;
- const std::string uuidStr = collectionExists ? collection->uuid().toString() : "<missing_uuid>";
- const std::string collNameStr = nss.coll().toString();
- const std::string scanDefName = collNameStr + "_" + uuidStr;
-
- if (indexHint && !pipeline.getInvolvedCollections().empty()) {
- uasserted(6624256,
- "For now we can apply hints only for queries involving a single collection");
- }
- // Unsupported command/collection options.
- uassert(ErrorCodes::InternalErrorNotSupported,
- "Collection-default collation is not supported",
- !collection || collection->getCollectionOptions().collation.isEmpty());
-
- uassert(ErrorCodes::InternalErrorNotSupported,
- "Clustered collections are not supported",
- !collection || !collection->isClustered());
-
- uassert(ErrorCodes::InternalErrorNotSupported,
- "Timeseries collections are not supported",
- !collection || !collection->getTimeseriesOptions());
-
- auto curOp = CurOp::get(opCtx);
- curOp->debug().cqfUsed = true;
-
- QueryHints queryHints = getHintsFromQueryKnobs();
-
- PrefixId prefixId;
- const ProjectionName& scanProjName = prefixId.getNextId("scan");
-
- // Add the base collection metadata.
- opt::unordered_map<std::string, optimizer::IndexDefinition> indexDefs;
- if (collectionExists) {
- // TODO: add locks on used indexes?
- indexDefs = buildIndexSpecsOptimizer(expCtx,
- opCtx,
- collection,
- indexHint,
- scanProjName,
- queryHints._disableIndexes,
- queryHints._disableScan);
- }
-
- const size_t numberOfPartitions = internalQueryDefaultDOP.load();
- // For now handle only local parallelism (no over-the-network exchanges).
- DistributionAndPaths distribution{(numberOfPartitions == 1)
- ? DistributionType::Centralized
- : DistributionType::UnknownPartitioning};
-
- opt::unordered_map<std::string, ScanDefinition> scanDefs;
- const int64_t numRecords = collectionExists ? collection->numRecords(opCtx) : -1;
- scanDefs.emplace(scanDefName,
- ScanDefinition({{"type", "mongod"},
- {"database", nss.db().toString()},
- {"uuid", uuidStr},
- {ScanNode::kDefaultCollectionNameSpec, collNameStr}},
- std::move(indexDefs),
- std::move(distribution),
- collectionExists,
- static_cast<CEType>(numRecords)));
-
- // Add a scan definition for all involved collections. Note that the base namespace has already
- // been accounted for above and isn't included here.
- populateAdditionalScanDefs(opCtx,
- expCtx,
- pipeline,
- indexHint,
- numberOfPartitions,
- prefixId,
- scanDefs,
- queryHints._disableIndexes,
- queryHints._disableScan);
-
- Metadata metadata(std::move(scanDefs), numberOfPartitions);
-
- ABT abtTree = collectionExists ? make<ScanNode>(scanProjName, scanDefName)
- : make<ValueScanNode>(ProjectionNameVector{scanProjName});
- abtTree =
- translatePipelineToABT(metadata, pipeline, scanProjName, std::move(abtTree), prefixId);
-
- OPTIMIZER_DEBUG_LOG(
- 6264803, 5, "Translated ABT", "explain"_attr = ExplainGenerator::explainV2(abtTree));
-
- if (internalQueryCardinalityEstimatorMode == ce::kSampling && collectionExists &&
- numRecords > 0) {
- Metadata metadataForSampling = metadata;
- // Do not use indexes for sampling.
- for (auto& entry : metadataForSampling._scanDefs) {
- entry.second.getIndexDefs().clear();
- }
-
- // TODO: consider a limited rewrite set.
- OptPhaseManager phaseManagerForSampling(OptPhaseManager::getAllRewritesSet(),
- prefixId,
- false /*requireRID*/,
- std::move(metadataForSampling),
- std::make_unique<HeuristicCE>(),
- std::make_unique<DefaultCosting>(),
- DebugInfo::kDefaultForProd);
-
- OptPhaseManager phaseManager{
- OptPhaseManager::getAllRewritesSet(),
- prefixId,
- false /*requireRID*/,
- std::move(metadata),
- std::make_unique<CESamplingTransport>(opCtx, phaseManagerForSampling, numRecords),
- std::make_unique<DefaultCosting>(),
- DebugInfo::kDefaultForProd};
- phaseManager.getHints() = queryHints;
-
- return optimizeAndCreateExecutor(
- phaseManager, std::move(abtTree), opCtx, expCtx, nss, collection);
-
- } else if (internalQueryCardinalityEstimatorMode == ce::kHistogram &&
- ce::CollectionStatistics::hasCollectionStatistics(nss)) {
- const auto& stats = ce::CollectionStatistics::getCollectionStatistics(nss);
- auto ceDerivation = std::make_unique<CEHistogramTransport>(stats);
- OptPhaseManager phaseManager{OptPhaseManager::getAllRewritesSet(),
- prefixId,
- false /*requireRID*/,
- std::move(metadata),
- std::move(ceDerivation),
- std::make_unique<DefaultCosting>(),
- DebugInfo::kDefaultForProd};
-
- return optimizeAndCreateExecutor(
- phaseManager, std::move(abtTree), opCtx, expCtx, nss, collection);
-
- } else {
- // Default to using heuristics.
- OptPhaseManager phaseManager{OptPhaseManager::getAllRewritesSet(),
- prefixId,
- std::move(metadata),
- DebugInfo::kDefaultForProd};
- phaseManager.getHints() = queryHints;
-
- return optimizeAndCreateExecutor(
- phaseManager, std::move(abtTree), opCtx, expCtx, nss, collection);
- }
-}
-
-} // namespace mongo
diff --git a/src/mongo/db/commands/cqf/cqf_aggregate.h b/src/mongo/db/commands/cqf/cqf_aggregate.h
deleted file mode 100644
index ec7ee64b257..00000000000
--- a/src/mongo/db/commands/cqf/cqf_aggregate.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Copyright (C) 2022-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include "mongo/db/catalog/collection.h"
-#include "mongo/db/query/plan_executor.h"
-
-namespace mongo {
-
-std::unique_ptr<PlanExecutor, PlanExecutor::Deleter> getSBEExecutorViaCascadesOptimizer(
- OperationContext* opCtx,
- boost::intrusive_ptr<ExpressionContext> expCtx,
- const NamespaceString& nss,
- const CollectionPtr& collection,
- const boost::optional<BSONObj>& indexHint,
- const Pipeline& pipeline);
-
-} // namespace mongo
diff --git a/src/mongo/db/commands/cqf/cqf_command_utils.cpp b/src/mongo/db/commands/cqf/cqf_command_utils.cpp
deleted file mode 100644
index 3a64454a60b..00000000000
--- a/src/mongo/db/commands/cqf/cqf_command_utils.cpp
+++ /dev/null
@@ -1,752 +0,0 @@
-/**
- * Copyright (C) 2022-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#include "mongo/db/commands/cqf/cqf_command_utils.h"
-
-#include "mongo/db/commands/test_commands_enabled.h"
-#include "mongo/db/exec/add_fields_projection_executor.h"
-#include "mongo/db/exec/exclusion_projection_executor.h"
-#include "mongo/db/exec/inclusion_projection_executor.h"
-#include "mongo/db/exec/projection_executor_builder.h"
-#include "mongo/db/exec/sbe/abt/abt_lower.h"
-#include "mongo/db/matcher/expression_always_boolean.h"
-#include "mongo/db/matcher/expression_array.h"
-#include "mongo/db/matcher/expression_expr.h"
-#include "mongo/db/matcher/expression_geo.h"
-#include "mongo/db/matcher/expression_internal_bucket_geo_within.h"
-#include "mongo/db/matcher/expression_internal_expr_comparison.h"
-#include "mongo/db/matcher/expression_leaf.h"
-#include "mongo/db/matcher/expression_text.h"
-#include "mongo/db/matcher/expression_text_noop.h"
-#include "mongo/db/matcher/expression_tree.h"
-#include "mongo/db/matcher/expression_type.h"
-#include "mongo/db/matcher/expression_visitor.h"
-#include "mongo/db/matcher/expression_where.h"
-#include "mongo/db/matcher/expression_where_noop.h"
-#include "mongo/db/matcher/match_expression_walker.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_all_elem_match_from_index.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_allowed_properties.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_cond.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_eq.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_fmod.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_match_array_index.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_max_items.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_max_length.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_max_properties.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_min_items.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_min_length.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_min_properties.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_object_match.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_root_doc_eq.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_unique_items.h"
-#include "mongo/db/matcher/schema/expression_internal_schema_xor.h"
-#include "mongo/db/pipeline/abt/agg_expression_visitor.h"
-#include "mongo/db/pipeline/abt/document_source_visitor.h"
-#include "mongo/db/pipeline/abt/match_expression_visitor.h"
-#include "mongo/db/pipeline/abt/utils.h"
-#include "mongo/db/pipeline/document_source_bucket_auto.h"
-#include "mongo/db/pipeline/document_source_coll_stats.h"
-#include "mongo/db/pipeline/document_source_current_op.h"
-#include "mongo/db/pipeline/document_source_cursor.h"
-#include "mongo/db/pipeline/document_source_exchange.h"
-#include "mongo/db/pipeline/document_source_facet.h"
-#include "mongo/db/pipeline/document_source_geo_near.h"
-#include "mongo/db/pipeline/document_source_geo_near_cursor.h"
-#include "mongo/db/pipeline/document_source_graph_lookup.h"
-#include "mongo/db/pipeline/document_source_group.h"
-#include "mongo/db/pipeline/document_source_index_stats.h"
-#include "mongo/db/pipeline/document_source_internal_inhibit_optimization.h"
-#include "mongo/db/pipeline/document_source_internal_shard_filter.h"
-#include "mongo/db/pipeline/document_source_internal_split_pipeline.h"
-#include "mongo/db/pipeline/document_source_internal_unpack_bucket.h"
-#include "mongo/db/pipeline/document_source_limit.h"
-#include "mongo/db/pipeline/document_source_list_cached_and_active_users.h"
-#include "mongo/db/pipeline/document_source_list_local_sessions.h"
-#include "mongo/db/pipeline/document_source_list_sessions.h"
-#include "mongo/db/pipeline/document_source_lookup.h"
-#include "mongo/db/pipeline/document_source_match.h"
-#include "mongo/db/pipeline/document_source_merge.h"
-#include "mongo/db/pipeline/document_source_operation_metrics.h"
-#include "mongo/db/pipeline/document_source_out.h"
-#include "mongo/db/pipeline/document_source_plan_cache_stats.h"
-#include "mongo/db/pipeline/document_source_queue.h"
-#include "mongo/db/pipeline/document_source_redact.h"
-#include "mongo/db/pipeline/document_source_replace_root.h"
-#include "mongo/db/pipeline/document_source_sample.h"
-#include "mongo/db/pipeline/document_source_sample_from_random_cursor.h"
-#include "mongo/db/pipeline/document_source_sequential_document_cache.h"
-#include "mongo/db/pipeline/document_source_single_document_transformation.h"
-#include "mongo/db/pipeline/document_source_skip.h"
-#include "mongo/db/pipeline/document_source_sort.h"
-#include "mongo/db/pipeline/document_source_tee_consumer.h"
-#include "mongo/db/pipeline/document_source_union_with.h"
-#include "mongo/db/pipeline/document_source_unwind.h"
-#include "mongo/db/pipeline/visitors/document_source_visitor.h"
-#include "mongo/db/pipeline/visitors/document_source_walker.h"
-#include "mongo/db/pipeline/visitors/transformer_interface_walker.h"
-#include "mongo/db/query/query_feature_flags_gen.h"
-#include "mongo/db/query/query_knobs_gen.h"
-#include "mongo/db/query/query_planner_params.h"
-#include "mongo/s/query/document_source_merge_cursors.h"
-
-namespace mongo {
-
-using namespace optimizer;
-
-namespace {
-
-/**
- * Visitor that is responsible for indicating whether a MatchExpression is eligible for Bonsai by
- * setting the '_eligible' member variable. Expressions which are "test-only" and not officially
- * supported should set _eligible to false.
- */
-class ABTMatchExpressionVisitor : public MatchExpressionConstVisitor {
-public:
- ABTMatchExpressionVisitor(bool& eligible) : _eligible(eligible) {}
-
- void visit(const LTEMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const LTMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const ElemMatchObjectMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const ElemMatchValueMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const EqualityMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const GTEMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const GTMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const InMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
-
- // $in over a regex predicate is not supported.
- if (!expr->getRegexes().empty()) {
- _eligible = false;
- }
- }
- void visit(const ExistsMatchExpression* expr) override {
- assertSupportedPathExpression(expr);
- }
- void visit(const AndMatchExpression* expr) override {}
- void visit(const OrMatchExpression* expr) override {}
-
- void visit(const GeoMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const GeoNearMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalBucketGeoWithinMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalExprEqMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalExprGTMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalExprGTEMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalExprLTMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalExprLTEMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaAllElemMatchFromIndexMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaAllowedPropertiesMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaBinDataEncryptedTypeExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaBinDataFLE2EncryptedTypeExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaBinDataSubTypeExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaCondMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaEqMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaFmodMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMatchArrayIndexMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMaxItemsMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMaxLengthMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMaxPropertiesMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMinItemsMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMinLengthMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaMinPropertiesMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaObjectMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaRootDocEqMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaTypeExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaUniqueItemsMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const InternalSchemaXorMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const ModMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const NorMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const NotMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const RegexMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const SizeMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const TextMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const TextNoOpMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const TwoDPtInAnnulusExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const WhereMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const WhereNoOpMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const BitsAllClearMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const BitsAllSetMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const BitsAnyClearMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const BitsAnySetMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const TypeMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const AlwaysFalseMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const AlwaysTrueMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const ExprMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
- void visit(const EncryptedBetweenMatchExpression* expr) override {
- unsupportedExpression(expr);
- }
-
-private:
- void unsupportedExpression(const MatchExpression* expr) {
- _eligible = false;
- }
-
- void assertSupportedPathExpression(const PathMatchExpression* expr) {
- if (FieldRef(expr->path()).hasNumericPathComponents())
- _eligible = false;
- }
-
- bool& _eligible;
-};
-
-
-class ABTTransformerVisitor : public TransformerInterfaceConstVisitor {
-public:
- ABTTransformerVisitor(bool& eligible) : _eligible(eligible) {}
-
- void visit(const projection_executor::ExclusionProjectionExecutor* transformer) override {
- checkUnsupportedInclusionExclusion(transformer);
- }
-
- void visit(const projection_executor::InclusionProjectionExecutor* transformer) override {
- checkUnsupportedInclusionExclusion(transformer);
- }
-
- void visit(const projection_executor::AddFieldsProjectionExecutor* transformer) override {
- unsupportedTransformer(transformer);
- }
-
- void visit(const GroupFromFirstDocumentTransformation* transformer) override {
- unsupportedTransformer(transformer);
- }
-
- void visit(const ReplaceRootTransformation* transformer) override {
- unsupportedTransformer(transformer);
- }
-
-private:
- void unsupportedTransformer(const TransformerInterface* transformer) {
- _eligible = false;
- }
-
- template <typename T>
- void checkUnsupportedInclusionExclusion(const T* transformer) {
- OrderedPathSet computedPaths;
- StringMap<std::string> renamedPaths;
- transformer->getRoot()->reportComputedPaths(&computedPaths, &renamedPaths);
-
- // Non-simple projections are supported under test only.
- if (computedPaths.size() > 0 || renamedPaths.size() > 0) {
- unsupportedTransformer(transformer);
- return;
- }
-
- OrderedPathSet preservedPaths;
- transformer->getRoot()->reportProjectedPaths(&preservedPaths);
-
- for (const std::string& path : preservedPaths) {
- if (FieldRef(path).hasNumericPathComponents()) {
- unsupportedTransformer(transformer);
- return;
- }
- }
- }
-
- bool& _eligible;
-};
-
-/**
- * Visitor that is responsible for indicating whether a DocumentSource is eligible for Bonsai by
- * setting the 'eligible' member variable. Stages which are "test-only" and not officially supported
- * should set 'eligible' to false.
- */
-class ABTUnsupportedDocumentSourceVisitor : public DocumentSourceConstVisitor {
-public:
- void visit(const DocumentSourceInternalUnpackBucket* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceBucketAuto* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceCollStats* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceCurrentOp* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceCursor* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceExchange* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceFacet* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceGeoNear* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceGeoNearCursor* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceGraphLookUp* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceIndexStats* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceInternalShardFilter* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceInternalSplitPipeline* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceListCachedAndActiveUsers* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceListLocalSessions* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceListSessions* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceLookUp* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceMerge* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceMergeCursors* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceOperationMetrics* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceOut* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourcePlanCacheStats* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceQueue* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceRedact* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceSample* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceSampleFromRandomCursor* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceSequentialDocumentCache* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceTeeConsumer* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceGroup* source) override {
- unsupportedStage(source);
- }
- void visit(const DocumentSourceLimit* source) override {
- unsupportedStage(source);
- }
- void visit(const DocumentSourceSkip* source) override {
- unsupportedStage(source);
- }
- void visit(const DocumentSourceSort* source) override {
- unsupportedStage(source);
- }
- void visit(const DocumentSourceUnwind* source) override {
- unsupportedStage(source);
- }
- void visit(const DocumentSourceUnionWith* source) override {
- unsupportedStage(source);
- }
-
- void visit(const DocumentSourceInternalInhibitOptimization* source) override {
- // Can be ignored.
- }
-
- void visit(const DocumentSourceMatch* source) override {
- // Pass a reference to our local 'eligible' variable to allow the visitor to overwrite it.
- ABTMatchExpressionVisitor visitor(eligible);
- MatchExpressionWalker walker(nullptr /*preVisitor*/, nullptr /*inVisitor*/, &visitor);
- tree_walker::walk<true, MatchExpression>(source->getMatchExpression(), &walker);
- }
-
- void visit(const DocumentSourceSingleDocumentTransformation* source) override {
- ABTTransformerVisitor visitor(eligible);
- TransformerInterfaceWalker walker(&visitor);
- walker.walk(&source->getTransformer());
- }
-
- void unsupportedStage(const DocumentSource* source) {
- eligible = false;
- }
-
- bool eligible = true;
-};
-
-template <class RequestType>
-bool isEligibleCommon(const RequestType& request,
- OperationContext* opCtx,
- const CollectionPtr& collection) {
- // The FindCommandRequest defaults some parameters to BSONObj() instead of boost::none.
- auto noneOrDefaultEmpty = [&](auto param) {
- if constexpr (std::is_same_v<decltype(param), boost::optional<BSONObj>>) {
- return param && !param->isEmpty();
- } else {
- return !param.isEmpty();
- }
- };
- bool unsupportedCmdOption = noneOrDefaultEmpty(request.getHint()) ||
- noneOrDefaultEmpty(request.getCollation()) || request.getLet() ||
- request.getLegacyRuntimeConstants();
-
- bool unsupportedIndexType = [&]() {
- if (collection == nullptr)
- return false;
-
- const IndexCatalog& indexCatalog = *collection->getIndexCatalog();
- auto indexIterator =
- indexCatalog.getIndexIterator(opCtx, IndexCatalog::InclusionPolicy::kReady);
-
- while (indexIterator->more()) {
- const IndexDescriptor& descriptor = *indexIterator->next()->descriptor();
- if (descriptor.isPartial() || descriptor.hidden() || descriptor.isSparse() ||
- descriptor.getIndexType() != IndexType::INDEX_BTREE ||
- !descriptor.collation().isEmpty()) {
- return true;
- }
- }
- return false;
- }();
-
- bool unsupportedCollectionType = [&]() {
- if (collection == nullptr)
- return false;
-
- if (collection->isClustered() || !collection->getCollectionOptions().collation.isEmpty() ||
- collection->getTimeseriesOptions()) {
- return true;
- }
-
- return false;
- }();
-
- return !unsupportedCmdOption && !unsupportedIndexType && !unsupportedCollectionType &&
- !storageGlobalParams.noTableScan.load();
-}
-
-boost::optional<bool> shouldForceEligibility() {
- // Without the feature flag set, no queries are eligible for Bonsai.
- if (!serverGlobalParams.featureCompatibility.isVersionInitialized() ||
- !feature_flags::gFeatureFlagCommonQueryFramework.isEnabled(
- serverGlobalParams.featureCompatibility)) {
- return false;
- }
-
- auto queryControl = ServerParameterSet::getNodeParameterSet()->get<QueryFrameworkControl>(
- "internalQueryFrameworkControl");
-
- switch (queryControl->_data.get()) {
- case QueryFrameworkControlEnum::kForceClassicEngine:
- case QueryFrameworkControlEnum::kTrySbeEngine:
- return false;
- case QueryFrameworkControlEnum::kTryBonsai:
- // Return boost::none to indicate that we should not force eligibility of bonsai nor the
- // classic engine.
- return boost::none;
- case QueryFrameworkControlEnum::kForceBonsai:
- // This option is only supported with test commands enabled.
- return getTestCommandsEnabled();
- }
-
- MONGO_UNREACHABLE;
-}
-
-} // namespace
-
-MONGO_FAIL_POINT_DEFINE(enableExplainInBonsai);
-
-bool isEligibleForBonsai(const AggregateCommandRequest& request,
- const Pipeline& pipeline,
- OperationContext* opCtx,
- const CollectionPtr& collection) {
- if (auto forceBonsai = shouldForceEligibility(); forceBonsai.has_value()) {
- return *forceBonsai;
- }
-
- // Explain is not currently supported but is allowed if the failpoint is set
- // for testing purposes.
- if (!MONGO_unlikely(enableExplainInBonsai.shouldFail()) && request.getExplain()) {
- return false;
- }
-
- bool commandOptionsEligible = isEligibleCommon(request, opCtx, collection) &&
- !request.getUnwrappedReadPref() && !request.getRequestReshardingResumeToken().has_value() &&
- !request.getExchange();
-
- // Early return to avoid unnecessary work of walking the input pipeline.
- if (!commandOptionsEligible) {
- return false;
- }
-
- ABTUnsupportedDocumentSourceVisitor visitor;
- DocumentSourceWalker walker(nullptr /*preVisitor*/, &visitor);
-
- // The rudimentary walker may throw if it reaches a stage that it isn't aware about, so catch it
- // here and return ineligible.
- // TODO SERVER-62027 this should no longer be needed once all stages require a visit.
- try {
- walker.walk(pipeline);
- } catch (DBException&) {
- visitor.eligible = false;
- }
-
- return visitor.eligible;
-}
-
-bool isEligibleForBonsai(const CanonicalQuery& cq,
- OperationContext* opCtx,
- const CollectionPtr& collection) {
- if (auto forceBonsai = shouldForceEligibility(); forceBonsai.has_value()) {
- return *forceBonsai;
- }
-
- // Explain is not currently supported but is allowed if the failpoint is set
- // for testing purposes.
- if (!MONGO_unlikely(enableExplainInBonsai.shouldFail()) && cq.getExplain()) {
- return false;
- }
-
- auto request = cq.getFindCommandRequest();
- auto expression = cq.root();
- bool commandOptionsEligible = isEligibleCommon(request, opCtx, collection) &&
- request.getSort().isEmpty() && request.getMin().isEmpty() && request.getMax().isEmpty() &&
- !request.getReturnKey() && !request.getSingleBatch() && !request.getTailable() &&
- !request.getSkip() && !request.getLimit() && !request.getNoCursorTimeout();
-
- // Early return to avoid unnecessary work of walking the input expression.
- if (!commandOptionsEligible) {
- return false;
- }
-
- bool eligible = true;
- ABTMatchExpressionVisitor visitor(eligible);
- MatchExpressionWalker walker(nullptr /*preVisitor*/, nullptr /*inVisitor*/, &visitor);
- tree_walker::walk<true, MatchExpression>(expression, &walker);
-
- if (cq.getProj()) {
- // TODO SERVER-66846 Replace this with ProjectionAST walker
- auto projExecutor = projection_executor::buildProjectionExecutor(
- cq.getExpCtx(),
- cq.getProj(),
- ProjectionPolicies::findProjectionPolicies(),
- projection_executor::BuilderParamsBitSet{projection_executor::kDefaultBuilderParams});
- ABTTransformerVisitor visitor(eligible);
- TransformerInterfaceWalker walker(&visitor);
- walker.walk(projExecutor.get());
- }
-
- return eligible;
-}
-
-} // namespace mongo
diff --git a/src/mongo/db/commands/cqf/cqf_command_utils.h b/src/mongo/db/commands/cqf/cqf_command_utils.h
deleted file mode 100644
index 4ceb333d364..00000000000
--- a/src/mongo/db/commands/cqf/cqf_command_utils.h
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Copyright (C) 2022-present MongoDB, Inc.
- *
- * This program is free software: you can redistribute it and/or modify
- * it under the terms of the Server Side Public License, version 1,
- * as published by MongoDB, Inc.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * Server Side Public License for more details.
- *
- * You should have received a copy of the Server Side Public License
- * along with this program. If not, see
- * <http://www.mongodb.com/licensing/server-side-public-license>.
- *
- * As a special exception, the copyright holders give permission to link the
- * code of portions of this program with the OpenSSL library under certain
- * conditions as described in each individual source file and distribute
- * linked combinations including the program with the OpenSSL library. You
- * must comply with the Server Side Public License in all respects for
- * all of the code used other than as permitted herein. If you modify file(s)
- * with this exception, you may extend this exception to your version of the
- * file(s), but you are not obligated to do so. If you do not wish to do so,
- * delete this exception statement from your version. If you delete this
- * exception statement from all source files in the program, then also delete
- * it in the license file.
- */
-
-#pragma once
-
-#include "mongo/db/catalog/collection.h"
-
-#define MONGO_LOGV2_DEFAULT_COMPONENT ::mongo::logv2::LogComponent::kQuery
-constexpr bool kMongoOptimizerStdCoutDebugOutput = false;
-
-namespace mongo {
-
-template <typename T>
-void coutPrintAttr(const logv2::detail::NamedArg<T>& arg) {
- std::cout << arg.name << " : " << arg.value << "\n";
-}
-
-template <typename T, typename... Args>
-void coutPrintAttr(const logv2::detail::NamedArg<T>& arg,
- const logv2::detail::NamedArg<Args>&... args) {
- std::cout << arg.name << " : " << arg.value << "\n";
- coutPrintAttr(args...);
-}
-
-template <typename... Args>
-void coutPrint(const std::string& msg, const logv2::detail::NamedArg<Args>&... args) {
- std::cout << "********* " << msg << " *********\n";
- coutPrintAttr(args...);
- std::cout << "********* " << msg << " *********\n";
-}
-
-#define OPTIMIZER_DEBUG_LOG(ID, DLEVEL, FMTSTR_MESSAGE, ...) \
- LOGV2_DEBUG(ID, DLEVEL, FMTSTR_MESSAGE, ##__VA_ARGS__); \
- if (kMongoOptimizerStdCoutDebugOutput) \
- ::mongo::coutPrint(FMTSTR_MESSAGE, __VA_ARGS__);
-
-/**
- * Returns whether the given Pipeline and aggregate command is eligible to use the bonsai
- * optimizer.
- */
-bool isEligibleForBonsai(const AggregateCommandRequest& request,
- const Pipeline& pipeline,
- OperationContext* opCtx,
- const CollectionPtr& collection);
-
-/**
- * Returns whether the given find command is eligible to use the bonsai optimizer.
- */
-bool isEligibleForBonsai(const CanonicalQuery& cq,
- OperationContext* opCtx,
- const CollectionPtr& collection);
-
-} // namespace mongo
-#undef MONGO_LOGV2_DEFAULT_COMPONENT
diff --git a/src/mongo/db/commands/find_cmd.cpp b/src/mongo/db/commands/find_cmd.cpp
index 9ebf3c97908..86bff60820b 100644
--- a/src/mongo/db/commands/find_cmd.cpp
+++ b/src/mongo/db/commands/find_cmd.cpp
@@ -36,7 +36,6 @@
#include "mongo/db/client.h"
#include "mongo/db/clientcursor.h"
#include "mongo/db/commands.h"
-#include "mongo/db/commands/cqf/cqf_command_utils.h"
#include "mongo/db/commands/run_aggregate.h"
#include "mongo/db/commands/test_commands_enabled.h"
#include "mongo/db/cursor_manager.h"
@@ -48,6 +47,8 @@
#include "mongo/db/pipeline/aggregation_request_helper.h"
#include "mongo/db/pipeline/variables.h"
#include "mongo/db/query/collation/collator_factory_interface.h"
+#include "mongo/db/query/cqf_command_utils.h"
+#include "mongo/db/query/cqf_get_executor.h"
#include "mongo/db/query/cursor_response.h"
#include "mongo/db/query/explain.h"
#include "mongo/db/query/find.h"
@@ -309,9 +310,9 @@ public:
extensionsCallback,
MatchExpressionParser::kAllowAllSpecialFeatures));
- // If we are running a query against a view, or if we are trying to test the new
- // optimizer, redirect this query through the aggregation system.
- if (ctx->getView() || isEligibleForBonsai(*cq, opCtx, ctx->getCollection())) {
+ // If we are running a query against a view redirect this query through the aggregation
+ // system.
+ if (ctx->getView()) {
// Relinquish locks. The aggregation command will re-acquire them.
ctx.reset();
@@ -514,9 +515,9 @@ public:
extensionsCallback,
MatchExpressionParser::kAllowAllSpecialFeatures));
- // If we are running a query against a view, or if we are trying to test the new
- // optimizer, redirect this query through the aggregation system.
- if (ctx->getView() || isEligibleForBonsai(*cq, opCtx, ctx->getCollection())) {
+ // If we are running a query against a view redirect this query through the aggregation
+ // system.
+ if (ctx->getView()) {
// Relinquish locks. The aggregation command will re-acquire them.
ctx.reset();
diff --git a/src/mongo/db/commands/run_aggregate.cpp b/src/mongo/db/commands/run_aggregate.cpp
index acc2597af08..28909dd442b 100644
--- a/src/mongo/db/commands/run_aggregate.cpp
+++ b/src/mongo/db/commands/run_aggregate.cpp
@@ -41,8 +41,6 @@
#include "mongo/db/catalog/database_holder.h"
#include "mongo/db/change_stream_change_collection_manager.h"
#include "mongo/db/change_stream_pre_images_collection_manager.h"
-#include "mongo/db/commands/cqf/cqf_aggregate.h"
-#include "mongo/db/commands/cqf/cqf_command_utils.h"
#include "mongo/db/curop.h"
#include "mongo/db/cursor_manager.h"
#include "mongo/db/db_raii.h"
@@ -66,6 +64,8 @@
#include "mongo/db/pipeline/search_helper.h"
#include "mongo/db/query/collation/collator_factory_interface.h"
#include "mongo/db/query/collection_query_info.h"
+#include "mongo/db/query/cqf_command_utils.h"
+#include "mongo/db/query/cqf_get_executor.h"
#include "mongo/db/query/cursor_response.h"
#include "mongo/db/query/find_common.h"
#include "mongo/db/query/get_executor.h"
@@ -976,8 +976,12 @@ Status runAggregate(OperationContext* opCtx,
!request.getExchange().has_value());
auto timeBegin = Date_t::now();
- execs.emplace_back(getSBEExecutorViaCascadesOptimizer(
- opCtx, expCtx, nss, collections.getMainCollection(), request.getHint(), *pipeline));
+ execs.emplace_back(getSBEExecutorViaCascadesOptimizer(opCtx,
+ expCtx,
+ nss,
+ collections.getMainCollection(),
+ request.getHint(),
+ std::move(pipeline)));
auto elapsed =
(Date_t::now().toMillisSinceEpoch() - timeBegin.toMillisSinceEpoch()) / 1000.0;
OPTIMIZER_DEBUG_LOG(