SERVER-80756 Expose physical ABT in M2 explain

This commit is contained in:
Militsa Sotirova 2023-11-10 17:19:00 +00:00 committed by Evergreen Agent
parent 7239317bdb
commit 4cf2ec98fc
19 changed files with 579 additions and 47 deletions

View File

@ -197,7 +197,7 @@ async def benchmark(config: BenchmarkConfig, database: DatabaseInstance, task: B
def make_variant(explain: Sequence[dict[str, any]]) -> ExperimentResult:
    """Make one variant of the A/B test.

    Builds a physical plan tree and an execution tree from each explain
    document and averages the total execution time over all of them.

    Note: the annotation `any` is the builtin, not `typing.Any` — presumably
    `Any` was intended; confirm against the file's imports.
    """
    # SERVER-80756: the user-facing plan lives under 'queryPlan' (previously
    # 'optimizerPlan'); the stale duplicate assignment reading 'optimizerPlan'
    # was a dead store and has been dropped.
    pt = [physical_tree.build(e['queryPlanner']['winningPlan']['queryPlan']) for e in explain]
    et = [execution_tree.build_execution_tree(e['executionStats']) for e in explain]
    # Use a distinct loop variable so the list 'et' is not shadowed, and guard
    # against an empty explain sequence to avoid ZeroDivisionError.
    mean = sum(t.total_execution_time for t in et) / len(et) if et else 0.0
    return ExperimentResult(explain=explain, physical_tree=pt, execution_tree=et, mean=mean)

View File

@ -115,7 +115,7 @@ async def load_calibration_data(database: DatabaseInstance, collection_name: str
df['sbe'] = df.explain.apply(lambda e: sbe.build_execution_tree(
json.loads(e)['executionStats']))
df['abt'] = df.explain.apply(lambda e: abt.build(
json.loads(e)['queryPlanner']['winningPlan']['optimizerPlan']))
json.loads(e)['queryPlanner']['winningPlan']['queryPlan']))
df['total_execution_time'] = df.sbe.apply(lambda t: t.total_execution_time)
return df

View File

@ -118,7 +118,7 @@ def parse_explain(explain: Mapping[str, any], abt_types: Sequence[str]):
try:
et = execution_tree.build_execution_tree(explain['executionStats'])
pt = physical_tree.build(explain['queryPlanner']['winningPlan']['optimizerPlan'])
pt = physical_tree.build(explain['queryPlanner']['winningPlan']['queryPlan'])
except Exception as exception:
print(f'*** Failed to parse explain with the followinf error: {exception}')
print(explain)

View File

@ -59,7 +59,7 @@ assert.commandWorked(coll.createIndex({'c': 1}));
// Return the results with the _id field excluded.
function run({pipeline, plan: expectedPlan}) {
const explain = coll.explain().aggregate(pipeline);
const plan = getPlanSkeleton(explain.queryPlanner.winningPlan.optimizerPlan, {
const plan = getPlanSkeleton(explain.queryPlanner.winningPlan.queryPlan, {
extraKeepKeys: ['indexDefName', 'interval'],
});
assert.eq(plan, expectedPlan, plan);

View File

@ -45,7 +45,7 @@ function assertMatchOnlyNaN(filter) {
};
const explain = coll.explain().find(filter).hint({a: 1}).finish();
const skeleton = getPlanSkeleton(explain.queryPlanner.winningPlan.optimizerPlan, {
const skeleton = getPlanSkeleton(explain.queryPlanner.winningPlan.queryPlan, {
extraKeepKeys: ['indexDefName', 'interval'],
});
assert.eq(skeleton, expectedExplain, {filter: filter, explain: explain, skeleton: skeleton});
@ -74,7 +74,7 @@ function assertEmptyCoScan(filter) {
};
const explain = coll.explain().find(filter).finish();
const skeleton = getPlanSkeleton(explain.queryPlanner.winningPlan.optimizerPlan);
const skeleton = getPlanSkeleton(explain.queryPlanner.winningPlan.queryPlan);
assert.eq(skeleton, expectedExplain, {filter: filter, explain: explain, skeleton: skeleton});
}
assertEmptyCoScan({a: {$lt: NaN}});

View File

@ -1,5 +1,4 @@
import {
assertValueOnNonOptimizerPlanPath,
assertValueOnPlanPath,
checkCascadesOptimizerEnabled,
checkPlanCacheParameterization
@ -47,7 +46,7 @@ function verifyCommandParameterization(
let res = coll.explain("executionStats").aggregate(pipeline);
assert.eq(nReturnedExpected, res.executionStats.nReturned);
if (indexed)
assertValueOnNonOptimizerPlanPath(stage, res, path);
assertValueOnPlanPath(stage, res, path);
else
assertValueOnPlanPath("PhysicalScan", res, "child.child.nodeType");
}

View File

@ -1,5 +1,4 @@
import {
assertValueOnNonOptimizerPlanPath,
assertValueOnPlanPath,
checkCascadesOptimizerEnabled,
checkPlanCacheParameterization
@ -47,7 +46,7 @@ function verifyCommandParameterization(
let res = coll.explain("executionStats").find(findCmd).finish();
assert.eq(nReturnedExpected, res.executionStats.nReturned);
if (indexed)
assertValueOnNonOptimizerPlanPath(stage, res, path);
assertValueOnPlanPath(stage, res, path);
else
assertValueOnPlanPath("PhysicalScan", res, "child.child.nodeType");
}

View File

@ -20,8 +20,8 @@ for (let i = 0; i < nDocs; i++) {
assert.commandWorked(bulk.execute());
const res = coll.explain().aggregate([{$match: {'a': {$lt: 2}}}]);
assert(res.queryPlanner.winningPlan.optimizerPlan.hasOwnProperty("properties"));
const props = res.queryPlanner.winningPlan.optimizerPlan.properties;
assert(res.queryPlanner.winningPlan.queryPlan.hasOwnProperty("properties"));
const props = res.queryPlanner.winningPlan.queryPlan.properties;
// Verify the winning plan cardinality is within roughly 25% of the expected documents.
assert.lt(nDocs * 0.2 * 0.75, props.adjustedCE);

View File

@ -258,6 +258,26 @@ export function getExecutionStages(root) {
return [root.executionStats.executionStages];
}
/**
* Returns the winningPlan.queryPlan of each shard in the explain in a list.
*/
export function getShardQueryPlans(root) {
    // Sharded agg explain: per-shard output is nested under a top-level "shards" object
    // keyed by shard name.
    if (root.hasOwnProperty("shards")) {
        return Object.values(root.shards)
            .map(shard => shard.queryPlanner.winningPlan.queryPlan);
    }
    // Sharded find explain: per-shard plans live in an array under queryPlanner.winningPlan.
    return root.queryPlanner.winningPlan.shards.map(shard => shard.winningPlan.queryPlan);
}
/**
* Given the root stage of agg explain's JSON representation of a query plan ('root'), returns all
* subdocuments whose stage is 'stage'. This can either be an agg stage name like "$cursor" or

View File

@ -40,8 +40,7 @@ export function usedBonsaiOptimizer(explain) {
if (explain.hasOwnProperty("shards")) {
// This section handles the explain output for aggregations against sharded colls.
for (let shardName of Object.keys(explain.shards)) {
if (!explain.shards[shardName].queryPlanner.winningPlan.hasOwnProperty(
"optimizerPlan")) {
if (explain.shards[shardName].queryPlanner.queryFramework !== "cqf") {
return false;
}
}
@ -50,7 +49,7 @@ export function usedBonsaiOptimizer(explain) {
explain.queryPlanner.winningPlan.hasOwnProperty("shards")) {
// This section handles the explain output for find queries against sharded colls.
for (let shardExplain of explain.queryPlanner.winningPlan.shards) {
if (!shardExplain.winningPlan.hasOwnProperty("optimizerPlan")) {
if (shardExplain.queryFramework !== "cqf") {
return false;
}
}
@ -58,17 +57,7 @@ export function usedBonsaiOptimizer(explain) {
}
// This section handles the explain output for unsharded queries.
if (!isAggregationPlan(explain)) {
return explain.queryPlanner.winningPlan.hasOwnProperty("optimizerPlan");
}
const plannerOutput = getAggPlanStage(explain, "$cursor");
if (plannerOutput != null) {
return plannerOutput["$cursor"].queryPlanner.winningPlan.hasOwnProperty("optimizerPlan");
} else {
return explain.queryPlanner.winningPlan.hasOwnProperty("optimizerPlan");
}
return explain.hasOwnProperty("queryPlanner") && explain.queryPlanner.queryFramework === "cqf";
}
/**
@ -83,10 +72,12 @@ export function leftmostLeafStage(node) {
node = node.queryPlanner;
} else if (node.winningPlan) {
node = node.winningPlan;
} else if (node.optimizerPlan) {
node = node.optimizerPlan;
} else if (node.queryPlan) {
node = node.queryPlan;
} else if (node.child) {
node = node.child;
} else if (node.inputStage) {
node = node.inputStage;
} else if (node.leftChild) {
node = node.leftChild;
} else if (node.children) {
@ -119,7 +110,7 @@ export function getPlanSkeleton(node, options = {}) {
'queryPlanner',
'winningPlan',
'optimizerPlan',
'queryPlan',
'child',
'children',
'leftChild',
@ -328,7 +319,7 @@ export function prettyOp(op) {
*/
export function removeUUIDsFromExplain(db, explain) {
const listCollsRes = db.runCommand({listCollections: 1}).cursor.firstBatch;
let plan = explain.queryPlanner.winningPlan.optimizerPlan.plan.toString();
let plan = explain.queryPlanner.winningPlan.queryPlan.plan.toString();
for (let entry of listCollsRes) {
const uuidStr = entry.info.uuid.toString().slice(6).slice(0, -2);
@ -357,15 +348,11 @@ export function navigateToPath(doc, path) {
}
export function navigateToPlanPath(doc, path) {
return navigateToPath(doc, "queryPlanner.winningPlan.optimizerPlan." + path);
}
function navigateToNonOptimizerPlanPath(doc, path) {
return navigateToPath(doc, "queryPlanner.winningPlan.queryPlan." + path);
}
export function navigateToRootNode(doc) {
return navigateToPath(doc, "queryPlanner.winningPlan.optimizerPlan");
return navigateToPath(doc, "queryPlanner.winningPlan.queryPlan");
}
export function assertValueOnPathFn(value, doc, path, fn) {
@ -386,10 +373,6 @@ export function assertValueOnPlanPath(value, doc, path) {
assertValueOnPathFn(value, doc, path, navigateToPlanPath);
}
export function assertValueOnNonOptimizerPlanPath(value, doc, path) {
assertValueOnPathFn(value, doc, path, navigateToNonOptimizerPlanPath);
}
export function runWithParams(keyValPairs, fn) {
let prevVals = [];

View File

@ -0,0 +1,351 @@
/**
* Tests the queryPlanner explain output for CQF.
*/
import {
getAllPlanStages,
getShardQueryPlans,
getWinningPlanFromExplain
} from "jstests/libs/analyze_plan.js"
import {DiscoverTopology} from "jstests/libs/discover_topology.js";
import {
leftmostLeafStage,
usedBonsaiOptimizer,
} from "jstests/libs/optimizer_utils.js";
import {setParameterOnAllHosts} from "jstests/noPassthrough/libs/server_parameter_helpers.js";
let dbName = "test";
let kForwardDir = "forward";
let kBackwardDir = "backward";
let kTryBonsai = "tryBonsai";
// Asserts on the direction of the physical scan in the explain output.
function checkScanDirection(explain, expectedDir) {
    const leaf = leftmostLeafStage(explain);
    // Sanity check that the leftmost leaf of the plan is the collection scan whose
    // direction we want to verify.
    assert.eq(leaf.stage, "COLLSCAN");
    // Assert on the direction of the scan.
    assert.eq(leaf.direction, expectedDir);
}
// Sets the internalQueryFrameworkControl server parameter to 'value' on every
// non-config node discoverable from this connection's topology.
function setFrameworkControl(db, value) {
    setParameterOnAllHosts(
        DiscoverTopology.findNonConfigNodes(db.getMongo()), "internalQueryFrameworkControl", value);
}
// Dispatches verification of an input stage to the analyzer matching its "stage" name.
// Throws on ROOT (which must never be an input) and on unrecognized stage names.
function analyzeInputStage(inputStage) {
    const stageName = inputStage.stage;
    if (stageName === "ROOT") {
        throw "A ROOT stage should not be the input to a stage.";
    } else if (stageName === "EVALUATION") {
        analyzeEvaluationStage(inputStage);
    } else if (stageName === "FILTER") {
        analyzeFilterStage(inputStage);
    } else if (stageName === "COLLSCAN") {
        analyzeCollScanStage(inputStage);
    } else {
        throw "Unrecognized input stage: " + stageName;
    }
}
// Verifies the shape of the top stage of a user-facing plan: either an EOF leaf,
// or a ROOT stage with estimates and a single input stage (recursively verified).
function analyzeRootStage(rootStage) {
    assert(rootStage.hasOwnProperty("stage"));
    // An EOF plan is a leaf: it carries a node id and no input stage.
    if (rootStage.stage == "EOF") {
        assert(rootStage.hasOwnProperty("planNodeId"));
        assert(!rootStage.hasOwnProperty("inputStage"));
        return;
    }
    assert(rootStage.stage == "ROOT");
    for (const prop of ["projections", "cardinalityEstimate", "costEstimate", "inputStage"]) {
        assert(rootStage.hasOwnProperty(prop));
    }
    analyzeInputStage(rootStage.inputStage);
}
// Verifies an EVALUATION stage: node id, projections, cardinality estimate, and a
// single input stage which is verified recursively.
function analyzeEvaluationStage(evaluationStage) {
    assert(evaluationStage.hasOwnProperty("stage") && evaluationStage.stage == "EVALUATION");
    for (const prop of ["planNodeId", "projections", "cardinalityEstimate", "inputStage"]) {
        assert(evaluationStage.hasOwnProperty(prop));
    }
    analyzeInputStage(evaluationStage.inputStage);
}
// Verifies a FILTER stage: node id, predicate, cardinality estimate, and a single
// input stage which is verified recursively.
function analyzeFilterStage(filterStage) {
    assert(filterStage.hasOwnProperty("stage") && filterStage.stage == "FILTER");
    for (const prop of ["planNodeId", "filter", "cardinalityEstimate", "inputStage"]) {
        assert(filterStage.hasOwnProperty(prop));
    }
    analyzeInputStage(filterStage.inputStage);
}
// Verifies a COLLSCAN stage: it is a leaf (no input stage) reporting its direction,
// projections, node id, and cardinality estimate.
function analyzeCollScanStage(collScanStage) {
    assert(collScanStage.hasOwnProperty("stage") && collScanStage.stage == "COLLSCAN");
    for (const prop of ["planNodeId", "direction", "projections", "cardinalityEstimate"]) {
        assert(collScanStage.hasOwnProperty(prop));
    }
    assert(!collScanStage.hasOwnProperty("inputStage"));
}
// Returns the list of stage names (e.g. "ROOT", "COLLSCAN") present in the explain,
// in the order reported by getAllPlanStages.
function getAllPlanStagesList(stagesExplain) {
    return getAllPlanStages(stagesExplain).map(stageObj => stageObj.stage);
}
/**
 * A helper function to perform assertions over an arbitrary amount of explain paths, all of which
 * should have the same stages present.
 *
 * @param {array} explainPathList - a list of explain paths over which we will perform some
 *     assertions
 * @param {array} expectedStages - the list of stages that all given explain paths should contain
 * @param {string} expectedDir - the expected scan direction; currently unused pending
 *     SERVER-82876 (see the TODO below).
 */
function analyzeExplainHelper(explainPathList, expectedStages, expectedDir) {
    // For each of the explain paths given, find the list of stages present in the explain, assert
    // that list is the same as expectedStages, and analyze the root stage of the explain.
    for (let explain of explainPathList) {
        let planStages = getAllPlanStagesList(explain);
        assert.eq(planStages.length, expectedStages.length);
        for (let expectedStage of expectedStages) {
            assert(planStages.includes(expectedStage));
        }
        analyzeRootStage(explain);
        // We can only assert on the scan direction if a COLLSCAN stage is present in the explain.
        // This is not the case for an EOF plan.
        if (!expectedStages.includes("EOF")) {
            // TODO SERVER-82876: Change this assertion to use expectedDir.
            checkScanDirection(explain, "<todo>");
        }
    }
}
/**
 * Verifies an explain for either topology: asserts Bonsai was used, extracts the per-shard
 * plans when sharded, and checks the expected stages (and, eventually, scan direction) on
 * each plan via analyzeExplainHelper.
 */
function analyzeExplain(
    explain, expectedStandaloneStages, expectedShardedStages, expectedDir, isSharded) {
    // Sanity check that we used Bonsai.
    assert(usedBonsaiOptimizer(explain), tojson(explain));
    if (isSharded) {
        let allShards = getShardQueryPlans(explain);
        // If we expect an EOF stage in a sharded environment, there will only be one shard in the
        // explain output. Otherwise there will be two.
        assert.eq(expectedShardedStages.includes("EOF") ? 1 : 2, allShards.length);
        analyzeExplainHelper(allShards, expectedShardedStages, expectedDir);
    } else {
        let stagesPath = getWinningPlanFromExplain(explain);
        analyzeExplainHelper([stagesPath], expectedStandaloneStages, expectedDir);
    }
}
// Runs explain on a find command (with optional projection and hint) and verifies the
// resulting plan shape via analyzeExplain.
function analyzeFindExplain(coll,
                            expectedStandaloneStages,
                            expectedShardedStages,
                            expectedDir,
                            isSharded,
                            query,
                            projection = {},
                            hint = {}) {
    const cursor = coll.find(query, projection).hint(hint);
    const explain = cursor.explain();
    analyzeExplain(
        explain, expectedStandaloneStages, expectedShardedStages, expectedDir, isSharded);
}
// Runs explain on an aggregate command (with optional hint) and verifies the resulting
// plan shape via analyzeExplain.
function analyzeAggExplain(coll,
                           expectedStandaloneStages,
                           expectedShardedStages,
                           expectedDir,
                           isSharded,
                           pipeline,
                           hint = {}) {
    const cmd = {aggregate: coll.getName(), pipeline: pipeline, explain: true};
    // Only attach a hint when the caller supplied a non-empty one.
    const hasHint = Object.keys(hint).length > 0;
    if (hasHint) {
        cmd.hint = hint;
    }
    const explain = assert.commandWorked(coll.runCommand(cmd));
    analyzeExplain(
        explain, expectedStandaloneStages, expectedShardedStages, expectedDir, isSharded);
}
// Exercises find and agg explains under tryBonsai against an empty collection, a
// contradiction query, and a populated collection, asserting on the user-facing plan
// stages (and, where a COLLSCAN is present, the scan direction).
function runTest(db, coll, isSharded) {
    setFrameworkControl(db, kTryBonsai);
    const emptyColl = db.empty_coll;
    emptyColl.drop();
    // Empty collection should have EOF as explain, find.
    analyzeFindExplain(emptyColl,
                       ["EOF"] /* expectedStandaloneStages */,
                       ["EOF"] /* expectedShardedStages */,
                       null /* expectedDir, shouldn't be checked for this case */,
                       isSharded,
                       {} /* query */,
                       {} /* projection */,
                       {} /* hint */);
    // Empty collection should have EOF as explain, agg.
    analyzeAggExplain(emptyColl,
                      ["EOF"] /* expectedStandaloneStages */,
                      ["EOF"] /* expectedShardedStages */,
                      null /* expectedDir, shouldn't be checked for this case */,
                      isSharded,
                      [] /* pipeline */,
                      {} /* hint */);
    const contradictionColl = db.contradictionColl;
    contradictionColl.drop();
    // The queries below against this collection will hint a collection scan, so they will go
    // through Bonsai. The index metadata information will tell us that a is non-multikey so the
    // query is a contradiction and will therefore result in an EOF plan.
    contradictionColl.insert({a: 10});
    contradictionColl.createIndex({a: 1});
    // Contradiction plan results in EOF plan, find.
    analyzeFindExplain(contradictionColl,
                       ["EOF"] /* expectedStandaloneStages */,
                       ["EOF"] /* expectedShardedStages */,
                       null /* expectedDir */,
                       isSharded,
                       {$and: [{a: 2}, {a: 3}]} /* query */,
                       {} /* projection */,
                       {$natural: 1} /* hint */);
    // Contradiction plan results in EOF plan, agg.
    analyzeAggExplain(contradictionColl,
                      ["EOF"] /* expectedStandaloneStages */,
                      ["EOF"] /* expectedShardedStages */,
                      null /* expectedDir */,
                      isSharded,
                      [{$match: {$and: [{a: 2}, {a: 3}]}}] /* pipeline */,
                      {$natural: 1} /* hint */);
    // Hinted forward scan, find.
    analyzeFindExplain(coll,
                       ["ROOT", "COLLSCAN"] /* expectedStandaloneStages */,
                       ["ROOT", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
                       kForwardDir /* expectedDir */,
                       isSharded,
                       {} /* query */,
                       {} /* projection */,
                       {$natural: 1} /* hint */);
    // Hinted forward scan, agg.
    analyzeAggExplain(coll,
                      ["ROOT", "COLLSCAN"] /* expectedStandaloneStages */,
                      ["ROOT", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
                      kForwardDir /* expectedDir */,
                      isSharded,
                      [] /* pipeline */,
                      {$natural: 1} /* hint */);
    // Hinted backward scan, find.
    analyzeFindExplain(coll,
                       ["ROOT", "COLLSCAN"] /* expectedStandaloneStages */,
                       ["ROOT", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
                       kBackwardDir /* expectedDir */,
                       isSharded,
                       {} /* query */,
                       {} /* projection */,
                       {$natural: -1} /* hint */);
    // Hinted backward scan, agg.
    analyzeAggExplain(coll,
                      ["ROOT", "COLLSCAN"] /* expectedStandaloneStages */,
                      ["ROOT", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
                      kBackwardDir /* expectedDir */,
                      isSharded,
                      [] /* pipeline */,
                      {$natural: -1} /* hint */);
    // Query that should have more interesting stages in the explain output, find.
    analyzeFindExplain(
        coll,
        ["ROOT", "EVALUATION", "FILTER", "COLLSCAN"] /* expectedStandaloneStages */,
        ["ROOT", "EVALUATION", "FILTER", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
        kForwardDir /* expectedDir */,
        isSharded,
        {a: {$lt: 5}} /* query */,
        {a: 1} /* projection */,
        {} /* hint */);
    // Query that should have more interesting stages in the explain output, agg.
    analyzeAggExplain(
        coll,
        ["ROOT", "EVALUATION", "FILTER", "COLLSCAN"] /* expectedStandaloneStages */,
        ["ROOT", "EVALUATION", "FILTER", "FILTER", "COLLSCAN"] /* expectedShardedStages */,
        kForwardDir /* expectedDir */,
        isSharded,
        [{$match: {a: {$lt: 5}}}, {$project: {'a': 1}}] /* pipeline */,
        {} /* hint */);
}
// Creates and populates the test collection with 100 docs ({_id: i, a: i}), sharding it
// across two chunks when requested. Returns the collection.
function setup(conn, db, isSharded) {
    const coll = db.explain_cqf;
    coll.drop();
    const docs = Array.from({length: 100}, (_, i) => ({_id: i, a: i}));
    coll.insertMany(docs);
    if (isSharded) {
        // Split at {_id: 50} and move the chunk containing {_id: 51} so both shards own data.
        conn.shardColl(coll.getName(), {_id: 1}, {_id: 50}, {_id: 51});
    }
    return coll;
}
// Standalone: run the test against a single mongod with CQF enabled and the Bonsai
// explain failpoint active.
let conn = MongoRunner.runMongod({
    setParameter: {
        featureFlagCommonQueryFramework: true,
        "failpoint.enableExplainInBonsai": tojson({mode: "alwaysOn"}),
    }
});
assert.neq(null, conn, "mongod was unable to start up");
const db = conn.getDB(dbName);
const coll = setup(conn, db, false);
runTest(db, coll, false /* isSharded */);
MongoRunner.stopMongod(conn);
// Sharded: repeat against a 2-shard cluster; CQF is enabled on both the shards and
// the mongos, and the explain failpoint on the shards.
let shardingConn = new ShardingTest({
    shards: 2,
    mongos: 1,
    other: {
        shardOptions: {
            setParameter: {
                "failpoint.enableExplainInBonsai": tojson({mode: "alwaysOn"}),
                featureFlagCommonQueryFramework: true,
            }
        },
        mongosOptions: {
            setParameter: {
                featureFlagCommonQueryFramework: true,
            }
        },
    }
});
const shardedDb = shardingConn.getDB(dbName);
const shardedColl = setup(shardingConn, shardedDb, true);
runTest(shardedDb, shardedColl, true /* isSharded */);
shardingConn.stop();

View File

@ -13,7 +13,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "Evaluation",
@ -47,7 +47,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "Evaluation",

View File

@ -10,7 +10,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "Evaluation",
@ -43,7 +43,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "Evaluation",

View File

@ -9,7 +9,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "Filter",
@ -32,7 +32,7 @@ Plan skeleton:
{
"queryPlanner" : {
"winningPlan" : {
"optimizerPlan" : {
"queryPlan" : {
"nodeType" : "Root",
"child" : {
"nodeType" : "NestedLoopJoin",

View File

@ -106,6 +106,7 @@
#include "mongo/db/query/optimizer/utils/utils.h"
#include "mongo/db/query/plan_executor_factory.h"
#include "mongo/db/query/plan_yield_policy.h"
#include "mongo/db/query/query_decorations.h"
#include "mongo/db/query/query_knobs_gen.h"
#include "mongo/db/query/query_planner_params.h"
#include "mongo/db/query/query_request_helper.h"
@ -454,6 +455,11 @@ static ExecParams createExecutor(
// plan instead.
PlanAndProps toExplain = std::move(planAndProps);
// TODO SERVER-82709: Instead of using the framework control here, use the query eligibility
// information.
auto frameworkControl =
QueryKnobConfiguration::decoration(opCtx).getInternalQueryFrameworkControlForOp();
ExplainVersion explainVersion = ExplainVersion::Vmax;
const auto& explainVersionStr = internalCascadesOptimizerExplainVersion.get();
if (explainVersionStr == "v1"_sd) {
@ -465,12 +471,16 @@ static ExecParams createExecutor(
} else if (explainVersionStr == "v2compact"_sd) {
explainVersion = ExplainVersion::V2Compact;
toExplain = *phaseManager.getPostMemoPlan();
} else if (explainVersionStr == "bson"_sd &&
frameworkControl == QueryFrameworkControlEnum::kTryBonsai) {
explainVersion = ExplainVersion::UserFacingExplain;
} else if (explainVersionStr == "bson"_sd) {
explainVersion = ExplainVersion::V3;
} else {
// Should have been validated.
MONGO_UNREACHABLE;
}
abtPrinter = std::make_unique<ABTPrinter>(
phaseManager.getMetadata(), std::move(toExplain), explainVersion);

View File

@ -163,6 +163,12 @@ void generatePlannerInfo(PlanExecutor* exec,
plannerBob.append("maxIndexedOrSolutionsReached", enumeratorInfo.hitIndexedOrLimit);
plannerBob.append("maxIndexedAndSolutionsReached", enumeratorInfo.hitIndexedAndLimit);
plannerBob.append("maxScansToExplodeReached", enumeratorInfo.hitScanLimit);
auto framework = exec->getQueryFramework();
if (framework == PlanExecutor::QueryFramework::kCQF) {
plannerBob.append("queryFramework", "cqf");
}
auto&& [winningStats, _] =
explainer.getWinningPlanStats(ExplainOptions::Verbosity::kQueryPlanner);
plannerBob.append("winningPlan", winningStats);

View File

@ -103,6 +103,10 @@ BSONObj ABTPrinter::explainBSON() const {
nullptr /*memoInterface*/,
_planAndProps._map);
case ExplainVersion::UserFacingExplain: {
UserFacingExplain ex(_planAndProps._map);
return ex.explain(_planAndProps._node);
}
case ExplainVersion::Vmax:
// Should not be seeing this value here.
break;

View File

@ -46,7 +46,7 @@
namespace mongo::optimizer {
enum class ExplainVersion { V1, V2, V2Compact, V3, Vmax };
enum class ExplainVersion { V1, V2, V2Compact, V3, UserFacingExplain, Vmax };
/**
* This structure holds any data that is required by the explain. It is self-sufficient and separate
@ -66,6 +66,166 @@ private:
ExplainVersion _explainVersion;
};
/**
 * Generates the user-facing BSON explain ("queryPlan") for a physical ABT. Walks the tree
 * and emits one BSON object per supported node (Root, Filter, Evaluation, PhysicalScan),
 * with an EOF plan special-cased for queries that can return no results.
 */
class UserFacingExplain {
public:
    UserFacingExplain(const NodeToGroupPropsMap& nodeMap = {}) : _nodeMap(nodeMap) {}

    // Constants relevant to all stages.
    constexpr static StringData kStage = "stage"_sd;
    constexpr static StringData kNodeId = "planNodeId"_sd;
    constexpr static StringData kProj = "projections"_sd;
    constexpr static StringData kCE = "cardinalityEstimate"_sd;
    constexpr static StringData kInput = "inputStage"_sd;

    // Specific to PhysicalScanNode.
    constexpr static StringData kScanName = "COLLSCAN"_sd;
    constexpr static StringData kDir = "direction"_sd;
    constexpr static StringData kForward = "forward"_sd;
    constexpr static StringData kBackward = "backward"_sd;

    // Specific to FilterNode.
    constexpr static StringData kFilterName = "FILTER"_sd;
    constexpr static StringData kFilter = "filter"_sd;

    // Specific to EvaluationNode.
    constexpr static StringData kEvalName = "EVALUATION"_sd;

    // Specific to RootNode.
    constexpr static StringData kRootName = "ROOT"_sd;
    constexpr static StringData kCost = "costEstimate"_sd;

    // Specific to EOF.
    constexpr static StringData kEOF = "EOF"_sd;

    // The default noop case.
    template <typename T, typename... Ts>
    void walk(const T&, BSONObjBuilder* bob, Ts&&...) {
        // If we get here, that means we are trying to generate explain for an unsupported node. We
        // should never generate an unsupported node to explain to begin with.
        tasserted(8075606, "Trying to generate explain for an unsupported node.");
    }

    // Emits the ROOT stage (projections, estimates, cost) and recurses into its child.
    void walk(const RootNode& node, BSONObjBuilder* bob, const ABT& child, const ABT& /* refs */) {
        auto it = _nodeMap.find(&node);
        tassert(8075600, "Failed to find node properties", it != _nodeMap.end());
        const NodeProps& props = it->second;
        bob->append(kStage, kRootName);
        // "<todo>" placeholders: projections and CE are not populated yet.
        bob->append(kProj, "<todo>");
        bob->append(kCE, "<todo>");
        bob->append(kCost, props._cost.getCost());
        BSONObjBuilder inputBob(bob->subobjStart(kInput));
        generateExplain(child, &inputBob);
    }

    // Emits a FILTER stage and recurses into its child.
    void walk(const FilterNode& node,
              BSONObjBuilder* bob,
              const ABT& child,
              const ABT& /* expr */) {
        auto it = _nodeMap.find(&node);
        tassert(8075601, "Failed to find node properties", it != _nodeMap.end());
        const NodeProps& props = it->second;
        bob->append(kStage, kFilterName);
        bob->append(kNodeId, props._planNodeId);
        // "<todo>" placeholders: filter expression and CE are not populated yet.
        bob->append(kFilter, "<todo>");
        bob->append(kCE, "<todo>");
        BSONObjBuilder inputBob(bob->subobjStart(kInput));
        generateExplain(child, &inputBob);
    }

    // Emits an EVALUATION stage and recurses into its child.
    void walk(const EvaluationNode& node,
              BSONObjBuilder* bob,
              const ABT& child,
              const ABT& /* expr */) {
        auto it = _nodeMap.find(&node);
        tassert(8075602, "Failed to find node properties", it != _nodeMap.end());
        const NodeProps& props = it->second;
        bob->append(kStage, kEvalName);
        bob->append(kNodeId, props._planNodeId);
        // "<todo>" placeholders: projections and CE are not populated yet.
        bob->append(kProj, "<todo>");
        bob->append(kCE, "<todo>");
        BSONObjBuilder inputBob(bob->subobjStart(kInput));
        generateExplain(child, &inputBob);
    }

    // Emits a COLLSCAN leaf stage (no inputStage is appended).
    void walk(const PhysicalScanNode& node, BSONObjBuilder* bob, const ABT& /* bind */) {
        auto it = _nodeMap.find(&node);
        tassert(8075603, "Failed to find node properties", it != _nodeMap.end());
        const NodeProps& props = it->second;
        bob->append(kStage, kScanName);
        bob->append(kNodeId, props._planNodeId);
        // TODO SERVER-82876: Populate the scan direction here accordingly.
        bob->append(kDir, "<todo>");
        bob->append(kProj, "<todo>");
        bob->append(kCE, "<todo>");
    }

    // Walks the (sub)tree rooted at 'n', appending its explain output to 'bob'.
    void generateExplain(const ABT::reference_type n, BSONObjBuilder* bob) {
        algebra::walk<false>(n, *this, bob);
    }

    // Builds the single-stage EOF plan object for a plan known to produce no results.
    BSONObj generateEOFPlan(const ABT::reference_type node) {
        BSONObjBuilder bob;
        auto it = _nodeMap.find(node.cast<Node>());
        tassert(8075605, "Failed to find node properties", it != _nodeMap.end());
        const NodeProps& props = it->second;
        bob.append(kStage, kEOF);
        bob.append(kNodeId, props._planNodeId);
        return bob.obj();
    }

    // Returns true if 'node' is the canonical EOF plan:
    // Root -> Evaluation(Nothing) -> LimitSkip(0, 0) -> CoScan.
    // NOTE(review): casts to RootNode without an is<RootNode>() check — assumes callers
    // always pass a full plan rooted at a RootNode; confirm at call sites.
    bool isEOFPlan(const ABT::reference_type node) {
        // This function expects the full ABT to be the argument. So we must have a RootNode.
        auto root = node.cast<RootNode>();
        if (!root->getChild().is<EvaluationNode>()) {
            // An EOF plan will have an EvaluationNode as the child of the RootNode.
            return false;
        }
        auto eval = root->getChild().cast<EvaluationNode>();
        if (eval->getProjection() != Constant::nothing()) {
            // The EvaluationNode of an EOF plan will have Nothing as the projection.
            return false;
        }
        // This is the rest of an EOF plan.
        ABT eofChild =
            make<LimitSkipNode>(properties::LimitSkipRequirement{0, 0}, make<CoScanNode>());
        return eval->getChild() == eofChild;
    }

    // Entry point: returns the user-facing explain BSON for the full plan 'node'.
    BSONObj explain(const ABT::reference_type node) {
        // Short circuit to return EOF stage if the collection is empty.
        if (isEOFPlan(node)) {
            return generateEOFPlan(node);
        }
        BSONObjBuilder bob;
        generateExplain(node, &bob);
        BSONObj result = bob.obj();
        // If at this point (after the walk) the explain BSON is empty, that means the ABT had no
        // nodes (if it had any unsupported nodes, we would have hit the tasserted in the
        // default case above).
        tassert(8075604, "The ABT has no nodes.", !result.isEmpty());
        return result;
    }

private:
    // Maps each ABT node to its group properties (plan node id, cost, etc.). Not owned.
    const NodeToGroupPropsMap& _nodeMap;
};
class ExplainGenerator {
public:
// Optionally display logical and physical properties using the memo.

View File

@ -489,7 +489,7 @@ PlanExplainer::PlanStatsDetails buildPlanStatsDetails(
invariant(execPlanDebugInfo);
BSONObjBuilder plan;
if (optimizerExplain) {
plan.append("optimizerPlan", *optimizerExplain);
plan.append("queryPlan", *optimizerExplain);
} else {
plan.append("queryPlan", bob.obj());
}