mirror of https://github.com/mongodb/mongo
SERVER-103381 Add PBT for timeseries collections for cache usage and basic aggregation stages behavior (#45017)
GitOrigin-RevId: ea262d142a7e12bd305d73224380c12366db77e2
parent 07641ddd66
commit
70ef71e55c
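For orientation while reading the diffs below: every property-based test in this commit hands testProperty a property function plus a generated workload. The sketch here shows the general shape of such a property function, assembled from helpers that appear later in the diff (makeBehavioralPropertyFn, createRepeatQueriesUseCacheProperty); the name experimentColl and the trivial invariant are illustrative assumptions, not code from the commit.

// Sketch: a property function receives a query generator plus helpers, runs each
// generated query shape against an experiment collection, and reports either
// {passed: true} or a failure document with diagnostics for testProperty to surface.
function exampleProperty(getQuery, testHelpers) {
    for (let queryIx = 0; queryIx < testHelpers.numQueryShapes; queryIx++) {
        const query = getQuery(queryIx, 0 /* paramIx */);
        const results = experimentColl.aggregate(query.pipeline, query.options).toArray();
        // A real property asserts an invariant about `results` here, e.g. that a
        // trailing {$limit: N} stage bounded the output to at most N documents.
        if (!Array.isArray(results)) {
            return {passed: false, msg: "aggregate() did not return an array.", query, results};
        }
    }
    return {passed: true};
}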
@@ -20,6 +20,7 @@ selector:
- jstests/core/query/partial_index_logical.js
- jstests/core/index/index_filter_commands.js
- jstests/core/index/index_filter_commands_invalidate_plan_cache_entries.js
- jstests/core/timeseries/pbt/timeseries_cache_usage_pbt.js

# Asserting on specific plan, CBR picks a different one
- jstests/core/query/agg_hint.js
@@ -20,6 +20,7 @@ selector:
- jstests/core/query/partial_index_logical.js
- jstests/core/index/index_filter_commands.js
- jstests/core/index/index_filter_commands_invalidate_plan_cache_entries.js
- jstests/core/timeseries/pbt/timeseries_cache_usage_pbt.js

# CBR picks a different plan which produces different results due to SERVER-23229
- jstests/core/query/project/projection_dotted_paths.js
@@ -12,7 +12,6 @@
 *
 * @tags: [
 *   query_intensive_pbt,
 *   requires_timeseries,
 *   assumes_no_implicit_collection_creation_on_get_collection,
 *   # Runs queries that may return many results, requiring getmores.
 *   requires_getmore,
@@ -32,9 +31,16 @@ import {makeWorkloadModel} from "jstests/libs/property_test_helpers/models/workload_models.js";
import {testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {isSlowBuild} from "jstests/libs/query/aggregation_pipeline_utils.js";
import {fc} from "jstests/third_party/fast_check/fc-3.1.0.js";
import {
    checkExclusionProjectionFieldResults,
    checkInclusionProjectionResults,
    checkLimitResults,
    checkSortResults,
    makeBehavioralPropertyFn,
} from "jstests/libs/property_test_helpers/common_properties.js";

if (isSlowBuild(db)) {
    jsTestLog("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    jsTest.log.info("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    quit();
}
@@ -49,53 +55,16 @@ const numRuns = 20;
 * since we can still check if the field exists in the document or not (we don't need to inspect the
 * value).
 */
function checkExclusionProjectionResults(query, results) {
    const projectSpec = query.pipeline.at(-1)["$project"];
    const excludedField = Object.keys(projectSpec).filter((field) => field !== "_id")[0];
    const isIdFieldIncluded = projectSpec._id;

    for (const doc of results) {
        const docFields = Object.keys(doc);
        // If the excluded field still exists, fail.
        if (docFields.includes(excludedField)) {
            return false;
        }
        // If _id is excluded and it exists, fail.
        if (!isIdFieldIncluded && docFields.includes("_id")) {
            return false;
        }
    }
    return true;
}
const exclusionProjectionTest = {
    // The stage we're testing.
    stageArb: getSingleFieldProjectArb(false /*isInclusion*/, {simpleFieldsOnly: true}), // Only allow simple paths, no dotted paths.
    // A function that tests the results are as expected.
    checkResultsFn: checkExclusionProjectionResults,
    checkResultsFn: checkExclusionProjectionFieldResults,
    // A message to output on failure.
    failMsg: "Exclusion projection did not remove the specified fields.",
};

// --- Inclusion projection testing ---
function checkInclusionProjectionResults(query, results) {
    const projectSpec = query.pipeline.at(-1)["$project"];
    const includedField = Object.keys(projectSpec).filter((field) => field !== "_id")[0];
    const isIdFieldExcluded = !projectSpec._id;

    for (const doc of results) {
        for (const field of Object.keys(doc)) {
            // If the _id field is excluded and it exists, fail.
            if (field === "_id" && isIdFieldExcluded) {
                return false;
            }
            // If we have a field on the doc that is not the included field, fail.
            if (field !== "_id" && field !== includedField) {
                return false;
            }
        }
    }
    return true;
}
const inclusionProjectionTest = {
    stageArb: getSingleFieldProjectArb(true /*isInclusion*/, {simpleFieldsOnly: true}),
    checkResultsFn: checkInclusionProjectionResults,
@@ -103,12 +72,6 @@ const inclusionProjectionTest = {
};

// --- $limit testing ---
function checkLimitResults(query, results) {
    const limitStage = query.pipeline.at(-1);
    const limitVal = limitStage["$limit"];

    return results.length <= limitVal;
}
const limitTest = {
    stageArb: limitArb,
    checkResultsFn: checkLimitResults,
@@ -116,43 +79,6 @@ const limitTest = {
};

// --- $sort testing ---
function checkSortResults(query, results) {
    const sortSpec = query.pipeline.at(-1)["$sort"];
    const sortField = Object.keys(sortSpec)[0];
    const sortDirection = sortSpec[sortField];

    function orderCorrect(doc1, doc2) {
        const doc1SortVal = doc1[sortField];
        const doc2SortVal = doc2[sortField];

        // bsonWoCompare does not match the $sort semantics for arrays. It is nontrivial to write a
        // comparison function that matches these semantics, so we will ignore arrays.
        // TODO SERVER-101149 improve sort checking logic to possibly handle arrays and missing
        // values.
        if (Array.isArray(doc1SortVal) || Array.isArray(doc2SortVal)) {
            return true;
        }
        if (typeof doc1SortVal === "undefined" || typeof doc2SortVal === "undefined") {
            return true;
        }

        const cmp = bsonWoCompare(doc1SortVal, doc2SortVal);
        if (sortDirection === 1) {
            return cmp <= 0;
        } else {
            return cmp >= 0;
        }
    }

    for (let i = 0; i < results.length - 1; i++) {
        const doc1 = results[i];
        const doc2 = results[i + 1];
        if (!orderCorrect(doc1, doc2)) {
            return false;
        }
    }
    return true;
}
const sortTest = {
    stageArb: getSortArb(),
    checkResultsFn: checkSortResults,
@@ -178,32 +104,10 @@ const groupTest = {
};

const testCases = [exclusionProjectionTest, inclusionProjectionTest, limitTest, sortTest, groupTest];

const experimentColl = db.agg_behavior_correctness_experiment;

function makePropertyFn(checkResultsFn, failMsg) {
    return function (getQuery, testHelpers) {
        for (let queryIx = 0; queryIx < testHelpers.numQueryShapes; queryIx++) {
            const query = getQuery(queryIx, 0 /* paramIx */);
            const results = experimentColl.aggregate(query.pipeline, query.options).toArray();

            const passed = checkResultsFn(query, results);
            if (!passed) {
                return {
                    passed: false,
                    msg: failMsg,
                    query,
                    results,
                    explain: experimentColl.explain().aggregate(query.pipeline, query.options),
                };
            }
        }
        return {passed: true};
    };
}

for (const {stageArb, checkResultsFn, failMsg} of testCases) {
    const propFn = makePropertyFn(checkResultsFn, failMsg);
    const propFn = makeBehavioralPropertyFn(experimentColl, checkResultsFn, failMsg);

    // Create an agg model that ends with the stage we're testing. The bag does not have to be
    // deterministic because these properties should always hold.
@@ -222,13 +126,4 @@ for (const {stageArb, checkResultsFn, failMsg} of testCases) {
        makeWorkloadModel({collModel: getCollectionModel(), aggModel, numQueriesPerRun: 20}),
        numRuns,
    );

    // TODO SERVER-103381 re-enable timeseries PBT testing.
    // Run the property with a TS collection.
    // testProperty(propFn,
    //     {experimentColl},
    //     makeWorkloadModel(
    //         {collModel: getCollectionModel({isTS: true}), aggModel, numQueriesPerRun:
    //         20}),
    //     numRuns);
}
@@ -6,7 +6,6 @@
 *   query_intensive_pbt,
 *   # This test runs commands that are not allowed with security token: setParameter.
 *   not_allowed_with_signed_security_token,
 *   requires_timeseries,
 *   assumes_no_implicit_collection_creation_on_get_collection,
 *   # Incompatible with setParameter
 *   does_not_support_stepdowns,
@@ -24,7 +23,7 @@ import {testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {isSlowBuild} from "jstests/libs/query/aggregation_pipeline_utils.js";

if (isSlowBuild(db)) {
    jsTestLog("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    jsTest.log.info("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    quit();
}
@@ -50,21 +49,3 @@ testProperty(
    makeWorkloadModel({collModel: getCollectionModel(), aggModel, numQueriesPerRun}),
    numRuns,
);

// TODO SERVER-103381 re-enable timeseries PBT testing.
// Test with a TS collection.
// TODO SERVER-83072 re-enable $group in this test, by removing the filter below.
// const tsAggModel = aggModel.filter(query => {
//     for (const stage of query) {
//         if (Object.keys(stage).includes('$group')) {
//             return false;
//         }
//     }
//     return true;
// });
// testProperty(
//     correctnessProperty,
//     {controlColl, experimentColl},
//     makeWorkloadModel(
//         {collModel: getCollectionModel({isTS: true}), aggModel: tsAggModel, numQueriesPerRun}),
//     numRuns);
@@ -6,7 +6,6 @@
 *
 * @tags: [
 *   query_intensive_pbt,
 *   requires_timeseries,
 *   assumes_standalone_mongod,
 *   # Plan cache state is node-local and will not get migrated alongside user data
 *   assumes_balancer_off,
@@ -18,13 +17,12 @@
import {getCollectionModel} from "jstests/libs/property_test_helpers/models/collection_models.js";
import {getQueryAndOptionsModel} from "jstests/libs/property_test_helpers/models/query_models.js";
import {makeWorkloadModel} from "jstests/libs/property_test_helpers/models/workload_models.js";
import {getPlanCache, testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {isSlowBuild} from "jstests/libs/query/aggregation_pipeline_utils.js";
import {getRejectedPlans} from "jstests/libs/query/analyze_plan.js";
import {checkSbeFullyEnabled} from "jstests/libs/query/sbe_util.js";
import {createRepeatQueriesUseCacheProperty} from "jstests/libs/property_test_helpers/common_properties.js";

if (isSlowBuild(db)) {
    jsTestLog("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    jsTest.log.info("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    quit();
}
@@ -32,66 +30,11 @@ const numRuns = 100;
const numQueriesPerRun = 40;

const experimentColl = db[jsTestName()];

// Motivation: Check that the plan cache key we use to look up in the cache and the key we use to
// store in the cache are consistent.
function repeatQueriesUseCache(getQuery, testHelpers) {
    for (let queryIx = 0; queryIx < testHelpers.numQueryShapes; queryIx++) {
        const query = getQuery(queryIx, 0 /* paramIx */);
        const explain = experimentColl.explain().aggregate(query.pipeline, query.options);

        // If there are no rejected plans, there is no need to cache.
        if (getRejectedPlans(explain).length === 0) {
            continue;
        }

        // Currently, both classic and SBE queries use the classic plan cache.
        const serverStatusBefore = db.serverStatus();
        const classicHitsBefore = serverStatusBefore.metrics.query.planCache.classic.hits;
        const sbeHitsBefore = serverStatusBefore.metrics.query.planCache.sbe.hits;

        for (let i = 0; i < 5; i++) {
            experimentColl.aggregate(query.pipeline, query.options).toArray();
        }

        const serverStatusAfter = db.serverStatus();
        const classicHitsAfter = serverStatusAfter.metrics.query.planCache.classic.hits;
        const sbeHitsAfter = serverStatusAfter.metrics.query.planCache.sbe.hits;

        // If neither the SBE plan cache hits nor the classic plan cache hits have incremented, then
        // our query must not have hit the cache. We check for at least one hit, since ties can
        // prevent a plan from being cached right away.
        if (checkSbeFullyEnabled(db) && sbeHitsAfter - sbeHitsBefore > 0) {
            continue;
        } else if (classicHitsAfter - classicHitsBefore > 0) {
            continue;
        }
        return {
            passed: false,
            message: "Plan cache hits failed to increment after running query several times.",
            query,
            explain,
            classicHitsBefore,
            classicHitsAfter,
            sbeHitsBefore,
            sbeHitsAfter,
            planCacheState: getPlanCache(experimentColl).list(),
        };
    }
    return {passed: true};
}

const aggModel = getQueryAndOptionsModel();

testProperty(
    repeatQueriesUseCache,
    createRepeatQueriesUseCacheProperty(experimentColl),
    {experimentColl},
    makeWorkloadModel({collModel: getCollectionModel({isTS: false}), aggModel, numQueriesPerRun}),
    numRuns,
);
// TODO SERVER-103381 re-enable timeseries PBT testing.
// testProperty(
//     repeatQueriesUseCache,
//     {experimentColl},
//     makeWorkloadModel({collModel: getCollectionModel({isTS: true}), aggModel, numQueriesPerRun}),
//     numRuns);
@@ -0,0 +1,116 @@
/**
 * Test basic properties that should hold for our core agg stages, when placed at the end of a
 * pipeline. This includes:
 * - An exclusion projection should drop the specified fields.
 * - An inclusion projection should keep the specified fields, and drop all others.
 * - $limit should limit the number of results.
 * - $sort should output documents in sorted order.
 * TODO SERVER-83072 enable $group once the timeseries array bug is fixed.
 * - $group should output documents with unique _ids (the group key).
 *
 * These may seem like simple checks that aren't worth testing. However, with complex optimizations,
 * they can still break.
 *
 * @tags: [
 *   query_intensive_pbt,
 *   requires_timeseries,
 *   assumes_no_implicit_collection_creation_on_get_collection,
 *   # Runs queries that may return many results, requiring getmores.
 *   requires_getmore,
 *   # This test runs commands that are not allowed with security token: setParameter.
 *   not_allowed_with_signed_security_token,
 * ]
 */

import {getCollectionModel} from "jstests/libs/property_test_helpers/models/collection_models.js";
import {
    getAggPipelineArb,
    getSingleFieldProjectArb,
    getSortArb,
    limitArb,
} from "jstests/libs/property_test_helpers/models/query_models.js";
import {makeWorkloadModel} from "jstests/libs/property_test_helpers/models/workload_models.js";
import {testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {isSlowBuild} from "jstests/libs/query/aggregation_pipeline_utils.js";
import {fc} from "jstests/third_party/fast_check/fc-3.1.0.js";
import {
    checkExclusionProjectionFieldResults,
    checkInclusionProjectionResults,
    checkLimitResults,
    checkSortResults,
    makeBehavioralPropertyFn,
} from "jstests/libs/property_test_helpers/common_properties.js";
import {getNestedProperties} from "jstests/libs/query/analyze_plan.js";

if (isSlowBuild(db)) {
    jsTest.log.info("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    quit();
}

const is83orAbove = (() => {
    const {version} = db.adminCommand({getParameter: 1, featureCompatibilityVersion: 1}).featureCompatibilityVersion;
    return MongoRunner.compareBinVersions(version, "8.3") >= 0;
})();
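For reference, the FCV probe above works because getParameter nests the FCV under a featureCompatibilityVersion sub-document; a rough sketch of the response shape (the version value is illustrative):

// Approximate response: {featureCompatibilityVersion: {version: "8.2"}, ok: 1}
const fcvDoc = db.adminCommand({getParameter: 1, featureCompatibilityVersion: 1});
const fcvVersion = fcvDoc.featureCompatibilityVersion.version;  // e.g. "8.2"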

const numRuns = 20;

// --- Exclusion projection testing ---
const exclusionProjectionTest = {
    // The stage we're testing.
    stageArb: getSingleFieldProjectArb(false /*isInclusion*/, {simpleFieldsOnly: true}), // Only allow simple paths, no dotted paths.
    // A function that tests the results are as expected.
    checkResultsFn: checkExclusionProjectionFieldResults,
    // A message to output on failure.
    failMsg: "Exclusion projection did not remove the specified fields.",
};

// --- Inclusion projection testing ---
const inclusionProjectionTest = {
    stageArb: getSingleFieldProjectArb(true /*isInclusion*/, {simpleFieldsOnly: true}),
    checkResultsFn: checkInclusionProjectionResults,
    failMsg: "Inclusion projection did not drop all other fields.",
};

// --- $limit testing ---
const limitTest = {
    stageArb: limitArb,
    checkResultsFn: checkLimitResults,
    failMsg: "$limit did not limit how many documents there were in the output",
};

// --- $sort testing ---
const sortTest = {
    stageArb: getSortArb(),
    checkResultsFn: checkSortResults,
    failMsg: "$sort did not output documents in sorted order.",
};

// TODO SERVER-114750 add more test cases here, or add a new PBT.

const testCases = [exclusionProjectionTest, inclusionProjectionTest, limitTest, sortTest];
const experimentColl = db[`${jsTestName()}_experiment`];

for (const {stageArb, checkResultsFn, failMsg} of testCases) {
    const propFn = makeBehavioralPropertyFn(experimentColl, checkResultsFn, failMsg);

    // Create an agg model that ends with the stage we're testing. The bag does not have to be
    // deterministic because these properties should always hold.
    const startOfPipelineArb = getAggPipelineArb({deterministicBag: false, isTS: true});
    const aggModel = fc
        .record({startOfPipeline: startOfPipelineArb, lastStage: stageArb})
        .filter(({startOfPipeline, _}) => {
            // Older versions suffer from SERVER-112844
            return is83orAbove || getNestedProperties(startOfPipeline, "$elemMatch").length == 0;
        })
        .map(function ({startOfPipeline, lastStage}) {
            return {"pipeline": [...startOfPipeline, lastStage], "options": {}};
        });

    // Run the property with a TS collection.
    testProperty(
        propFn,
        {experimentColl},
        makeWorkloadModel({collModel: getCollectionModel({isTS: true}), aggModel, numQueriesPerRun: 20}),
        numRuns,
    );
}
@@ -0,0 +1,51 @@
/**
 * A property-based test that runs the same query on a timeseries collection several times to assert
 * that it eventually uses the plan cache.
 * There have been issues where the key we use to look up in the plan cache is different from the
 * key we use to store the cache entry. This test attempts to target these potential bugs.
 *
 * @tags: [
 *   query_intensive_pbt,
 *   requires_timeseries,
 *   assumes_standalone_mongod,
 *   # Plan cache state is node-local and will not get migrated alongside user data
 *   assumes_balancer_off,
 *   assumes_no_implicit_collection_creation_after_drop,
 *   # Need to clear cache between runs.
 *   does_not_support_stepdowns
 * ]
 */
import {getCollectionModel} from "jstests/libs/property_test_helpers/models/collection_models.js";
import {getQueryAndOptionsModel} from "jstests/libs/property_test_helpers/models/query_models.js";
import {makeWorkloadModel} from "jstests/libs/property_test_helpers/models/workload_models.js";
import {testProperty} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {isSlowBuild} from "jstests/libs/query/aggregation_pipeline_utils.js";
import {getNestedProperties} from "jstests/libs/query/analyze_plan.js";
import {createRepeatQueriesUseCacheProperty} from "jstests/libs/property_test_helpers/common_properties.js";

if (isSlowBuild(db)) {
    jsTest.log.info("Returning early because debug is on, opt is off, or a sanitizer is enabled.");
    quit();
}

const is83orAbove = (() => {
    const {version} = db.adminCommand({getParameter: 1, featureCompatibilityVersion: 1}).featureCompatibilityVersion;
    return MongoRunner.compareBinVersions(version, "8.3") >= 0;
})();

const numRuns = 100;
const numQueriesPerRun = 40;

const experimentColl = db[jsTestName()];

const aggModel = getQueryAndOptionsModel({isTS: true}).filter(
    // Older versions suffer from SERVER-101007
    ({pipeline}) => is83orAbove || getNestedProperties(pipeline, "$elemMatch").length == 0,
);

testProperty(
    createRepeatQueriesUseCacheProperty(experimentColl),
    {experimentColl},
    makeWorkloadModel({collModel: getCollectionModel({isTS: true}), aggModel, numQueriesPerRun}),
    numRuns,
);
@@ -2,7 +2,7 @@
 * Common properties our property-based tests may use. Intended to be paired with the `testProperty`
 * interface in property_testing_utils.js.
 */
import {runDeoptimized} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {getPlanCache, runDeoptimized} from "jstests/libs/property_test_helpers/property_testing_utils.js";
import {
    getAllPlans,
    getAllPlanStages,
@@ -10,6 +10,7 @@ import {
    getRejectedPlans,
    getWinningPlanFromExplain,
} from "jstests/libs/query/analyze_plan.js";
import {checkSbeFullyEnabled} from "jstests/libs/query/sbe_util.js";
import {FixtureHelpers} from "jstests/libs/fixture_helpers.js";

// Returns different query shapes using the first parameters plugged in.
@@ -272,3 +273,166 @@ export function createQueriesWithKnobsSetAreSameAsControlCollScanProperty(contro
        });
    };
}

// Motivation: Check that the plan cache key we use to look up in the cache and the key we use to
// store in the cache are consistent.
export function createRepeatQueriesUseCacheProperty(experimentColl) {
    return function repeatQueriesUseCacheProperty(getQuery, testHelpers) {
        for (let queryIx = 0; queryIx < testHelpers.numQueryShapes; queryIx++) {
            const query = getQuery(queryIx, 0 /* paramIx */);
            const explain = experimentColl.explain().aggregate(query.pipeline, query.options);

            // If there are no rejected plans, there is no need to cache.
            if (getRejectedPlans(explain).length === 0) {
                continue;
            }

            // Currently, both classic and SBE queries use the classic plan cache.
            const serverStatusBefore = db.serverStatus();
            const classicHitsBefore = serverStatusBefore.metrics.query.planCache.classic.hits;
            const sbeHitsBefore = serverStatusBefore.metrics.query.planCache.sbe.hits;

            for (let i = 0; i < 5; i++) {
                experimentColl.aggregate(query.pipeline, query.options).toArray();
            }

            const serverStatusAfter = db.serverStatus();
            const classicHitsAfter = serverStatusAfter.metrics.query.planCache.classic.hits;
            const sbeHitsAfter = serverStatusAfter.metrics.query.planCache.sbe.hits;

            // If neither the SBE plan cache hits nor the classic plan cache hits have incremented, then
            // our query must not have hit the cache. We check for at least one hit, since ties can
            // prevent a plan from being cached right away.
            if (checkSbeFullyEnabled(db) && sbeHitsAfter - sbeHitsBefore > 0) {
                continue;
            } else if (classicHitsAfter - classicHitsBefore > 0) {
                continue;
            }
            return {
                passed: false,
                message: "Plan cache hits failed to increment after running query several times.",
                query,
                explain,
                classicHitsBefore,
                classicHitsAfter,
                sbeHitsBefore,
                sbeHitsAfter,
                planCacheState: getPlanCache(experimentColl).list(),
            };
        }
        return {passed: true};
    };
}

// Function to verify that the field excluded by an exclusion projection does not exist in any result documents.
export function checkExclusionProjectionFieldResults(query, results) {
    const projectSpec = query.pipeline.at(-1)["$project"];
    const excludedField = Object.keys(projectSpec).filter((field) => field !== "_id")[0];
    const isIdFieldIncluded = projectSpec._id;

    for (const doc of results) {
        const docFields = Object.keys(doc);
        // If the excluded field still exists, fail.
        if (docFields.includes(excludedField)) {
            return false;
        }
        // If _id is excluded and it exists, fail.
        if (!isIdFieldIncluded && docFields.includes("_id")) {
            return false;
        }
    }
    return true;
}

// Function to verify that only the field included by an inclusion projection exists in the result documents.
export function checkInclusionProjectionResults(query, results) {
    const projectSpec = query.pipeline.at(-1)["$project"];
    const includedField = Object.keys(projectSpec).filter((field) => field !== "_id")[0];
    const isIdFieldExcluded = !projectSpec._id;

    for (const doc of results) {
        for (const field of Object.keys(doc)) {
            // If the _id field is excluded and it exists, fail.
            if (field === "_id" && isIdFieldExcluded) {
                return false;
            }
            // If we have a field on the doc that is not the included field, fail.
            if (field !== "_id" && field !== includedField) {
                return false;
            }
        }
    }
    return true;
}

// Function to verify that the number of results is less than or equal to the limit specified.
export function checkLimitResults(query, results) {
    const limitStage = query.pipeline.at(-1);
    const limitVal = limitStage["$limit"];

    return results.length <= limitVal;
}

// Function to verify that the results are sorted according to the $sort specification.
export function checkSortResults(query, results) {
    const sortSpec = query.pipeline.at(-1)["$sort"];
    const sortField = Object.keys(sortSpec)[0];
    const sortDirection = sortSpec[sortField];

    function orderCorrect(doc1, doc2) {
        const doc1SortVal = doc1[sortField];
        const doc2SortVal = doc2[sortField];

        // bsonWoCompare does not match the $sort semantics for arrays. It is nontrivial to write a
        // comparison function that matches these semantics, so we will ignore arrays.
        // TODO SERVER-101149 improve sort checking logic to possibly handle arrays and missing
        // values.
        if (Array.isArray(doc1SortVal) || Array.isArray(doc2SortVal)) {
            return true;
        }
        if (typeof doc1SortVal === "undefined" || typeof doc2SortVal === "undefined") {
            return true;
        }

        const cmp = bsonWoCompare(doc1SortVal, doc2SortVal);
        if (sortDirection === 1) {
            return cmp <= 0;
        } else {
            return cmp >= 0;
        }
    }

    for (let i = 0; i < results.length - 1; i++) {
        const doc1 = results[i];
        const doc2 = results[i + 1];
        if (!orderCorrect(doc1, doc2)) {
            return false;
        }
    }
    return true;
}

// 'checkResultsFn' takes the query and the results and outputs a boolean. Use
// 'makeBehavioralPropertyFn' when the expected behavior of a query is testable from the results
// alone. For example, if we have {$limit: 2}, we can test that the limit worked from the results
// alone, by asserting results.length <= 2.
export function makeBehavioralPropertyFn(experimentColl, checkResultsFn, failMsg) {
    return function (getQuery, testHelpers) {
        for (let queryIx = 0; queryIx < testHelpers.numQueryShapes; queryIx++) {
            const query = getQuery(queryIx, 0 /* paramIx */);
            const results = experimentColl.aggregate(query.pipeline, query.options).toArray();

            const passed = checkResultsFn(query, results);
            if (!passed) {
                return {
                    passed: false,
                    msg: failMsg,
                    query,
                    results,
                    explain: experimentColl.explain().aggregate(query.pipeline, query.options),
                };
            }
        }
        return {passed: true};
    };
}
@@ -139,10 +139,11 @@ export function getQueryAndOptionsModel({
    deterministicBag = true,
    allowCollation = false,
    allowedStages = [],
    isTS = false,
} = {}) {
    const noCollation = fc.constant({});
    return fc.record({
        "pipeline": getAggPipelineArb({allowOrs, deterministicBag, allowedStages}),
        "pipeline": getAggPipelineArb({allowOrs, deterministicBag, allowedStages, isTS}),
        "options": allowCollation ? oneof(noCollation, fc.record({"collation": collationArb})) : noCollation,
    });
}
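For context on the new isTS flag: the timeseries cache-usage test added above consumes it roughly as follows (a condensed sketch of the call site shown earlier in this diff, with tsAggModel as an illustrative name):

// Generate timeseries-friendly aggregation pipelines and drop any that use $elemMatch,
// since older versions mishandle it (see the SERVER tickets referenced in the tests above).
const tsAggModel = getQueryAndOptionsModel({isTS: true})
    .filter(({pipeline}) => getNestedProperties(pipeline, "$elemMatch").length === 0);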