mirror of https://github.com/mongodb/mongo
SERVER-115160 Remove stub fallback parsers from listMqlEntities (#45142)
GitOrigin-RevId: b0466ba903392d3e85f8e6aad2c5a5c700e94dfd
parent 514b7bbae0
commit b006f9bfed
@@ -474,21 +474,7 @@ const allPipelines = [
 ];

 const testedStages = new Set(allPipelines.flatMap((pipeline) => pipeline.map((obj) => Object.keys(obj)[0])));

-for (const aggStage of aggStages) {
-    // Confirm that every aggregation stage is either tested or explicitly skipped.
-    if (testedStages.has(aggStage) || skips.has(aggStage)) {
-        continue;
-    }
-    // If we reach here, then the aggregation stage has not been tested or skipped. We confirm that
-    // this stage is a stub stage that is defined for tests in aggregation_stage_stub_parsers.json.
-    // If not, we will trigger the assertion and should test or skip the new stage.
-    try {
-        coll.aggregate([{[aggStage]: {}}]);
-        assert(false, aggStage + " has not been tested for null bytes.");
-    } catch (e) {
-        if (e.code !== 10918500) {
-            // Stub aggregation stage error code.
-            assert(false, aggStage + " has not been tested for null bytes.");
-        }
-    }
-}
+for (const aggStage of aggStages) {
+    assert(testedStages.has(aggStage) || skips.has(aggStage), aggStage + " has not been tested for null bytes.");
+}
@@ -243,10 +243,6 @@ const skippedAuthTestingAggStages = [
     "$sessionWindow",
     "$validate",
     "$setStreamMeta",
-
-    // The following stages are stubs defined in aggregation_stage_stub_parsers.json.
-    "$stubStage",
-    "$testFoo",
 ];

 // The following commands are skipped in 'authCommandsLib' because they are unable to be
@@ -52,10 +52,9 @@ for (let i = 0; i < 10; i++) {
  * This is expected for all stages that return **user** inserted documents.
  * 4. skippedStages: These stages **must** either be
  *      (a) run on the admin db
- *      (b) stub stages for testing
- *      (c) tested elsewhere
- *      (d) stages that can only run in stream processors
- *      (e) internal only stages (cannot be made by user requests) that run on oplog data
+ *      (b) tested elsewhere
+ *      (c) stages that can only run in stream processors
+ *      (d) internal only stages (cannot be made by user requests) that run on oplog data
  *
  * Once you've determined which set your new aggregation stage belongs to add a test case to the appropriate set.
  * Each set has a slightly different test format.
@@ -291,10 +290,9 @@ const unpackTests = [
 // The following pipeline stages do not need to be tested for timeseries collections.
 // Stages that are skipped **must** be one of the following:
 // 1. Stages that only run on the admin database.
-// 2. Stub stages that are defined for tests in aggregation_stage_stub_parsers.json.
-// 3. Stages that are tested elsewhere.
-// 4. Stages that can only run in stream processors.
-// 5. Stages that cannot be made by user requests and run on oplog data.
+// 2. Stages that are tested elsewhere.
+// 3. Stages that can only run in stream processors.
+// 4. Stages that cannot be made by user requests and run on oplog data.
 const skippedStages = [
     // All change stream stages are temporarily here. TODO SERVER-113494 enable tests here.
     "$changeStream",
@@ -331,10 +329,6 @@ const skippedStages = [
     "$_internalAllCollectionStats",
     "$queue",

-    // Stub stages defined in 'aggregation_stage_stub_parsers.json'.
-    "$stubStage",
-    "$testFoo",
-
     // Stages tested in 'search_disallowed_on_timeseries.js', since they require extra setup.
     "$search",
     "$searchMeta",
@@ -79,9 +79,12 @@ ListMqlEntitiesStage::ListMqlEntitiesStage(
     MqlEntityTypeEnum type,
     const LiteParsedDocumentSource::ParserMap& docSourceParserMap)
     : Stage(stageName, pExpCtx), _type(type) {
-    for (auto&& [stageName, _] : docSourceParserMap) {
-        _results.push_back(stageName);
+    for (auto&& [stageName, registration] : docSourceParserMap) {
+        // Exclude this stage if it is a stub that has no primary parser.
+        if (registration.isExecutable()) {
+            _results.push_back(stageName);
+        }
     }

     // Canonicalize output order of results. Sort in descending order so that we can use a cheap
     // 'pop_back()' to return the results in order.
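For context, here is a minimal standalone sketch of the filtering pattern the hunk above introduces, together with the descending-sort / pop_back() idea mentioned in its trailing comment. The Registration struct, the listExecutableStages() helper, and the stage names below are simplified stand-ins for illustration, not the real LiteParsedDocumentSource or ListMqlEntitiesStage types.

#include <algorithm>
#include <cassert>
#include <functional>
#include <map>
#include <string>
#include <vector>

// Simplified stand-in for a lite-parser registration: a stub-only entry has no
// primary parser and exists only to report that the stage is unavailable.
struct Registration {
    bool primaryIsSet;
    bool isStub;

    bool isExecutable() const {
        // Executable means there is a primary parser, or the fallback is a real
        // (non-stub) parser.
        return primaryIsSet || !isStub;
    }
};

// Collect the names of executable stages, sorted in descending order so a
// consumer can return them in ascending order with a cheap pop_back().
std::vector<std::string> listExecutableStages(const std::map<std::string, Registration>& parserMap) {
    std::vector<std::string> results;
    for (const auto& [stageName, registration] : parserMap) {
        if (registration.isExecutable()) {
            results.push_back(stageName);
        }
    }
    std::sort(results.begin(), results.end(), std::greater<std::string>());
    return results;
}

int main() {
    std::map<std::string, Registration> parserMap;
    parserMap["$match"] = Registration{true, false};        // primary parser registered
    parserMap["$legacyStage"] = Registration{false, false};  // real (non-stub) fallback only
    parserMap["$stubStage"] = Registration{false, true};     // stub fallback only: excluded
    const auto results = listExecutableStages(parserMap);
    assert((results == std::vector<std::string>{"$match", "$legacyStage"}));
    return 0;
}

Sorting the collected names in descending order lets the caller hand results back in ascending order by popping from the back of the vector, which avoids repeated erasure from the front.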
@@ -92,7 +92,8 @@ void registerStubParser(std::string stageName, std::string message, FeatureFlag*
                                                      std::move(stubParser),
                                                      featureFlag,
                                                      AllowedWithApiStrict::kAlways,
-                                                     AllowedWithClientType::kAny);
+                                                     AllowedWithClientType::kAny,
+                                                     true /*isStub*/);
 }

 void registerUnloadedExtensionStubParsers() {
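The remaining hunks thread an isStub flag through the fallback-registration path. As a rough sketch of that plumbing, assuming a much-simplified registry (registerFallbackParser(), registerStubParser(), FallbackRecord, and gFallbackRegistry below are illustrative stand-ins, not the real API): the generic registration function takes a defaulted isStub parameter, and only the stub helper passes true, so existing callers that register real fallback parsers continue to compile unchanged.

#include <cassert>
#include <functional>
#include <map>
#include <stdexcept>
#include <string>

// Simplified registration record: remembers whether the fallback is a stub.
struct FallbackRecord {
    std::function<void()> parser;
    bool isStub = false;
};

static std::map<std::string, FallbackRecord> gFallbackRegistry;

// Generic fallback registration; isStub defaults to false so existing call
// sites that register real fallback parsers do not need to change.
void registerFallbackParser(const std::string& stageName,
                            std::function<void()> parser,
                            bool isStub = false) {
    gFallbackRegistry[stageName] = FallbackRecord{std::move(parser), isStub};
}

// Stub registration: the parser exists only to report that the stage is not
// available, so it is flagged as a stub and can be excluded from listings.
void registerStubParser(const std::string& stageName, const std::string& message) {
    registerFallbackParser(stageName, [message] { throw std::runtime_error(message); }, /*isStub=*/true);
}

int main() {
    registerFallbackParser("$legacyStage", [] { /* real parsing work */ });
    registerStubParser("$stubStage", "$stubStage is not available in this build");

    assert(!gFallbackRegistry.at("$legacyStage").isStub);
    assert(gFallbackRegistry.at("$stubStage").isStub);
    return 0;
}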
@@ -82,10 +82,11 @@ void LiteParsedDocumentSource::LiteParserRegistration::setPrimaryParser(LitePars
 }

 void LiteParsedDocumentSource::LiteParserRegistration::setFallbackParser(
-    LiteParserInfo&& lpi, IncrementalRolloutFeatureFlag* ff) {
+    LiteParserInfo&& lpi, IncrementalRolloutFeatureFlag* ff, bool isStub) {
     _fallbackParser = std::move(lpi);
     _primaryParserFeatureFlag = ff;
     _fallbackIsSet = true;
+    _isStub = isStub;
 }

 bool LiteParsedDocumentSource::LiteParserRegistration::isPrimarySet() const {
@@ -117,7 +118,8 @@ void LiteParsedDocumentSource::registerFallbackParser(const std::string& name,
                                                       Parser parser,
                                                       FeatureFlag* parserFeatureFlag,
                                                       AllowedWithApiStrict allowedWithApiStrict,
-                                                      AllowedWithClientType allowedWithClientType) {
+                                                      AllowedWithClientType allowedWithClientType,
+                                                      bool isStub) {
     if (parserMap.contains(name)) {
         const auto& registration = parserMap.at(name);

@@ -151,8 +153,8 @@ void LiteParsedDocumentSource::registerFallbackParser(const std::string& name,
                 ifrFeatureFlag != nullptr);
     }

-    registration.setFallbackParser({parser, allowedWithApiStrict, allowedWithClientType},
-                                   ifrFeatureFlag);
+    registration.setFallbackParser(
+        {parser, allowedWithApiStrict, allowedWithClientType}, ifrFeatureFlag, isStub);
 }

 void LiteParsedDocumentSource::unregisterParser_forTest(const std::string& name) {
@@ -112,12 +112,20 @@ public:
     void setPrimaryParser(LiteParserInfo&& lpi);

     // TODO SERVER-114028 Update when fallback parsing supports all feature flags.
-    void setFallbackParser(LiteParserInfo&& lpi, IncrementalRolloutFeatureFlag* ff);
+    void setFallbackParser(LiteParserInfo&& lpi,
+                           IncrementalRolloutFeatureFlag* ff,
+                           bool isStub = false);

     bool isPrimarySet() const;

     bool isFallbackSet() const;

+    // Returns true if the parser is executable, meaning it has either a primary or a non-stub
+    // fallback.
+    bool isExecutable() const {
+        return _primaryIsSet || !_isStub;
+    }
+
 private:
     // The preferred method of parsing this LiteParsedDocumentSource. If the feature flag is
     // enabled, the primary parser will be used to parse the stage.
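To spell out the truth table behind isExecutable(), here is a small self-contained sketch. StubAwareRegistration mirrors the flags added in the diff, but it is an illustrative stand-in rather than the real LiteParserRegistration class.

#include <cassert>

// Simplified stand-in for the registration state tracked in the diff above:
// a primary-parser flag, a fallback flag, and whether the fallback is a stub.
class StubAwareRegistration {
public:
    void setPrimaryParser() {
        _primaryIsSet = true;
    }
    void setFallbackParser(bool isStub = false) {
        _fallbackIsSet = true;
        _isStub = isStub;
    }
    // Executable: has a primary parser, or a fallback that is not a stub.
    bool isExecutable() const {
        return _primaryIsSet || !_isStub;
    }

private:
    bool _primaryIsSet = false;
    bool _fallbackIsSet = false;
    bool _isStub = false;
};

int main() {
    StubAwareRegistration primaryOnly;
    primaryOnly.setPrimaryParser();
    assert(primaryOnly.isExecutable());   // primary parser -> listed

    StubAwareRegistration realFallback;
    realFallback.setFallbackParser(/*isStub=*/false);
    assert(realFallback.isExecutable());  // non-stub fallback -> listed

    StubAwareRegistration stubOnly;
    stubOnly.setFallbackParser(/*isStub=*/true);
    assert(!stubOnly.isExecutable());     // stub-only -> excluded from listMqlEntities

    return 0;
}

As in the diff, _isStub defaults to false, so the stub exclusion only takes effect once setFallbackParser() has been called with isStub set to true; a registration with a primary parser, or with a real (non-stub) fallback, reports itself as executable and is therefore listed.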
@@ -137,6 +145,9 @@ public:

     // Whether or not the fallback parser has been registered or not.
     bool _fallbackIsSet = false;
+
+    // Whether the fallback parser is a stub parser that just throws an error.
+    bool _isStub = false;
 };

 using ParserMap = StringMap<LiteParsedDocumentSource::LiteParserRegistration>;
@@ -170,7 +181,8 @@ public:
                                        Parser parser,
                                        FeatureFlag* parserFeatureFlag,
                                        AllowedWithApiStrict allowedWithApiStrict,
-                                       AllowedWithClientType allowedWithClientType);
+                                       AllowedWithClientType allowedWithClientType,
+                                       bool isStub = false);

     /**
      * Function that will be used as an alternate parser for a document source that has been