mirror of https://github.com/mongodb/mongo
SERVER-112520 Fix index maintenance for 2dsphere indexes on timeseries collections (#42786)
GitOrigin-RevId: d646363161e0b29177de5c7d87dc1cdc9df8b711
This commit is contained in:
parent eb7f1651cf
commit c296e44fdb
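For context, a minimal mongo shell sketch of the scenario this commit addresses (collection name and points are illustrative, modeled on the test changes below): with a 2dsphere index on a timeseries collection, measurements that reach a bucket via a bucket update, rather than a bucket-creating insert, could be missed by index-backed geo queries.

    // Hypothetical repro sketch; assumes a mongod and the mongo shell.
    const coll = db.geo_ts_repro;  // illustrative name
    coll.drop();
    assert.commandWorked(db.createCollection(coll.getName(), {timeseries: {timeField: "time"}}));
    assert.commandWorked(coll.createIndex({loc: "2dsphere"}));

    // Insert one measurement at a time: the first insert creates a bucket and
    // each subsequent insert is applied as an update to that bucket document.
    for (let i = 0; i < 10; i++) {
        assert.commandWorked(coll.insert({time: new Date(), loc: [0, i]}));
    }

    // Exactly one inserted point intersects [0, 6]. Before this fix, an
    // index-backed plan could fail to return it.
    const matched = coll.aggregate([
        {$match: {loc: {$geoIntersects: {$geometry: {type: "Point", coordinates: [0, 6]}}}}},
    ]).toArray();
    assert.eq(matched.length, 1);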
@@ -69,6 +69,8 @@ last-continuous:
     ticket: SERVER-110574
   - test_file: jstests/replsets/log_unprepared_abort_txns.js
     ticket: SERVER-111017
+  - test_file: jstests/core/timeseries/geo/timeseries_geonear_measurements.js
+    ticket: SERVER-112520
   suites: null
 last-lts:
   all:
@@ -640,4 +642,6 @@ last-lts:
     ticket: SERVER-110574
   - test_file: jstests/replsets/log_unprepared_abort_txns.js
     ticket: SERVER-111017
+  - test_file: jstests/core/timeseries/geo/timeseries_geonear_measurements.js
+    ticket: SERVER-112520
   suites: null
@@ -36,7 +36,7 @@ function degreesToMeters(degrees) {
     return degrees * (earthCircumferenceMeters / 360);
 }
 
-function insertTestData(coll) {
+function insertTestData(coll, batchInsert = true) {
     // When these points are interpreted as spherical coordinates, [long, lat],
     // the units are interpreted as degrees.
     const nMeasurements = 10;
@@ -54,7 +54,13 @@ function insertTestData(coll) {
 
     // Insert in a random order to ensure queries are really sorting.
     Array.shuffle(docs);
-    assert.commandWorked(coll.insert(docs));
+    if (batchInsert) {
+        assert.commandWorked(coll.insert(docs));
+    } else {
+        for (let i = 0; i < docs.length; i++) {
+            assert.commandWorked(coll.insert(docs[i]));
+        }
+    }
 }
 
 function runFlatExamples(coll, isTimeseries) {
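Note on the new batchInsert flag: a single batched insert typically creates the bucket(s) in one write, while one-at-a-time inserts make every insert after the first an update to an existing bucket, which is the index-maintenance path this commit fixes. A sketch of the bucketing behavior (collection name is illustrative; assumes a server version where raw buckets are exposed under the system.buckets namespace):

    const coll = db.ts_bucket_sketch;  // illustrative name
    coll.drop();
    assert.commandWorked(db.createCollection(coll.getName(), {timeseries: {timeField: "time"}}));
    assert.commandWorked(coll.insert({time: new Date(), loc: [0, 0]}));  // creates a bucket
    assert.commandWorked(coll.insert({time: new Date(), loc: [0, 1]}));  // updates the same bucket
    // Both measurements land in a single bucket document.
    assert.eq(db.getCollection("system.buckets." + coll.getName()).find().itcount(), 1);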
@@ -590,6 +596,38 @@ function runSphereExamples(coll, isTimeseries, has2dsphereIndex, scaleResult, qu
     }
 }
 
+function runIntersectsExamples(coll, isTimeseries, has2dsphereIndex, coord, expected) {
+    let pipeline, plan;
+
+    // Run some additional tests with $geoIntersects.
+    pipeline = [
+        {
+            $match: {
+                loc: {
+                    $geoIntersects: {
+                        $geometry: {
+                            type: "Point",
+                            coordinates: coord,
+                        },
+                    },
+                },
+            },
+        },
+    ];
+
+    assert.eq(coll.aggregate(pipeline).toArray().length, expected);
+    plan = coll.explain().aggregate(pipeline);
+    if (isTimeseries) {
+        if (has2dsphereIndex) {
+            assert(aggPlanHasStage(plan, "IXSCAN"), plan);
+        } else {
+            assert(aggPlanHasStage(plan, "COLLSCAN"), plan);
+        }
+    } else {
+        assert(aggPlanHasStage(plan, "IXSCAN"), plan);
+    }
+}
+
 function runExamples(coll, isTimeseries, has2dsphereIndex) {
     runFlatExamples(coll, isTimeseries, has2dsphereIndex);
 
@@ -612,6 +650,12 @@ function runExamples(coll, isTimeseries, has2dsphereIndex) {
         near: [180, 0],
         spherical: true,
     });
+
+    // Run some additional tests with $geoIntersects to ensure the index is being maintained
+    // correctly by finding the exact locations we inserted.
+    runIntersectsExamples(coll, isTimeseries, has2dsphereIndex, [0, 0], 1);
+    runIntersectsExamples(coll, isTimeseries, has2dsphereIndex, [0, 6], 1);
+    runIntersectsExamples(coll, isTimeseries, has2dsphereIndex, [1, 4], 0);
 }
 
 // Test $geoNear query results in several contexts:
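The three probes cover both failure directions: [0, 0] and [0, 6] must each match exactly one inserted measurement (no index entries were lost), while [1, 4] was never inserted and must match nothing (no spurious entries). The same probe can also be written as a find() query, which plans the same way as the single-$match pipeline (a hypothetical equivalent, not part of the commit):

    assert.eq(coll.find({
        loc: {$geoIntersects: {$geometry: {type: "Point", coordinates: [1, 4]}}},
    }).itcount(), 0);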
@@ -669,3 +713,29 @@ function runExamples(coll, isTimeseries, has2dsphereIndex) {
     insertTestData(coll);
     runExamples(coll, true /* isTimeseries */, true /* has2dsphereIndex */);
 }
+
+// 4. Test a timeseries collection, with a 2dsphere index on measurements.
+// Data is inserted one document at a time to verify correct index maintenance.
+// This should work if $geoWithin is indexed correctly.
+{
+    const coll = db.getCollection(jsTestName() + "_indexed_nonbatch");
+    coll.drop();
+    assert.commandWorked(db.createCollection(coll.getName(), {timeseries: {timeField: "time"}}));
+    assert.commandWorked(coll.createIndex({loc: "2dsphere"}));
+
+    // Make sure the 2dsphere index exists. (If the collection is implicitly sharded then we will
+    // also see an implicitly created index.)
+    let extraIndexesForSharding = {"control.min.time": 1, "control.max.time": 1};
+
+    assert.sameMembers(
+        getTimeseriesCollForRawOps(coll)
+            .getIndexes(kRawOperationSpec)
+            .map((i) => i.key),
+        isShardedTimeseries(coll)
+            ? [{"data.loc": "2dsphere_bucket"}, extraIndexesForSharding]
+            : [{"data.loc": "2dsphere_bucket"}],
+    );
+
+    insertTestData(coll, /* batchInsert */ false);
+    runExamples(coll, true /* isTimeseries */, true /* has2dsphereIndex */);
+}
@@ -444,6 +444,17 @@ void IndexUpdateIdentifier::determineAffectedIndexes(DocumentDiffReader* reader,
         }
     }
 
+    boost::optional<BSONElement> binaryItem;
+    while ((binaryItem = reader->nextBinary())) {
+        FieldRef::FieldRefTempAppend tempAppend(fieldRef, binaryItem->fieldNameStringData());
+        determineAffectedIndexes(fieldRef, indexesToUpdate);
+
+        // Early exit if possible.
+        if (indexesToUpdate.count() == _numIndexes) {
+            return;
+        }
+    }
+
     for (auto subItem = reader->nextSubDiff(); subItem; subItem = reader->nextSubDiff()) {
         FieldRef::FieldRefTempAppend tempAppend(fieldRef, subItem->first);
         visit(OverloadedVisitor{[this, &fieldRef, &indexesToUpdate](DocumentDiffReader& item) {
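The fix itself: determineAffectedIndexes() already walked updated, inserted, and deleted fields plus sub-diffs, but skipped the binary components of a document diff. Timeseries bucket updates can carry changes to the compressed data columns as binary diff entries, so indexes over those fields were never marked for maintenance. The new loop treats each binary entry's field name like any other modified path, with the same early exit once every index is already marked. A hypothetical shell sketch of where such diffs can be observed (assumes a replica set and a version where raw buckets live under system.buckets):

    // The bucket update produced by a single-document timeseries insert is
    // replicated as a v2 delta oplog entry; its 'diff' is the kind of document
    // diff that determineAffectedIndexes() inspects.
    const oplog = db.getSiblingDB("local").oplog.rs;
    const entry = oplog.find({op: "u", ns: /system\.buckets/}).sort({$natural: -1}).limit(1).next();
    printjson(entry.o);  // {$v: 2, diff: {...}}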
@@ -552,6 +552,125 @@ TEST(IndexUpdateIdentifierTest, DiffForWildcardIndexCombination) {
     ASSERT_EQ(2, affected.count());
 }
 
+TEST(IndexUpdateIdentifierTest, BinaryDiffForSingleIndex) {
+    // Constant for a binary diff at offset 0 with some binary change. We need to use a constant as
+    // computeOplogDiff does not compute binary diffs.
+    const BSONObj kBinaryDiffObj = BSON(
+        "elem" << BSON("o" << 0 << "d" << BSONBinData("abcdef", 6, BinDataType::BinDataGeneral)));
+    const BSONElement kBinaryDiffElem = kBinaryDiffObj["elem"];
+
+    // Generate a binary diff at 'a'
+    diff_tree::DocumentSubDiffNode diffNode;
+    diffNode.addBinary("a", kBinaryDiffElem);
+    auto oplogDiff = diffNode.serialize();
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPath(FieldRef("a"));
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_TRUE(affected[0]);
+        ASSERT_EQ(1, affected.count());
+    }
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPathComponent("a"_sd);
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_TRUE(affected[0]);
+        ASSERT_EQ(1, affected.count());
+    }
+}
+
+TEST(IndexUpdateIdentifierTest, BinaryDiffForSingleIndexDottedField) {
+    // Constant for a binary diff at offset 0 with some binary change. We need to use a constant as
+    // computeOplogDiff does not compute binary diffs.
+    const BSONObj kBinaryDiffObj = BSON(
+        "elem" << BSON("o" << 0 << "d" << BSONBinData("abcdef", 6, BinDataType::BinDataGeneral)));
+    const BSONElement kBinaryDiffElem = kBinaryDiffObj["elem"];
+
+    // Generate a binary diff at 'a.b'
+    diff_tree::DocumentSubDiffNode diffRoot;
+    auto diffNode = std::make_unique<diff_tree::DocumentSubDiffNode>();
+    diffNode->addBinary("b", kBinaryDiffElem);
+    diffRoot.addChild("a", std::move(diffNode));
+    auto oplogDiff = diffRoot.serialize();
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPath(FieldRef("a.b"));
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_TRUE(affected[0]);
+        ASSERT_EQ(1, affected.count());
+    }
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPath(FieldRef("b.a"));
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_FALSE(affected[0]);
+        ASSERT_EQ(0, affected.count());
+    }
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPathComponent("a"_sd);
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_TRUE(affected[0]);
+        ASSERT_EQ(1, affected.count());
+    }
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPathComponent("b"_sd);
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_TRUE(affected[0]);
+        ASSERT_EQ(1, affected.count());
+    }
+
+    {
+        doc_diff::IndexUpdateIdentifier updateIdentifier(1 /* numIndexes */);
+        {
+            UpdateIndexData uid;
+            uid.addPathComponent("c"_sd);
+            updateIdentifier.addIndex(0, uid);
+        }
+
+        doc_diff::IndexSet affected = updateIdentifier.determineAffectedIndexes(oplogDiff);
+        ASSERT_FALSE(affected[0]);
+        ASSERT_EQ(0, affected.count());
+    }
+}
+
 DEATH_TEST_REGEX(IndexUpdateIdentifierTest,
                  FailsWhenAnIndexIsAddedWIthWrongCounter,
                  R"#(Tripwire assertion.*7639000.*indexCounter should be less than _numIndexes)#") {