mirror of https://github.com/mongodb/mongo
SERVER-114252 Adapt `migration_fails_with_spurious_documents.js` for viewless timeseries (#44382)
GitOrigin-RevId: a71f1bdd209fc498c8372a63bb0b5153043bcda8
This commit is contained in:
parent 4b75b2fb50, commit b4beaca4b2
@@ -3,15 +3,11 @@
  * the target range on the recipient shard. This prevents mixing legitimate
  * documents (incoming via migration) with invalid ones (incorrectly present
  * due to historical reasons like direct connections or range deleter bugs).
- * @tags: [
- *   # TODO SERVER-112060 re-enable this test in viewless timeseries suites
- *   featureFlagCreateViewlessTimeseriesCollections_incompatible,
- * ]
- *
  */
 
 import {ShardingTest} from "jstests/libs/shardingtest.js";
 import {getTimeseriesCollForDDLOps} from "jstests/core/timeseries/libs/viewless_timeseries_util.js";
+import {getRawOperationSpec, getTimeseriesCollForRawOps} from "jstests/libs/raw_operation_utils.js";
 
 const st = new ShardingTest({shards: 2, mongos: 1});
 st.stopBalancer();
 
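The hunk above drops the featureFlagCreateViewlessTimeseriesCollections_incompatible tag and pulls in the raw-operation helpers instead, so raw bucket access no longer needs to name the buckets namespace directly. A minimal sketch of that access pattern follows; the database and collection names are made up for illustration, and the helper semantics are assumed from how they are used later in this diff, not stated by the commit itself:

// Sketch only: hypothetical "test.weather" timeseries collection; helper
// semantics assumed from their usage in the hunks below.
import {getRawOperationSpec, getTimeseriesCollForRawOps} from "jstests/libs/raw_operation_utils.js";

const testDB = db.getSiblingDB("test");
const weatherColl = testDB.getCollection("weather");  // hypothetical timeseries collection

// getTimeseriesCollForRawOps() resolves to whichever namespace raw bucket
// operations must target (the system.buckets collection for view-based
// timeseries, the collection itself when it is viewless), and
// getRawOperationSpec() supplies the matching raw-operation write option.
const rawColl = getTimeseriesCollForRawOps(testDB, weatherColl);
const numBuckets = rawColl.find({meta: "sensor_025"}).rawData().count();
assert.commandWorked(rawColl.remove({meta: "sensor_025"}, getRawOperationSpec(testDB)));

The two hunks below apply this same pattern to the spurious-document insert, the existence check, and the cleanup.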
@@ -356,38 +352,42 @@ assert.commandWorked(admin.runCommand({enableSharding: "test", primaryShard: st.
 // Insert a document directly on shard1 that should belong to the first chunk (owned by shard0).
 jsTest.log.info("Creating spurious time series document by inserting directly on shard1 " + tsCollDDLOps.getName());
 
-const shard1TsBucketsColl = st.shard1.getDB("test").getCollection(tsCollDDLOps.getName());
+const shard1DB = st.shard1.getDB("test");
+const shard1TsColl = shard1DB.getCollection(tsColl.getName());
 
 // Create a proper bucket document that would map to sensor_025 (which should be in first chunk)
 const spuriousTimestamp = new Date(baseTime.getTime() + 3000);
 assert.commandWorked(
-    shard1TsBucketsColl.insert({
-        _id: ObjectId(),
-        meta: "sensor_025", // This corresponds to sensor_id metaField
-        control: {
-            version: 1,
-            min: {
-                timestamp: spuriousTimestamp,
-                temperature: 20.0,
-            },
-            max: {
-                timestamp: spuriousTimestamp,
-                temperature: 20.0,
-            },
-            closed: false,
-        },
-        data: {
-            timestamp: {"0": spuriousTimestamp},
-            temperature: {"0": 20.0},
-            data: {"0": "spurious_ts_document"},
-        },
-    }),
+    getTimeseriesCollForRawOps(shard1DB, shard1TsColl).insert(
+        {
+            _id: ObjectId(),
+            meta: "sensor_025", // This corresponds to sensor_id metaField
+            control: {
+                version: 1,
+                min: {
+                    timestamp: spuriousTimestamp,
+                    temperature: 20.0,
+                },
+                max: {
+                    timestamp: spuriousTimestamp,
+                    temperature: 20.0,
+                },
+                closed: false,
+            },
+            data: {
+                timestamp: {"0": spuriousTimestamp},
+                temperature: {"0": 20.0},
+                data: {"0": "spurious_ts_document"},
+            },
+        },
+        getRawOperationSpec(shard1DB),
+    ),
 );
 
 // Verify the spurious document exists on shard1.
 assert.eq(
     1,
-    shard1TsBucketsColl.find({meta: "sensor_025"}).count(),
+    getTimeseriesCollForRawOps(shard1DB, shard1TsColl).find({meta: "sensor_025"}).rawData().count(),
     "Spurious time series document should exist on shard1",
 );
 
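For context on the bucket document inserted above: it is the raw, columnar form of a single measurement. Assuming the collection was created with timeField "timestamp" and metaField "sensor_id" (the inline comment points at sensor_id; the create options themselves are outside this hunk), the logical measurement it encodes is roughly:

// Hypothetical reconstruction of the measurement behind the raw bucket above;
// field names assume timeField "timestamp" and metaField "sensor_id".
const measurement = {
    timestamp: spuriousTimestamp,    // data.timestamp["0"], bounded by control.min/max
    sensor_id: "sensor_025",         // the bucket's meta value, stored once per bucket
    temperature: 20.0,               // data.temperature["0"]
    data: "spurious_ts_document",    // data.data["0"]
};
// control.version: 1 marks an uncompressed (v1) bucket, and each data column is
// keyed by the measurement's row index within the bucket ("0" = first measurement).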
@@ -419,10 +419,12 @@ assert.commandWorked(admin.runCommand({enableSharding: "test", primaryShard: st.
 
 // Clean up the spurious document and verify migration works.
 jsTest.log.info("Cleaning up spurious time series document and retrying migration...");
-assert.commandWorked(shard1TsBucketsColl.remove({meta: "sensor_025"}));
+assert.commandWorked(
+    getTimeseriesCollForRawOps(shard1DB, shard1TsColl).remove({meta: "sensor_025"}, getRawOperationSpec(shard1DB)),
+);
 assert.eq(
     0,
-    shard1TsBucketsColl.find({meta: "sensor_025"}).count(),
+    getTimeseriesCollForRawOps(shard1DB, shard1TsColl).find({meta: "sensor_025"}).rawData().count(),
     "Spurious time series document should be removed",
 );
 