mirror of https://github.com/mongodb/mongo
SERVER-79606 Convert validate collections hook to module
This commit is contained in:
parent 54591ec302
commit 8a41cf72c5
@@ -283,10 +283,11 @@ def mongo_shell_program(logger, executable=None, connection_string=None, filenam
     eval_sb.append(str(kwargs.pop("eval")))

     # Load a callback to check that the cluster-wide metadata is consistent.
-    eval_sb.append("await import('jstests/libs/override_methods/check_metadata_consistency.js');")
+    eval_sb.append('await import("jstests/libs/override_methods/check_metadata_consistency.js")')

     # Load this file to allow a callback to validate collections before shutting down mongod.
-    eval_sb.append("load('jstests/libs/override_methods/validate_collections_on_shutdown.js');")
+    eval_sb.append(
+        'await import("jstests/libs/override_methods/validate_collections_on_shutdown.js")')

     # Load a callback to check UUID consistency before shutting down a ShardingTest.
     eval_sb.append(
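resmoke stitches these fragments into the mongo shell's --eval string. Because ES modules load asynchronously, every hook that used to be pulled in with a synchronous load() now has to be awaited before the test body runs. A rough sketch of what the assembled eval text executes inside the shell (hook paths from the hunk above; the async evaluation context is supplied by the shell):

// Sketch: what the generated --eval fragment amounts to once assembled.
await import("jstests/libs/override_methods/check_metadata_consistency.js");
await import("jstests/libs/override_methods/validate_collections_on_shutdown.js");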
@@ -4289,6 +4289,7 @@ tasks:
        num_files: 3
        num_tasks: 5
        npm_command: rollback-fuzzer
+       jstestfuzz_vars: --useEsModules
        suite: rollback_fuzzer
 # Rollback suites create indexes with majority of nodes not available for replication. So, disabling
 # index build commit quorum.

@@ -4306,6 +4307,7 @@ tasks:
        num_files: 3
        num_tasks: 5
        npm_command: rollback-fuzzer
+       jstestfuzz_vars: --useEsModules
        suite: rollback_fuzzer
 # Rollback suites create indexes with majority of nodes not available for replication. So, disabling
 # index build commit quorum.

@@ -4321,7 +4323,7 @@ tasks:
        <<: *jstestfuzz_config_vars
        num_files: 1
        num_tasks: 4
-       jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50
+       jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50 --useEsModules
        npm_command: rollback-fuzzer
        suite: rollback_fuzzer_clean_shutdowns
 # Rollback suites create indexes with majority of nodes not available for replication. So, disabling

@@ -4338,7 +4340,7 @@ tasks:
        <<: *jstestfuzz_config_vars
        num_files: 1
        num_tasks: 4
-       jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50
+       jstestfuzz_vars: --numLinesPerFile 300 --maxLinesBetweenEvents 50 --useEsModules
        npm_command: rollback-fuzzer
        suite: rollback_fuzzer_unclean_shutdowns
 # Rollback suites create indexes with majority of nodes not available for replication. So, disabling
@@ -5,13 +5,10 @@
 // do_not_wrap_aggregations_in_facets,
 // uses_multiple_connections,
 // ]
-(function() {
-"use strict";
-
 load("jstests/libs/change_stream_util.js");
 load("jstests/libs/collection_drop_recreate.js"); // For assert[Drop|Create]Collection.
 load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.
-load("jstests/replsets/libs/two_phase_drops.js"); // For 'TwoPhaseDropCollectionTest'.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 const coll = assertDropAndRecreateCollection(db, "change_post_image");
 const cst = new ChangeStreamTest(db);

@@ -247,4 +244,3 @@ if (!isChangeStreamPassthrough()) {
     latestChange = cst.getOneChange(cursor, true);
     assert.eq(latestChange.operationType, "invalidate");
 }
-}());
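This hunk shows the conversion pattern repeated across the jstests below: the (function() { ... }()); wrapper and the "use strict" directive are dropped, since an ES module is strict by default and already gets its own scope, and any load() whose symbols the test actually references becomes a named import. A minimal before/after sketch (paths taken from the hunk above):

// Before: classic shell script; IIFE for scoping, globals injected by load().
(function() {
"use strict";
load("jstests/replsets/libs/two_phase_drops.js"); // For 'TwoPhaseDropCollectionTest'.
// ... test body ...
}());

// After: ES module; strict mode and file scope are implicit.
import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
// ... test body ...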
@@ -5,11 +5,7 @@
 // do_not_run_in_whole_cluster_passthrough,
 // requires_fcv_63,
 // ]
-(function() {
-"use strict";
-
 load("jstests/libs/change_stream_util.js"); // For ChangeStreamTest.
-load('jstests/replsets/libs/two_phase_drops.js'); // For 'TwoPhaseDropCollectionTest'.
 load("jstests/libs/collection_drop_recreate.js"); // For assert[Drop|Create]Collection.
 load("jstests/libs/fixture_helpers.js"); // For isSharded.

@@ -280,4 +276,3 @@ cst.assertNextChangesEqual(
     {cursor: cursor, expectedChanges: expectedChanges, expectInvalidate: true});

 cst.cleanUp();
-}());
@@ -3,11 +3,8 @@
 // collections will live on different shards. Majority read concern cannot be off with multi-shard
 // transactions, which is why this test needs the tag below.
 // @tags: [requires_majority_read_concern]
-(function() {
-"use strict";
-
 load("jstests/libs/change_stream_util.js"); // For ChangeStreamTest.
-load('jstests/replsets/libs/two_phase_drops.js'); // For 'TwoPhaseDropCollectionTest'.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/libs/collection_drop_recreate.js"); // For assert[Drop|Create]Collection.
 load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.

@@ -243,4 +240,3 @@ for (let collToInvalidate of [db1Coll, db2Coll]) {
 }

 cst.cleanUp();
-}());
@@ -2,11 +2,8 @@
 // Do not run in whole-cluster passthrough since this test assumes that the change stream will be
 // invalidated by a database drop.
 // @tags: [do_not_run_in_whole_cluster_passthrough]
-(function() {
-"use strict";
-
 load("jstests/libs/change_stream_util.js"); // For ChangeStreamTest
-load('jstests/replsets/libs/two_phase_drops.js'); // For 'TwoPhaseDropCollectionTest'.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/libs/collection_drop_recreate.js"); // For assert[Drop|Create]Collection.
 load("jstests/libs/fixture_helpers.js"); // For FixtureHelpers.

@@ -213,4 +210,3 @@ assert.soon(() => {
 assert.eq(resumeStream.getResumeToken(), invalidateEvent[0]._id);

 cst.cleanUp();
-}());
@@ -1,12 +1,10 @@
-'use strict';
-
 /**
  * Represents a MongoDB cluster.
  */
-load('jstests/hooks/validate_collections.js'); // For validateCollections.
+import {validateCollections} from "jstests/hooks/validate_collections.js";
 load('jstests/concurrency/fsm_libs/shard_fixture.js'); // For FSMShardingTest.

-var Cluster = function(options) {
+export const Cluster = function(options) {
     if (!(this instanceof Cluster)) {
         return new Cluster(options);
     }
@@ -1,6 +1,6 @@
-'use strict';
-
-load('jstests/concurrency/fsm_libs/runner.js'); // for runner.internals
+import {Cluster} from "jstests/concurrency/fsm_libs/cluster.js";
+import {runner} from "jstests/concurrency/fsm_libs/runner.js";
+import {ThreadManager} from "jstests/concurrency/fsm_libs/thread_mgr.js";
 load('jstests/libs/discover_topology.js'); // For Topology and DiscoverTopology.

 const validateExecutionOptions = runner.internals.validateExecutionOptions;
@@ -1,12 +1,10 @@
-'use strict';
-
 load('jstests/concurrency/fsm_libs/assert.js');
-load('jstests/concurrency/fsm_libs/cluster.js');
+import {Cluster} from "jstests/concurrency/fsm_libs/cluster.js";
 load('jstests/concurrency/fsm_libs/parse_config.js');
-load('jstests/concurrency/fsm_libs/thread_mgr.js');
+import {ThreadManager} from "jstests/concurrency/fsm_libs/thread_mgr.js";
 load('jstests/concurrency/fsm_utils/name_utils.js'); // for uniqueCollName and uniqueDBName

-var runner = (function() {
+export const runner = (function() {
     function validateExecutionMode(mode) {
         var allowedKeys = ['composed', 'parallel', 'serial'];

@@ -705,6 +703,6 @@ var runner = (function() {
     };
 })();

-var runWorkloadsSerially = runner.serial;
-var runWorkloadsInParallel = runner.parallel;
-var runCompositionOfWorkloads = runner.composed;
+export const runWorkloadsSerially = runner.serial;
+export const runWorkloadsInParallel = runner.parallel;
+export const runCompositionOfWorkloads = runner.composed;
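Note that the revealing-module IIFE survives the conversion untouched; only its binding changes from a shell global to a named export, and the convenience aliases become exports as well. Consumers now bind what they need explicitly, e.g. (a hypothetical consumer; call sites elided):

import {runner, runWorkloadsSerially} from "jstests/concurrency/fsm_libs/runner.js";

// Previously these names simply appeared as globals after
// load('jstests/concurrency/fsm_libs/runner.js'); now nothing enters scope
// without an explicit import.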
@@ -1,13 +1,13 @@
-'use strict';
-
 load('jstests/libs/parallelTester.js'); // for Thread and CountDownLatch
-load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread
+import {workerThread} from "jstests/concurrency/fsm_libs/worker_thread.js";

 /**
  * Helper for spawning and joining worker threads.
  */

-var ThreadManager = function(clusterOptions, executionMode = {composed: false}) {
+export const ThreadManager = function(clusterOptions, executionMode = {
+    composed: false
+}) {
     if (!(this instanceof ThreadManager)) {
         return new ThreadManager(clusterOptions, executionMode);
     }

@@ -196,8 +196,8 @@ var ThreadManager = function(clusterOptions, executionMode = {composed: false})
 * workload and a composition of them, respectively.
 */

-workerThread.fsm = function(workloads, args, options) {
-    load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread.main
+workerThread.fsm = async function(workloads, args, options) {
+    const {workerThread} = await import("jstests/concurrency/fsm_libs/worker_thread.js");
     load('jstests/concurrency/fsm_libs/fsm.js'); // for fsm.run

     return workerThread.main(workloads, args, function(configs) {

@@ -207,8 +207,8 @@ workerThread.fsm = function(workloads, args, options) {
     });
 };

-workerThread.composed = function(workloads, args, options) {
-    load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread.main
+workerThread.composed = async function(workloads, args, options) {
+    const {workerThread} = await import("jstests/concurrency/fsm_libs/worker_thread.js");
     load('jstests/concurrency/fsm_libs/composer.js'); // for composer.run

     return workerThread.main(workloads, args, function(configs) {
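The worker callbacks turn async and re-import workerThread from inside the spawned thread: a function handed to Thread is serialized into the child's scope, so module bindings captured in the parent do not travel with it. A minimal sketch of the same idiom (Thread import path as used elsewhere in this commit; the return payload is illustrative):

import {Thread} from "jstests/libs/parallelTester_module.js";

const t = new Thread(async function() {
    // Runs in the child thread: re-import whatever the body needs.
    const {workerThread} = await import("jstests/concurrency/fsm_libs/worker_thread.js");
    return {ok: workerThread ? 1 : 0};
});
t.start();
t.join();
assert.commandWorked(t.returnData());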
@@ -1,11 +1,9 @@
-'use strict';
-
 load('jstests/concurrency/fsm_libs/assert.js');
-load('jstests/concurrency/fsm_libs/cluster.js'); // for Cluster.isStandalone
+import {Cluster} from "jstests/concurrency/fsm_libs/cluster.js";
 load('jstests/concurrency/fsm_libs/parse_config.js'); // for parseConfig
 load('jstests/libs/specific_secondary_reader_mongo.js');

-var workerThread = (function() {
+export const workerThread = (function() {
     // workloads = list of workload filenames
     // args.tid = the thread identifier
     // args.data = map of workload -> 'this' parameter passed to the FSM state functions
@@ -1,8 +1,5 @@
 // Runner that runs full validation on all collections of the initial sync node and checks the
 // dbhashes of all of the nodes including the initial sync node.
-'use strict';
-
-(function() {
 var startTime = Date.now();

 var primaryInfo = db.isMaster();

@@ -42,8 +39,7 @@ assert.eq(res.myState, ReplSetTest.State.SECONDARY, tojson(res));
 const excludedDBs = jsTest.options().excludedDBsFromDBHash;
 rst.checkReplicatedDataHashes(undefined, excludedDBs);

-load('jstests/hooks/run_validate_collections.js');
+await import("jstests/hooks/run_validate_collections.js");

 var totalTime = Date.now() - startTime;
 print('Finished consistency checks of initial sync node in ' + totalTime + ' ms.');
-})();
@@ -1,10 +1,8 @@
 // Runner for validateCollections that runs full validation on all collections when loaded into
 // the mongo shell.
-'use strict';
-
-(function() {
+import {CollectionValidator} from "jstests/hooks/validate_collections.js";
 load('jstests/libs/discover_topology.js'); // For Topology and DiscoverTopology.
-load('jstests/hooks/validate_collections.js'); // For CollectionValidator.

 assert.eq(typeof db, 'object', 'Invalid `db` object, is the shell connected to a mongod?');
 const topology = DiscoverTopology.findConnectedNodes(db.getMongo());

@@ -93,4 +91,3 @@ if (originalTransactionLifetimeLimitSeconds) {
             conn.adminCommand({setParameter: 1, transactionLifetimeLimitSeconds: originalValue}));
     }
 }
-})();
@@ -1,14 +1,37 @@
 // Wrapper around the validate command that can be used to validate index key counts.
-'use strict';
+import {Thread} from "jstests/libs/parallelTester_module.js";

-function CollectionValidator() {
-    load('jstests/libs/parallelTester.js');
-
-    if (!(this instanceof CollectionValidator)) {
-        throw new Error('Please use "new CollectionValidator()"');
+export class CollectionValidator {
+    validateCollections(db, obj) {
+        return validateCollectionsImpl(db, obj);
     }

-    this.validateCollections = function(db, obj) {
+    validateNodes(hostList) {
+        // We run the scoped threads in a try/finally block in case any thread throws an exception,
+        // in which case we want to still join all the threads.
+        let threads = [];
+
+        try {
+            hostList.forEach(host => {
+                const thread = new Thread(validateCollectionsThread, validateCollectionsImpl, host);
+                threads.push(thread);
+                thread.start();
+            });
+        } finally {
+            // Wait for each thread to finish. Throw an error if any thread fails.
+            const returnData = threads.map(thread => {
+                thread.join();
+                return thread.returnData();
+            });
+
+            returnData.forEach(res => {
+                assert.commandWorked(res, 'Collection validation failed');
+            });
+        }
+    }
+}
+
+function validateCollectionsImpl(db, obj) {
     function dumpCollection(coll, limit) {
         print('Printing indexes in: ' + coll.getFullName());
         printjson(coll.getIndexes());

@@ -88,10 +111,10 @@ function CollectionValidator() {
     }

     return full_res;
-};
+}

 // Run a separate thread to validate collections on each server in parallel.
-const validateCollectionsThread = function(validatorFunc, host) {
+function validateCollectionsThread(validatorFunc, host) {
     try {
         print('Running validate() on ' + host);
         const conn = new Mongo(host);

@@ -106,8 +129,8 @@ function CollectionValidator() {

     let requiredFCV = jsTest.options().forceValidationWithFeatureCompatibilityVersion;
     if (requiredFCV) {
-        requiredFCV = new Function(`return typeof ${requiredFCV} === "string" ? ${
-            requiredFCV} : "${requiredFCV}"`)();
+        requiredFCV = new Function(
+            `return typeof ${requiredFCV} === "string" ? ${requiredFCV} : "${requiredFCV}"`)();
         // Make sure this node has the desired FCV as it may take time for the updates to
         // replicate to the nodes that weren't part of the w=majority.
         assert.soonNoExcept(() => {

@@ -131,38 +154,11 @@ function CollectionValidator() {
         }
         return {ok: 1};
     } catch (e) {
-        print('Exception caught in scoped thread running validationCollections on server: ' +
-              host);
+        print('Exception caught in scoped thread running validationCollections on server: ' + host);
         return {ok: 0, error: e.toString(), stack: e.stack, host: host};
     }
-};
-
-this.validateNodes = function(hostList) {
-    // We run the scoped threads in a try/finally block in case any thread throws an exception,
-    // in which case we want to still join all the threads.
-    let threads = [];
-
-    try {
-        hostList.forEach(host => {
-            const thread =
-                new Thread(validateCollectionsThread, this.validateCollections, host);
-            threads.push(thread);
-            thread.start();
-        });
-    } finally {
-        // Wait for each thread to finish. Throw an error if any thread fails.
-        const returnData = threads.map(thread => {
-            thread.join();
-            return thread.returnData();
-        });
-
-        returnData.forEach(res => {
-            assert.commandWorked(res, 'Collection validation failed');
-        });
-    }
-};
-}
+}

-// Ensure compatibility with existing callers. Cannot use `const` or `let` here since this file may
-// be loaded more than once.
-var validateCollections = new CollectionValidator().validateCollections;
+export const validateCollections = new CollectionValidator().validateCollections;
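Worth noting why the implementation moved out to a module-level validateCollectionsImpl: it is passed by value into each Thread, and a serialized function cannot drag a CollectionValidator instance (or this module's scope) along with it, so the old this.validateCollections reference would no longer resolve in the child. Direct callers keep the old one-liner through the exported binding; a usage sketch (the database name and options shape are illustrative):

import {validateCollections} from "jstests/hooks/validate_collections.js";

// Runs the full validate pass against one database and returns an {ok: ...} result.
const res = validateCollections(db.getSiblingDB("test"), {full: true});
assert.commandWorked(res, "collection validation failed");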
@@ -7,14 +7,6 @@
  *
  * @param {Mongo} conn - a connection to the server
  */
-function CommandSequenceWithRetries(conn) {
-    "use strict";
-
-    if (!(this instanceof CommandSequenceWithRetries)) {
-        return new CommandSequenceWithRetries(conn);
-    }
-
-    const steps = [];

 function attemptReconnect(conn) {
     try {

@@ -25,22 +17,31 @@ function CommandSequenceWithRetries(conn) {
         return true;
     }

-    this.then = function then(phase, action) {
-        steps.push({phase: phase, action: action});
-        return this;
-    };
+export class CommandSequenceWithRetries {
+    constructor(conn) {
+        this.conn = conn;
+        this.steps = [];
+    }

-    this.execute = function execute() {
+    then(phase, action) {
+        this.steps.push({phase, action});
+        return this;
+    }
+
+    execute() {
         let i = 0;
         let stepHadNetworkErrorAlready = false;

-        while (i < steps.length) {
+        while (i < this.steps.length) {
             try {
                 // Treat no explicit return statement inside the action function as returning
                 // {shouldStop: false} for syntactic convenience.
-                const result = steps[i].action(conn);
+                const result = this.steps[i].action(this.conn);
                 if (result !== undefined && result.shouldStop) {
-                    return {ok: 0, msg: "giving up after " + steps[i].phase + ": " + result.reason};
+                    return {
+                        ok: 0,
+                        msg: "giving up after " + this.steps[i].phase + ": " + result.reason
+                    };
                 }
             } catch (e) {
                 if (!isNetworkError(e)) {

@@ -54,15 +55,15 @@ function CommandSequenceWithRetries(conn) {
                 if (stepHadNetworkErrorAlready) {
                     return {
                         ok: 0,
-                        msg: "giving up after " + steps[i].phase +
+                        msg: "giving up after " + this.steps[i].phase +
                             " because we encountered multiple network errors"
                     };
                 }

-                if (!attemptReconnect(conn)) {
+                if (!attemptReconnect(this.conn)) {
                     return {
                         ok: 0,
-                        msg: "giving up after " + steps[i].phase +
+                        msg: "giving up after " + this.steps[i].phase +
                             " because attempting to reconnect failed"
                     };
                 }

@@ -76,5 +77,5 @@ function CommandSequenceWithRetries(conn) {
         }

         return {ok: 1};
-    };
-}
+    }
+}
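The class keeps the fluent contract of the old constructor function: then() queues a named phase, execute() replays the queue and retries a phase once across a network error by reconnecting. A usage sketch (conn is assumed to be an existing connection; the phase name and command are illustrative):

import {CommandSequenceWithRetries} from "jstests/libs/command_sequence_with_retries.js";

const result = new CommandSequenceWithRetries(conn)
                   .then("running hostInfo", function(conn) {
                       assert.commandWorked(conn.adminCommand({hostInfo: 1}));
                       // No explicit return means {shouldStop: false}: keep going.
                   })
                   .execute();
assert.commandWorked(result);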
@@ -1,7 +1,5 @@
 // This is the template file used in Powercycle testing for launching FSM Serial clients.
-'use strict';
-
-load('jstests/concurrency/fsm_libs/runner.js');
+import {runWorkloadsSerially} from "jstests/concurrency/fsm_libs/runner.js";

 var workloadDir = 'jstests/concurrency/fsm_workloads';
@@ -3,17 +3,14 @@
  * collections and indexes before shutting down a mongod while running JS tests.
  */

-(function() {
-"use strict";
+import {validateCollections} from "jstests/hooks/validate_collections.js";
+import {CommandSequenceWithRetries} from "jstests/libs/command_sequence_with_retries.js";

-load("jstests/libs/command_sequence_with_retries.js"); // for CommandSequenceWithRetries
-
-MongoRunner.validateCollectionsCallback = function(port) {
-    // This function may be executed in a new Thread context, so ensure the proper definitions
-    // are loaded.
-    if (typeof CommandSequenceWithRetries === "undefined") {
-        load("jstests/libs/command_sequence_with_retries.js");
-    }
+MongoRunner.validateCollectionsCallback = function(port, options) {
+    options = options || {};
+    const CommandSequenceWithRetriesImpl =
+        options.CommandSequenceWithRetries || CommandSequenceWithRetries;
+    const validateCollectionsImpl = options.validateCollections || validateCollections;

     if (jsTest.options().skipCollectionAndIndexValidation) {
         print("Skipping collection validation during mongod shutdown");

@@ -34,7 +31,7 @@ MongoRunner.validateCollectionsCallback = function(port) {

     let dbNames;
     let result =
-        new CommandSequenceWithRetries(conn)
+        new CommandSequenceWithRetriesImpl(conn)
             .then("running the isMaster command",
                   function(conn) {
                       const res = assert.commandWorked(conn.adminCommand({isMaster: 1}));

@@ -119,9 +116,7 @@ MongoRunner.validateCollectionsCallback = function(port) {
         return;
     }

-    load('jstests/hooks/validate_collections.js'); // for validateCollections
-
-    const cmds = new CommandSequenceWithRetries(conn);
+    const cmds = new CommandSequenceWithRetriesImpl(conn);
     for (let i = 0; i < dbNames.length; ++i) {
         const dbName = dbNames[i];
         cmds.then("validating " + dbName, function(conn) {

@@ -132,7 +127,7 @@ MongoRunner.validateCollectionsCallback = function(port) {
         validateOptions.enforceFastCount = false;
     }

-    const validate_res = validateCollections(conn.getDB(dbName), validateOptions);
+    const validate_res = validateCollectionsImpl(conn.getDB(dbName), validateOptions);
     if (!validate_res.ok) {
         return {
             shouldStop: true,

@@ -144,4 +139,3 @@ MongoRunner.validateCollectionsCallback = function(port) {

     assert.commandWorked(cmds.execute());
 };
-})();
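The new options bag is dependency injection for Thread contexts: a callback that ends up executing inside a spawned thread cannot see this module's imports (the old version papered over that with a typeof check plus load()), so the caller re-imports the pieces and hands them back in. A sketch of such a call site, inside some async thread function (port is assumed; the bag keys mirror the hunk above):

const {validateCollections} = await import("jstests/hooks/validate_collections.js");
const {CommandSequenceWithRetries} =
    await import("jstests/libs/command_sequence_with_retries.js");
MongoRunner.validateCollectionsCallback(port, {validateCollections, CommandSequenceWithRetries});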
@@ -0,0 +1,465 @@
+/**
+ * The ParallelTester class is used to test more than one test concurrently
+ */
+
+export var Thread, fork, EventGenerator, ParallelTester;
+
+if (typeof _threadInject != "undefined") {
+    // With --enableJavaScriptProtection functions are presented as Code objects.
+    // This function evals all the Code objects then calls the provided start function.
+    // arguments: [startFunction, startFunction args...]
+    function _threadStartWrapper(testData) {
+        // Recursively evals all the Code objects present in arguments
+        // NOTE: This is a naive implementation that cannot handle cyclic objects.
+        function evalCodeArgs(arg) {
+            if (arg instanceof Code) {
+                return eval("(" + arg.code + ")");
+            } else if (arg !== null && isObject(arg)) {
+                var newArg = arg instanceof Array ? [] : {};
+                for (var prop in arg) {
+                    if (arg.hasOwnProperty(prop)) {
+                        newArg[prop] = evalCodeArgs(arg[prop]);
+                    }
+                }
+                return newArg;
+            }
+            return arg;
+        }
+        var realStartFn;
+        var newArgs = [];
+        // We skip the first argument, which is always TestData.
+        TestData = evalCodeArgs(testData);
+        for (var i = 1, l = arguments.length; i < l; i++) {
+            newArgs.push(evalCodeArgs(arguments[i]));
+        }
+        realStartFn = newArgs.shift();
+        return realStartFn.apply(this, newArgs);
+    }
+
+    Thread = function() {
+        var args = Array.prototype.slice.call(arguments);
+        // Always pass TestData as the first argument.
+        args.unshift(TestData);
+        args.unshift(_threadStartWrapper);
+        this.init.apply(this, args);
+    };
+    _threadInject(Thread.prototype);
+
+    fork = function() {
+        var t = new Thread(function() {});
+        Thread.apply(t, arguments);
+        return t;
+    };
+
+    // Helper class to generate a list of events which may be executed by a ParallelTester
+    EventGenerator = function(me, collectionName, mean, host) {
+        this.mean = mean;
+        if (host == undefined)
+            host = db.getMongo().host;
+        this.events = new Array(me, collectionName, host);
+    };
+
+    EventGenerator.prototype._add = function(action) {
+        this.events.push([Random.genExp(this.mean), action]);
+    };
+
+    EventGenerator.prototype.addInsert = function(obj) {
+        this._add("t.insert( " + tojson(obj) + " )");
+    };
+
+    EventGenerator.prototype.addRemove = function(obj) {
+        this._add("t.remove( " + tojson(obj) + " )");
+    };
+
+    EventGenerator.prototype.addCurrentOp = function() {
+        this._add("db.currentOp()");
+    };
+
+    EventGenerator.prototype.addUpdate = function(objOld, objNew) {
+        this._add("t.update( " + tojson(objOld) + ", " + tojson(objNew) + " )");
+    };
+
+    EventGenerator.prototype.addCheckCount = function(count, query, shouldPrint, checkQuery) {
+        query = query || {};
+        shouldPrint = shouldPrint || false;
+        checkQuery = checkQuery || false;
+        var action = "assert.eq( " + count + ", t.count( " + tojson(query) + " ) );";
+        if (checkQuery) {
+            action +=
+                " assert.eq( " + count + ", t.find( " + tojson(query) + " ).toArray().length );";
+        }
+        if (shouldPrint) {
+            action += " print( me + ' ' + " + count + " );";
+        }
+        this._add(action);
+    };
+
+    EventGenerator.prototype.getEvents = function() {
+        return this.events;
+    };
+
+    EventGenerator.dispatch = function() {
+        var args = Array.from(arguments);
+        var me = args.shift();
+        var collectionName = args.shift();
+        var host = args.shift();
+        var m = new Mongo(host);
+
+        // We define 'db' and 't' as local variables so that calling eval() on the stringified
+        // JavaScript expression 'args[i][1]' can take advantage of using them.
+        var db = m.getDB("test");
+        var t = db[collectionName];
+        for (var i in args) {
+            sleep(args[i][0]);
+            eval(args[i][1]);
+        }
+    };
+
+    // Helper class for running tests in parallel. It assembles a set of tests
+    // and then calls assert.parallelTests to run them.
+    ParallelTester = function() {
+        this.params = new Array();
+    };
+
+    ParallelTester.prototype.add = function(fun, args) {
+        args = args || [];
+        args.unshift(fun);
+        this.params.push(args);
+    };
+
+    ParallelTester.prototype.run = async function(msg) {
+        await assert.parallelTests(this.params, msg);
+    };
+
+    // creates lists of tests from jstests dir in a format suitable for use by
+    // ParallelTester.fileTester. The lists will be in random order.
+    // n: number of lists to split these tests into
+    ParallelTester.createJstestsLists = function(n) {
+        var params = new Array();
+        for (var i = 0; i < n; ++i) {
+            params.push([]);
+        }
+
+        var makeKeys = function(a) {
+            var ret = {};
+            for (var i in a) {
+                ret[a[i]] = 1;
+            }
+            return ret;
+        };
+
+        // some tests can't run in parallel with most others
+        var skipTests = makeKeys([
+            "index/indexb.js",
+
+            // Tests that set a parameter that causes the server to ignore
+            // long index keys.
+            "index_bigkeys_nofail.js",
+            "index_bigkeys_validation.js",
+
+            // Tests that set the notablescan parameter, which makes queries fail rather than use a
+            // non-indexed plan.
+            "notablescan.js",
+            "notablescan_capped.js",
+
+            "query/mr/mr_fail_invalid_js.js",
+            "run_program1.js",
+            "bench_test1.js",
+
+            // These tests use getLog to examine the logs. Tests which do so shouldn't be run in
+            // this suite because any test being run at the same time could conceivably spam the
+            // logs so much that the line they are looking for has been rotated off the server's
+            // in-memory buffer of log messages, which only stores the 1024 most recent operations.
+            "comment_field.js",
+            "administrative/getlog2.js",
+            "logprocessdetails.js",
+            "query/queryoptimizera.js",
+            "log_remote_op_wait.js",
+
+            "connections_opened.js", // counts connections, globally
+            "opcounters_write_cmd.js",
+            "administrative/set_param1.js", // changes global state
+            "index/geo/geo_update_btree2.js", // SERVER-11132 test disables table scans
+            "write/update/update_setOnInsert.js", // SERVER-9982
+            "max_time_ms.js", // Sensitive to query execution time, by design
+            "shell/autocomplete.js", // Likewise.
+
+            // This overwrites MinKey/MaxKey's singleton which breaks
+            // any other test that uses MinKey/MaxKey
+            "query/type/type6.js",
+
+            // Assumes that other tests are not creating cursors.
+            "kill_cursors.js",
+
+            // Assumes that other tests are not starting operations.
+            "administrative/current_op/currentop_shell.js",
+
+            // These tests check global command counters.
+            "write/find_and_modify/find_and_modify_metrics.js",
+            "write/update/update_metrics.js",
+
+            // Views tests
+            "views/invalid_system_views.js", // Puts invalid view definitions in system.views.
+            "views/views_all_commands.js", // Drops test DB.
+            "views/view_with_invalid_dbname.js", // Puts invalid view definitions in system.views.
+
+            // This test causes collMod commands to hang, which interferes with other tests running
+            // collMod.
+            "write/crud_ops_do_not_throw_locktimeout.js",
+
+            // Can fail if isMaster takes too long on a loaded machine.
+            "dbadmin.js",
+
+            // Other tests will fail while the requireApiVersion server parameter is set.
+            "require_api_version.js",
+
+            // This sets the 'disablePipelineOptimization' failpoint, which causes other tests
+            // running in parallel to fail if they were expecting their pipelines to be optimized.
+            "type_bracket.js",
+
+            // This test updates global memory usage counters in the bucket catalog in a way that
+            // may affect other time-series tests running concurrently.
+            "timeseries/timeseries_idle_buckets.js",
+
+            // Assumes that other tests are not creating API version 1 incompatible data.
+            "administrative/validate_db_metadata_command.js",
+
+            // The tests in 'bench_test*.js' files use 'benchRun()'. The main purpose of
+            // 'benchRun()' is for performance testing and the 'benchRun()' implementation itself
+            // launches multiple threads internally, it's not necessary to keep 'bench_test*.js'
+            // within the parallel test job.
+            "bench_test1.js",
+            "bench_test2.js",
+
+            // These tests cause deletes and updates to hang, which may affect other tests running
+            // concurrently.
+            "timeseries/timeseries_delete_hint.js",
+            "timeseries/timeseries_update_hint.js",
+            "timeseries/timeseries_delete_concurrent.js",
+            "timeseries/timeseries_update_concurrent.js",
+
+            // These tests rely on no writes happening that would force oplog truncation.
+            "write_change_stream_pit_preimage_in_transaction.js",
+            "write/write_change_stream_pit_preimage.js",
+
+            // These tests convert a non-unique index to a unique one, which is not compatible
+            // when running against inMemory storage engine variants. Since this test only fails
+            // in the parallel tester, which does not respect test tags, we omit the tests
+            // instead of manually checking TestData values in the mongo shell for the Evergreen
+            // variant.
+            "ddl/collmod_convert_index_uniqueness.js",
+            "ddl/collmod_convert_to_unique_apply_ops.js",
+            "ddl/collmod_convert_to_unique_violations.js",
+            "ddl/collmod_convert_to_unique_violations_size_limit.js",
+
+            // The parallel tester does not respect test tags, compact cannot run against the
+            // inMemory storage engine.
+            "timeseries/timeseries_compact.js",
+
+            // These tests load 'sbe_assert_error_override.js' unconditionally, which causes
+            // failures in the parallel suite.
+            "computed_projections.js",
+            "query/project/projection_expr_mod.js",
+        ]);
+
+        // Get files, including files in subdirectories.
+        var getFilesRecursive = function(dir) {
+            var files = listFiles(dir);
+            var fileList = [];
+            files.forEach(file => {
+                if (file.isDirectory) {
+                    getFilesRecursive(file.name).forEach(subDirFile => fileList.push(subDirFile));
+                } else {
+                    fileList.push(file);
+                }
+            });
+            return fileList;
+        };
+
+        // Transactions are not supported on standalone nodes so we do not run them here.
+        // NOTE: We need to take substring of the full test path to ensure that 'jstests/core/' is
+        // not included.
+        const txnsTestFiles =
+            getFilesRecursive("jstests/core/txns/")
+                .map(fullPathToTest => fullPathToTest.name.substring("jstests/core/".length));
+        Object.assign(skipTests, makeKeys(txnsTestFiles));
+
+        var parallelFilesDir = "jstests/core";
+
+        // some tests can't be run in parallel with each other
+        var serialTestsArr = [
+            // These tests use fsyncLock.
+            parallelFilesDir + "/fsync.js",
+            parallelFilesDir + "/administrative/current_op/currentop.js",
+            parallelFilesDir + "/ddl/killop_drop_collection.js",
+
+            // These tests expect the profiler to be on or off at specific points. They should not
+            // be run in parallel with tests that perform fsyncLock. User operations skip writing to
+            // the system.profile collection while the server is fsyncLocked.
+            //
+            // Most profiler tests can be run in parallel with each other as they use test-specific
+            // databases, with the exception of tests which modify slowms or the profiler's sampling
+            // rate, since those affect profile settings globally.
+            parallelFilesDir + "/api/apitest_db_profile_level.js",
+            parallelFilesDir + "/index/geo/geo_s2cursorlimitskip.js",
+            parallelFilesDir + "/administrative/profile/profile1.js",
+            parallelFilesDir + "/administrative/profile/profile2.js",
+            parallelFilesDir + "/administrative/profile/profile3.js",
+            parallelFilesDir + "/administrative/profile/profile_agg.js",
+            parallelFilesDir + "/administrative/profile/profile_count.js",
+            parallelFilesDir + "/administrative/profile/profile_delete.js",
+            parallelFilesDir + "/administrative/profile/profile_distinct.js",
+            parallelFilesDir + "/administrative/profile/profile_find.js",
+            parallelFilesDir + "/administrative/profile/profile_findandmodify.js",
+            parallelFilesDir + "/administrative/profile/profile_getmore.js",
+            parallelFilesDir + "/administrative/profile/profile_hide_index.js",
+            parallelFilesDir + "/administrative/profile/profile_insert.js",
+            parallelFilesDir + "/administrative/profile/profile_list_collections.js",
+            parallelFilesDir + "/administrative/profile/profile_list_indexes.js",
+            parallelFilesDir + "/administrative/profile/profile_mapreduce.js",
+            parallelFilesDir + "/administrative/profile/profile_no_such_db.js",
+            parallelFilesDir + "/administrative/profile/profile_query_hash.js",
+            parallelFilesDir + "/administrative/profile/profile_sampling.js",
+            parallelFilesDir + "/administrative/profile/profile_update.js",
+            parallelFilesDir + "/query/plan_cache/cached_plan_trial_does_not_discard_work.js",
+            parallelFilesDir + "/sbe/from_plan_cache_flag.js",
+            parallelFilesDir + "/timeseries/bucket_unpacking_with_sort_plan_cache.js",
+
+            // These tests rely on a deterministically refreshable logical session cache. If they
+            // run in parallel, they could interfere with the cache and cause failures.
+            parallelFilesDir + "/administrative/list_all_local_sessions.js",
+            parallelFilesDir + "/administrative/list_all_sessions.js",
+            parallelFilesDir + "/administrative/list_sessions.js",
+        ];
+        var serialTests = makeKeys(serialTestsArr);
+
+        // prefix the first thread with the serialTests
+        // (which we will exclude from the rest of the threads below)
+        params[0] = serialTestsArr;
+        var files = getFilesRecursive(parallelFilesDir);
+        files = Array.shuffle(files);
+
+        var i = 0;
+        files.forEach(function(x) {
+            if ((/[\/\\]_/.test(x.name)) || (!/\.js$/.test(x.name)) ||
+                (x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests) ||  //
+                (x.name in serialTests)) {
+                print(" >>>>>>>>>>>>>>> skipping " + x.name);
+                return;
+            }
+            // add the test to run in one of the threads.
+            params[i % n].push(x.name);
+            ++i;
+        });
+
+        // randomize ordering of the serialTests
+        params[0] = Array.shuffle(params[0]);
+
+        for (var i in params) {
+            params[i].unshift(i);
+        }
+
+        return params;
+    };
+
+    async function measureAsync(fn) {
+        const start = new Date();
+        await fn.apply(null, Array.from(arguments).slice(2));
+        return (new Date()).getTime() - start.getTime();
+    }
+
+    // runs a set of test files
+    // first argument is an identifier for this tester, remaining arguments are file names
+    ParallelTester.fileTester = async function() {
+        var args = Array.from(arguments);
+        var suite = args.shift();
+        for (const x of args) {
+            print(" S" + suite + " Test : " + x + " ...");
+            const time = await measureAsync(async function() {
+                // Create a new connection to the db for each file. If tests share the same
+                // connection it can create difficult to debug issues.
+                var db = new Mongo(db.getMongo().host).getDB(db.getName());
+                gc();
+                await import(x);
+            });
+            print(" S" + suite + " Test : " + x + " " + time + "ms");
+        }
+    };
+
+    // params: array of arrays, each element of which consists of a function followed
+    // by zero or more arguments to that function. Each function and its arguments will
+    // be called in a separate thread.
+    // msg: failure message
+    assert.parallelTests = function(params, msg) {
+        function wrapper(fun, argv, globals) {
+            if (globals.hasOwnProperty("TestData")) {
+                TestData = globals.TestData;
+            }
+
+            try {
+                fun.apply(0, argv);
+                return {passed: true};
+            } catch (e) {
+                print("\n********** Parallel Test FAILED: " + tojson(e) + "\n");
+                return {
+                    passed: false,
+                    testName: tojson(e).match(/Error: error loading js file: (.*\.js)/)[1]
+                };
+            }
+        }
+
+        TestData.isParallelTest = true;
+
+        var runners = new Array();
+        for (var i in params) {
+            var param = params[i];
+            var test = param.shift();
+
+            // Make a shallow copy of TestData so we can override the test name to
+            // prevent tests on different threads that to use jsTestName() as the
+            // collection name from colliding.
+            const clonedTestData = Object.assign({}, TestData);
+            clonedTestData.testName = `ParallelTesterThread${i}`;
+            var t = new Thread(wrapper, test, param, {TestData: clonedTestData});
+            runners.push(t);
+        }
+
+        runners.forEach(function(x) {
+            x.start();
+        });
+        var nFailed = 0;
+        var failedTests = [];
+        // SpiderMonkey doesn't like it if we exit before all threads are joined
+        // (see SERVER-19615 for a similar issue).
+        runners.forEach(function(x) {
+            if (!x.returnData().passed) {
+                ++nFailed;
+                failedTests.push(x.returnData().testName);
+            }
+        });
+        msg += ": " + tojsononeline(failedTests);
+        assert.eq(0, nFailed, msg);
+    };
+}
+
+globalThis.CountDownLatch = Object.extend(function(count) {
+    if (!(this instanceof CountDownLatch)) {
+        return new CountDownLatch(count);
+    }
+    this._descriptor = CountDownLatch._new.apply(null, arguments);
+
+    // NOTE: The following methods have to be defined on the instance itself,
+    // and not on its prototype. This is because properties on the
+    // prototype are lost during the serialization to BSON that occurs
+    // when passing data to a child thread.
+
+    this.await = function() {
+        CountDownLatch._await(this._descriptor);
+    };
+    this.countDown = function() {
+        CountDownLatch._countDown(this._descriptor);
+    };
+    this.getCount = function() {
+        return CountDownLatch._getCount(this._descriptor);
+    };
+}, CountDownLatch);
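With run() now async (it awaits assert.parallelTests, whose per-file workers await import(x)), a caller drives the tester from module code roughly like this minimal sketch (thread payloads are illustrative):

import {ParallelTester} from "jstests/libs/parallelTester_module.js";

const pt = new ParallelTester();
pt.add(function(msg) {
    print(msg);
}, ["hello from thread 0"]);
pt.add(function(msg) {
    print(msg);
}, ["hello from thread 1"]);
await pt.run("parallelTester smoke test");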
@@ -22,9 +22,7 @@
  * For each index type, a v1 unique, v2 unique, v1 non-unique and v2 non-unique index
  * is considered except for hashed and wildcard, which only consider the v2 non-unique case.
  */
-(function() {
-'use strict';
-load('jstests/hooks/validate_collections.js');
+import {validateCollections} from "jstests/hooks/validate_collections.js";

 const kNumDocs = 100;

@@ -273,4 +271,3 @@ function insertDocumentsUnique(collection, numDocs, getDoc) {
     }
     assert.commandWorked(bulk.execute());
 }
-})();
@@ -1,10 +1,8 @@
 /**
  * Tests that capped collections get the correct fast counts after rollback in FCV 4.4.
  */
-(function() {
-'use strict';
-
-load("jstests/multiVersion/libs/multiversion_rollback.js");
+import {setupReplicaSet} from "jstests/multiVersion/libs/multiversion_rollback.js";
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = 'rollback_capped_deletions';
 const dbName = testName;

@@ -47,4 +45,3 @@ try {

 // The fast count checks occur when tearing down the fixture as part of the consistency checks.
 rollbackTest.stop();
-})();
@@ -3,9 +3,7 @@
  * downgraded version rollback node and a 'latest' version sync source.
  */

-(function() {
-"use strict";
-load("jstests/multiVersion/libs/multiversion_rollback.js");
+import {testMultiversionRollback} from "jstests/multiVersion/libs/multiversion_rollback.js";

 var testName = "multiversion_rollback_last_lts_to_latest";
 jsTestLog("Testing multiversion rollback from last-lts to latest");

@@ -14,4 +12,3 @@ testMultiversionRollback(testName, "last-lts", "latest");
 testName = "multiversion_rollback_last_continuous_to_latest";
 jsTestLog("Testing multiversion rollback from last-continuous to latest");
 testMultiversionRollback(testName, "last-continuous", "latest");
-})();
@@ -3,9 +3,7 @@
  * 'latest' version rollback node and a downgraded version sync source.
  */

-(function() {
-"use strict";
-load("jstests/multiVersion/libs/multiversion_rollback.js");
+import {testMultiversionRollback} from "jstests/multiVersion/libs/multiversion_rollback.js";

 var testName = "multiversion_rollback_latest_to_last_lts";
 jsTestLog("Testing multiversion rollback from latest to last-lts");

@@ -14,4 +12,3 @@ testMultiversionRollback(testName, "latest", "last-lts");
 var testName = "multiversion_rollback_latest_to_last_continuous";
 jsTestLog("Testing multiversion rollback from latest to last-continuous");
 testMultiversionRollback(testName, "latest", "last-continuous");
-})();
@@ -4,9 +4,10 @@
  * downgrading version rollback node and a lastLTS version sync source.
  */

-(function() {
-"use strict";
-load("jstests/multiVersion/libs/multiversion_rollback.js");
+import {
+    testMultiversionRollbackDowngradingFromLastLTS,
+    testMultiversionRollbackLatestFromDowngrading,
+} from "jstests/multiVersion/libs/multiversion_rollback.js";

 let testName = "multiversion_rollback_latest_from_downgrading";
 jsTestLog("Testing multiversion rollback with a node in latest syncing from a node in downgrading");

@@ -17,4 +18,3 @@ testName = "multiversion_rollback_downgrading_from_last_lts";
 jsTestLog(
     "Testing multiversion rollback with a node in downgrading syncing from a node in lastLTS");
 testMultiversionRollbackDowngradingFromLastLTS(testName);
-})();
@@ -5,19 +5,17 @@
  * exercise rollback via refetch in the case that refetch is necessary.
  */

-'use strict';
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/libs/collection_drop_recreate.js");
 load('jstests/libs/parallel_shell_helpers.js');
 load("jstests/libs/fail_point_util.js");

-function printFCVDoc(nodeAdminDB, logMessage) {
+export function printFCVDoc(nodeAdminDB, logMessage) {
     const fcvDoc = nodeAdminDB.system.version.findOne({_id: 'featureCompatibilityVersion'});
     jsTestLog(logMessage + ` ${tojson(fcvDoc)}`);
 }

-function CommonOps(dbName, node) {
+export function CommonOps(dbName, node) {
     // Insert four documents on both nodes.
     assert.commandWorked(node.getDB(dbName)["bothNodesKeep"].insert({a: 1}));
     assert.commandWorked(node.getDB(dbName)["rollbackNodeDeletes"].insert({b: 1}));

@@ -25,7 +23,7 @@ function CommonOps(dbName, node) {
     assert.commandWorked(node.getDB(dbName)["bothNodesUpdate"].insert({d: 1}));
 }

-function RollbackOps(dbName, node) {
+export function RollbackOps(dbName, node) {
     // Perform operations only on the rollback node:
     // 1. Delete a document.
     // 2. Update a document only on this node.

@@ -36,7 +34,7 @@ function RollbackOps(dbName, node) {
     assert.commandWorked(node.getDB(dbName)["bothNodesUpdate"].update({d: 1}, {d: 0}));
 }

-function SyncSourceOps(dbName, node) {
+export function SyncSourceOps(dbName, node) {
     // Perform operations only on the sync source:
     // 1. Make a conflicting write on one of the documents the rollback node updates.
     // 2. Insert a new document.

@@ -52,7 +50,7 @@ function SyncSourceOps(dbName, node) {
  * @param {string} syncSourceVersion the desired version for the sync source
  *
  */
-function testMultiversionRollback(testName, rollbackNodeVersion, syncSourceVersion) {
+export function testMultiversionRollback(testName, rollbackNodeVersion, syncSourceVersion) {
     jsTestLog("Started multiversion rollback test for versions: {rollbackNode: " +
               rollbackNodeVersion + ", syncSource: " + syncSourceVersion + "}.");

@@ -82,7 +80,7 @@ function testMultiversionRollback(testName, rollbackNodeVersion, syncSourceVersi
 }

 // Test rollback between latest rollback node and downgrading sync node.
-function testMultiversionRollbackLatestFromDowngrading(testName, upgradeImmediately) {
+export function testMultiversionRollbackLatestFromDowngrading(testName, upgradeImmediately) {
     const dbName = testName;
     const replSet = new ReplSetTest(
         {name: testName, nodes: 3, useBridge: true, settings: {chainingAllowed: false}});

@@ -186,7 +184,7 @@ function testMultiversionRollbackLatestFromDowngrading(testName, upgradeImmediat
 }

 // Test rollback between downgrading rollback node and lastLTS sync node.
-function testMultiversionRollbackDowngradingFromLastLTS(testName) {
+export function testMultiversionRollbackDowngradingFromLastLTS(testName) {
     const dbName = testName;
     const replSet = new ReplSetTest(
         {name: testName, nodes: 3, useBridge: true, settings: {chainingAllowed: false}});

@@ -290,7 +288,7 @@ function testMultiversionRollbackDowngradingFromLastLTS(testName) {
  * @param {string} rollbackNodeVersion the desired version for the rollback node
  * @param {string} syncSourceVersion the desired version for the sync source
  */
-function setupReplicaSet(testName, rollbackNodeVersion, syncSourceVersion) {
+export function setupReplicaSet(testName, rollbackNodeVersion, syncSourceVersion) {
     jsTestLog(
         `[${testName}] Beginning cluster setup with versions: {rollbackNode: ${rollbackNodeVersion},
    syncSource: ${syncSourceVersion}}.`);
@@ -5,10 +5,7 @@
  * requires_replication,
  * ]
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const rollbackTest = new RollbackTest(jsTestName());

@@ -36,4 +33,3 @@ rollbackTest.transitionToSteadyStateOperations();
 // validation to fail if rollback did not perform capped deletes on documents that were inserted
 // earlier in rollback.
 rollbackTest.stop();
-})();
@@ -8,12 +8,6 @@
  * ]
  */

-// The global 'db' variable is used by the data consistency hooks.
-var db;
-
-(function() {
-"use strict";
-
 // We skip doing the data consistency checks while terminating the cluster because they conflict
 // with the counts of the number of times the "dbhash" and "validate" commands are run.
 TestData.skipCollectionAndIndexValidation = true;

@@ -42,24 +36,27 @@ function countMatches(pattern, output) {
     return numMatches;
 }

-function runDataConsistencyChecks(testCase) {
-    db = testCase.conn.getDB("test");
-    try {
+async function runDataConsistencyChecks(testCase) {
     clearRawMongoProgramOutput();

+    // NOTE: once modules are imported they are cached, so we need to run this in a parallel shell.
+    const awaitShell = startParallelShell(async function() {
+        globalThis.db = db.getSiblingDB("test");
         load("jstests/hooks/run_check_repl_dbhash.js");
-        load("jstests/hooks/run_validate_collections.js");
+        await import("jstests/hooks/run_validate_collections.js");
+    }, testCase.conn.port);
+
+    awaitShell();
+    const output = rawMongoProgramOutput();

     // We terminate the processes to ensure that the next call to rawMongoProgramOutput()
     // will return all of their output.
     testCase.teardown();
-    return rawMongoProgramOutput();
-} finally {
-    db = undefined;
-}
+
+    return output;
 }

-(function testReplicaSetWithVotingSecondaries() {
+await (async function testReplicaSetWithVotingSecondaries() {
     const numNodes = 2;
     const rst = new ReplSetTest({
         nodes: numNodes,

@@ -72,7 +69,7 @@ function runDataConsistencyChecks(testCase) {

     // Insert a document so the "dbhash" and "validate" commands have some actual work to do.
     assert.commandWorked(rst.nodes[0].getDB("test").mycoll.insert({}));
-    const output = runDataConsistencyChecks({conn: rst.nodes[0], teardown: () => rst.stopSet()});
+    const output =
+        await runDataConsistencyChecks({conn: rst.nodes[0], teardown: () => rst.stopSet()});

     let pattern = makePatternForDBHash("test");
     assert.eq(numNodes,

@@ -85,7 +83,7 @@ function runDataConsistencyChecks(testCase) {
           "expected to find " + tojson(pattern) + " from each node in the log output");
 })();

-(function testReplicaSetWithNonVotingSecondaries() {
+await (async function testReplicaSetWithNonVotingSecondaries() {
     const numNodes = 2;
     const rst = new ReplSetTest({
         nodes: numNodes,

@@ -104,7 +102,8 @@ function runDataConsistencyChecks(testCase) {

     // Insert a document so the "dbhash" and "validate" commands have some actual work to do.
     assert.commandWorked(rst.nodes[0].getDB("test").mycoll.insert({}));
-    const output = runDataConsistencyChecks({conn: rst.nodes[0], teardown: () => rst.stopSet()});
+    const output =
+        await runDataConsistencyChecks({conn: rst.nodes[0], teardown: () => rst.stopSet()});

     let pattern = makePatternForDBHash("test");
     assert.eq(numNodes,

@@ -117,7 +116,7 @@ function runDataConsistencyChecks(testCase) {
           "expected to find " + tojson(pattern) + " from each node in the log output");
 })();

-(function testShardedClusterWithOneNodeCSRS() {
+await (async function testShardedClusterWithOneNodeCSRS() {
     const st = new ShardingTest({
         mongos: 1,
         config: 1,

@@ -131,7 +130,7 @@ function runDataConsistencyChecks(testCase) {
     // database exists for when we go to run the data consistency checks against the CSRS.
     st.shardColl(st.s.getDB("test").mycoll, {_id: 1}, false);

-    const output = runDataConsistencyChecks({conn: st.s, teardown: () => st.stop()});
+    const output = await runDataConsistencyChecks({conn: st.s, teardown: () => st.stop()});

     let pattern = makePatternForDBHash("config");
     assert.eq(0,

@@ -147,7 +146,7 @@ function runDataConsistencyChecks(testCase) {
           "expected to find " + tojson(pattern) + " in the log output for 1-node CSRS");
 })();

-(function testShardedCluster() {
+await (async function testShardedCluster() {
     const st = new ShardingTest({
         mongos: 1,
         config: 3,

@@ -168,7 +167,7 @@ function runDataConsistencyChecks(testCase) {
     // Insert a document so the "dbhash" and "validate" commands have some actual work to do on
     // the replica set shard.
     assert.commandWorked(st.s.getDB("test").mycoll.insert({_id: 0}));
-    const output = runDataConsistencyChecks({conn: st.s, teardown: () => st.stop()});
+    const output = await runDataConsistencyChecks({conn: st.s, teardown: () => st.stop()});

     // The "config" database exists on both the CSRS and the replica set shards due to the
     // "config.transactions" collection.

@@ -198,4 +197,3 @@ function runDataConsistencyChecks(testCase) {
           "expected to find " + tojson(pattern) +
           " from each replica set shard node in the log output");
 })();
-})();
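The parallel-shell dance above is the workaround for ES-module caching: a module is evaluated once per process and subsequent imports are cache hits, so re-running a hook like run_validate_collections.js requires a fresh shell process each time. The recurring shape (conn is assumed; this mirrors the hunks above):

const awaitShell = startParallelShell(async function() {
    globalThis.db = db.getSiblingDB("test");
    await import("jstests/hooks/run_validate_collections.js");
}, conn.port);
awaitShell();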
@@ -5,14 +5,11 @@
  * requires_replication,
  * ]
  */
-(function() {
-'use strict';
-
 TestData.rollbackShutdowns = true;
 TestData.logComponentVerbosity = {
     storage: {recovery: 2}
 };
-load('jstests/replsets/libs/rollback_test.js');
+const {RollbackTest} = await import("jstests/replsets/libs/rollback_test.js");

 const rollbackTest = new RollbackTest();
 let primary = rollbackTest.getPrimary();

@@ -51,4 +48,3 @@ rollbackTest.transitionToSteadyStateOperations();
 assert.contains("timestamped", rollbackNode.getDB("foo").getCollectionNames());
 assert.contains("untimestamped", rollbackNode.getDB("local").getCollectionNames());
 rollbackTest.stop();
-})();
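Unlike the static conversions elsewhere in this commit, this test pulls RollbackTest in with a dynamic await import(...). Static import declarations are hoisted and evaluated before any other statement in the module, so the TestData assignments above would otherwise run only after the module had loaded; the dynamic form executes in statement order. In sketch form:

// Static form: hoisted, evaluated before TestData is configured.
// import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

// Dynamic form: runs where it appears, after TestData is set.
TestData.rollbackShutdowns = true;
const {RollbackTest} = await import("jstests/replsets/libs/rollback_test.js");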
@@ -5,10 +5,8 @@
  * requires_wiredtiger,
  * ]
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Returns list of collections in database, including pending drops.
 // Assumes all collections fit in first batch of results.

@@ -148,4 +146,3 @@ assert.eq(2, noOpsToRollbackColl.find().itcount());
 assert.eq(2, noOpsToRollbackColl.count());

 rollbackTest.stop();
-})();
@@ -5,10 +5,7 @@
  * requires_replication,
  * ]
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const getExtendedRangeCount = (db) => {
     return assert.commandWorked(db.adminCommand({serverStatus: 1}))

@@ -68,4 +65,3 @@ rollbackTest.transitionToSteadyStateOperations();
 assert.eq(1, getExtendedRangeCount(rollbackNode));

 rollbackTest.stop();
-})();
@ -6,10 +6,7 @@
|
|||
* requires_replication,
|
||||
* ]
|
||||
*/
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
load('jstests/replsets/libs/rollback_test.js');
|
||||
import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
|
||||
|
||||
const rollbackTest = new RollbackTest(jsTestName());
|
||||
|
||||
|
|
@ -58,4 +55,3 @@ const buckets = bucketsColl.find().toArray();
|
|||
assert.eq(buckets.length, 2, 'Expected two bucket but found: ' + tojson(buckets));
|
||||
|
||||
rollbackTest.stop();
|
||||
})();
|
||||
|
|
|
|||
|
|
@@ -4,12 +4,6 @@
  * the isCleaningServerMetadata state, where we must complete the downgrade before upgrading).
  */
-
-// The global 'db' variable is used by the data consistency hooks.
-var db;
-
-(function() {
-"use strict";

 // We skip doing the data consistency checks while terminating the cluster because they conflict
 // with the counts of the number of times the "validate" command is run.
 TestData.skipCollectionAndIndexValidation = true;
@@ -43,25 +37,27 @@ function countMatches(pattern, output) {
     return numMatches;
 }

-function runValidateHook(testCase) {
-    db = testCase.conn.getDB("test");
-    TestData.forceValidationWithFeatureCompatibilityVersion = latestFCV;
-    try {
-        clearRawMongoProgramOutput();
-
-        load("jstests/hooks/run_validate_collections.js");
+async function runValidateHook(testCase) {
+    clearRawMongoProgramOutput();
+
+    // NOTE: once modules are imported they are cached, so we need to run this in a parallel shell.
+    const awaitShell = startParallelShell(async function() {
+        globalThis.db = db.getSiblingDB("test");
+        TestData.forceValidationWithFeatureCompatibilityVersion = latestFCV;
+        await import("jstests/hooks/run_validate_collections.js");
+    }, testCase.conn.port);
+
+    awaitShell();
+    const output = rawMongoProgramOutput();

-        // We terminate the processes to ensure that the next call to rawMongoProgramOutput()
-        // will return all of their output.
-        testCase.teardown();
-        return rawMongoProgramOutput();
-    } finally {
-        db = undefined;
-        TestData.forceValidationWithFeatureCompatibilityVersion = undefined;
-    }
+    // We terminate the processes to ensure that the next call to rawMongoProgramOutput()
+    // will return all of their output.
+    testCase.teardown();
+
+    return output;
 }

-function testStandalone(additionalSetupFn, {
+async function testStandalone(additionalSetupFn, {
     expectedAtTeardownFCV,
     expectedSetLastLTSFCV: expectedSetLastLTSFCV = 0,
     expectedSetLatestFCV: expectedSetLatestFCV = 0
@@ -76,7 +72,7 @@ function testStandalone(additionalSetupFn, {
     // Run the additional setup function to put the server into the desired state.
     additionalSetupFn(conn);

-    const output = runValidateHook({
+    const output = await runValidateHook({
         conn: conn,
         teardown: () => {
             // The validate hook should leave the server with a feature compatibility version of
@@ -185,31 +181,25 @@ function forceInterruptedUpgradeOrDowngrade(conn, targetVersion) {
     }));
 }

-(function testStandaloneInLatestFCV() {
-    testStandalone(conn => {
-        checkFCV(conn.getDB("admin"), latestFCV);
-    }, {expectedAtTeardownFCV: latestFCV});
-})();
+// testStandaloneInLatestFCV
+await testStandalone(conn => checkFCV(conn.getDB("admin"), latestFCV),
+                     {expectedAtTeardownFCV: latestFCV});

-(function testStandaloneInLastLTSFCV() {
-    testStandalone(conn => {
+// testStandaloneInLastLTSFCV
+await testStandalone(conn => {
     assert.commandWorked(
         conn.adminCommand({setFeatureCompatibilityVersion: lastLTSFCV, confirm: true}));
     checkFCV(conn.getDB("admin"), lastLTSFCV);
 }, {expectedAtTeardownFCV: lastLTSFCV, expectedSetLastLTSFCV: 1, expectedSetLatestFCV: 1});
-})();

-(function testStandaloneWithInterruptedFCVDowngrade() {
-    testStandalone(conn => {
+// testStandaloneWithInterruptedFCVDowngrade
+await testStandalone(conn => {
     forceInterruptedUpgradeOrDowngrade(conn, lastLTSFCV);
 }, {expectedAtTeardownFCV: lastLTSFCV, expectedSetLastLTSFCV: 2, expectedSetLatestFCV: 1});
-})();

-(function testStandaloneWithInterruptedFCVUpgrade() {
-    testStandalone(conn => {
+// testStandaloneWithInterruptedFCVUpgrade
+await testStandalone(conn => {
     assert.commandWorked(
         conn.adminCommand({setFeatureCompatibilityVersion: lastLTSFCV, confirm: true}));
     forceInterruptedUpgradeOrDowngrade(conn, latestFCV);
 }, {expectedAtTeardownFCV: lastLTSFCV, expectedSetLastLTSFCV: 1, expectedSetLatestFCV: 1});
-})();
-})();
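The parallel shell in the hunk above is the key workaround of this commit: a `load()`ed script re-executes every time it is loaded, but an ES module is evaluated once and then cached, so re-importing `run_validate_collections.js` in the same shell would be a no-op on the second call. Spawning a fresh shell per invocation starts with an empty module cache. A minimal sketch of the pattern, with `some_hook.js` as a hypothetical hook module whose top-level code must run on every call:

    // Each startParallelShell() spawns a new shell process, so the module
    // cache is empty and the hook's top-level code runs again.
    const awaitShell = startParallelShell(async function() {
        globalThis.db = db.getSiblingDB("test");     // hooks read the global 'db'
        await import("jstests/hooks/some_hook.js");  // hypothetical hook module
    }, conn.port);
    awaitShell();  // block until the parallel shell exits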
@@ -3,10 +3,7 @@
  * properly.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase";
@@ -21,7 +18,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -34,4 +31,3 @@ twoPhaseDropTest.prepareDropCollection(collName);
 twoPhaseDropTest.commitDropCollection(collName);

 twoPhaseDropTest.stop();
-}());
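The `return` → `quit()` change above falls directly out of removing the IIFE: a top-level `return` is only legal inside a function body, so once the wrapper is gone an early bail-out needs the shell's global `quit()` instead. The same substitution repeats in every two-phase-drop test below. Sketch, with `supported` as a hypothetical condition:

    // Before: early exit via return, legal only inside the IIFE wrapper.
    // (function() {
    //     if (!supported) {
    //         return;
    //     }
    // }());

    // After: module top level has no enclosing function, so end the shell.
    if (!supported) {
        quit();  // terminates this test shell process
    }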
@@ -5,10 +5,7 @@
  * collection.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase_apply_ops_create";
@@ -23,7 +20,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -60,4 +57,3 @@ try {
 } finally {
     twoPhaseDropTest.stop();
 }
-}());

@@ -3,10 +3,7 @@
  * By definition, a drop-pending collection will be removed by the server eventually.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase_apply_ops_noop";
@@ -21,7 +18,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -62,4 +59,3 @@ assert(
 twoPhaseDropTest.commitDropCollection(collName);

 twoPhaseDropTest.stop();
-}());

@@ -3,10 +3,7 @@
  * remains in a drop-pending state. This is the same behavior as renaming a non-existent collection.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase_apply_ops_rename";
@@ -21,7 +18,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -77,4 +74,3 @@ try {
 } finally {
     twoPhaseDropTest.stop();
 }
-}());

@@ -2,12 +2,9 @@
  * Test to ensure that index creation fails on a drop-pending collection.
  */
-
-(function() {
-"use strict";
-
 load("jstests/libs/fail_point_util.js");            // For kDefaultWaitForFailPointTimeout.
 load("jstests/noPassthrough/libs/index_build.js");  // For IndexBuildTest.
-load("jstests/replsets/libs/two_phase_drops.js");   // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase";
@@ -22,7 +19,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 const primary = replTest.getPrimary();
@@ -67,4 +64,3 @@ try {
 }

 twoPhaseDropTest.stop();
-}());

@@ -3,10 +3,7 @@
  * phase collection drop.
  */
-
-(function() {
-'use strict';
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Compute db hash for all collections on given database.
 function getDbHash(database) {
@@ -27,7 +24,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 let primaryDB = replTest.getPrimary().getDB(dbName);
@@ -49,4 +46,3 @@ let failMsg = "dbHash during drop pending phase did not match dbHash after drop
 assert.eq(dropPendingDbHash, dropCommittedDbHash, failMsg);

 replTest.stopSet();
-})();

@@ -5,10 +5,7 @@
  * ]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase";
@@ -23,7 +20,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 const primary = replTest.getPrimary();
@@ -47,4 +44,3 @@ try {
 }

 twoPhaseDropTest.stop();
-}());

@@ -3,10 +3,7 @@
  * renameCollection command when dropTarget is set to true.
  */
-
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/two_phase_drops.js');  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Return a list of all indexes for a given collection. Use 'args' as the
 // 'listIndexes' command arguments.
@@ -33,7 +30,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collections that will be renamed and dropped.
@@ -114,4 +111,3 @@ try {

     twoPhaseDropTest.stop();
 }
-}());
@@ -12,10 +12,7 @@
  * 6. Resume oplog application on secondary and make sure collection drop is eventually committed.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/two_phase_drops.js");  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up a two phase drop test.
 let testName = "drop_collection_two_phase_step_down";
@@ -30,7 +27,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -67,4 +64,3 @@ try {
 } finally {
     twoPhaseDropTest.stop();
 }
-}());

@@ -3,11 +3,8 @@
  * collections, with optimes preceding or equal to the operation's optime, to be reaped.
  */
-
-(function() {
-'use strict';
-
 load("jstests/libs/fail_point_util.js");
-load('jstests/replsets/libs/two_phase_drops.js');  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Alias to logging function in two_phase_drops.js
 const testLog = TwoPhaseDropCollectionTest._testLog;
@@ -34,7 +31,7 @@ let replTest = twoPhaseDropTest.initReplSet();
 if (!twoPhaseDropTest.supportsDropPendingNamespaces()) {
     jsTestLog('Drop pending namespaces not supported by storage engine. Skipping test.');
     twoPhaseDropTest.stop();
-    return;
+    quit();
 }

 // Create the collection that will be dropped.
@@ -85,4 +82,3 @@ assert.eq(4, collForInserts.find().itcount());
 twoPhaseDropTest.commitDropCollection(collName);

 twoPhaseDropTest.stop();
-}());

@@ -14,10 +14,7 @@
  * unless explicitly requested by the user with a write concern.
  */
-
-(function() {
-"use strict";
-
-load('jstests/replsets/libs/two_phase_drops.js');  // For TwoPhaseDropCollectionTest.
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/replsets/rslib.js");
 load("jstests/libs/write_concern_util.js");

@@ -163,4 +160,3 @@ assert.eq(0, exitCode, 'dropDatabase command on ' + primary.host + ' failed.');
 jsTestLog('Completed dropDatabase command on ' + primary.host);

 replTest.stopSet();
-}());
@@ -9,11 +9,8 @@
  * ]
  */
-
-(function() {
-'use strict';
-
 load('jstests/replsets/libs/rollback_files.js');
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load('jstests/libs/uuid_util.js');

 function uuidToCollName(uuid) {
@@ -534,4 +531,3 @@ for (let bulk of [false, true]) {
 }

 rollbackTest.stop();
-})();

@@ -2,11 +2,8 @@
  * Test that CollectionCloner completes without error when a collection is dropped during cloning.
  */
-
-(function() {
-"use strict";
-
 load("jstests/libs/fail_point_util.js");
-load('jstests/replsets/libs/two_phase_drops.js');
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/libs/uuid_util.js");

 // Set up replica set. Disallow chaining so nodes always sync from primary.
@@ -188,4 +185,3 @@ runDropTest({
 });

 replTest.stopSet();
-})();

@@ -5,10 +5,7 @@
  * @tags: [multiversion_incompatible]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "initial_sync_fails_on_rollback";
 const dbName = testName;
@@ -99,4 +96,3 @@ rst.stop(initialSyncNode);
 rst.remove(initialSyncNode);

 rollbackTest.stop();
-})();

@@ -2,12 +2,9 @@
  * Test that CollectionCloner completes without error when a collection is renamed during cloning.
  */
-
-(function() {
-"use strict";
-
 load("jstests/libs/fail_point_util.js");
 load("jstests/libs/uuid_util.js");
-load('jstests/replsets/libs/two_phase_drops.js');
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";

 // Set up replica set. Disallow chaining so nodes always sync from primary.
 const testName = "initial_sync_rename_collection";
@@ -211,4 +208,3 @@ runRenameTest({
 });

 replTest.stopSet();
-})();
@@ -2,12 +2,10 @@
  * Fixture to test rollback permutations with index builds.
  */
-
-"use strict";
-
 load("jstests/noPassthrough/libs/index_build.js");  // for IndexBuildTest
-load('jstests/replsets/libs/rollback_test.js');     // for RollbackTest
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

-class RollbackIndexBuildsTest {
+export class RollbackIndexBuildsTest {
     constructor(expectedErrors) {
         jsTestLog("Set up a Rollback Test.");
         const replTest = new ReplSetTest({
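This hunk shows the library-side half of the migration that all the test-side hunks depend on: the fixture class gains an `export` keyword, and each consumer replaces its `load()` call with a named import. A minimal sketch of the two sides, using a hypothetical `my_fixture.js`:

    // jstests/replsets/libs/my_fixture.js (hypothetical library module)
    export class MyFixture {
        constructor(name) {
            this.name = name;  // descriptive test name for logging
        }
    }

    // A consuming test: the named import replaces
    // load('jstests/replsets/libs/my_fixture.js');
    import {MyFixture} from "jstests/replsets/libs/my_fixture.js";
    const fixture = new MyFixture(jsTestName());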
@@ -1,5 +1,4 @@
 load("jstests/noPassthrough/libs/index_build.js");
-load('jstests/replsets/libs/rollback_test.js');

 const RollbackResumableIndexBuildTest = class {
     static checkCompletedAndDrop(
@@ -38,11 +38,9 @@
  * of each stage.
  */
-
-"use strict";
-
+import {CollectionValidator} from "jstests/hooks/validate_collections.js";
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/replsets/rslib.js");
-load("jstests/replsets/libs/two_phase_drops.js");
-load("jstests/hooks/validate_collections.js");
 load('jstests/libs/fail_point_util.js');

 /**
@@ -70,7 +68,7 @@ load('jstests/libs/fail_point_util.js');
  * @param {Object} [optional] nodeOptions command-line options to apply to all nodes in the replica
  *     set. Ignored if 'replSet' is provided.
  */
-function RollbackTest(name = "RollbackTest", replSet, nodeOptions) {
+export function RollbackTest(name = "RollbackTest", replSet, nodeOptions) {
     const State = {
         kStopped: "kStopped",
         kRollbackOps: "kRollbackOps",
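Note that the converted module above still calls `load()` for helpers that have not been modularized yet (`rslib.js`, `fail_point_util.js`). This is what keeps the migration incremental: an ES module may still `load()` a legacy script into the global scope, while a legacy script cannot declare an `import` of a module. A sketch of the transitional shape, with `legacy_helpers.js` as a hypothetical unconverted script:

    // A converted module can mix both mechanisms during the transition.
    import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
    load("jstests/libs/legacy_helpers.js");  // hypothetical; defines globals

    // Symbols from the load()ed script are reached via the global scope.
    const rollbackTest = new RollbackTest(jsTestName());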
@@ -35,10 +35,8 @@
  * of restarts.
  */
-
-"use strict";
-
-load("jstests/hooks/validate_collections.js");
-load("jstests/replsets/libs/two_phase_drops.js");
+import {CollectionValidator} from "jstests/hooks/validate_collections.js";
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/replsets/rslib.js");

 Random.setRandomSeed();
@@ -58,7 +56,7 @@ Random.setRandomSeed();
  * @param {Object} [optional] nodeOptions command-line options to apply to all nodes in the replica
  *     set. Ignored if 'replSet' is provided.
  */
-function RollbackTestDeluxe(name = "FiveNodeDoubleRollbackTest", replSet, nodeOptions) {
+export function RollbackTestDeluxe(name = "FiveNodeDoubleRollbackTest", replSet, nodeOptions) {
     const State = {
         kStopped: "kStopped",
         kRollbackOps: "kRollbackOps",
@@ -14,13 +14,12 @@
  * of the replica set.
  *
  */
-"use strict";
-
 load("jstests/libs/fail_point_util.js");
 load("jstests/libs/fixture_helpers.js");      // For 'FixtureHelpers'.
 load("jstests/aggregation/extras/utils.js");  // For 'arrayEq'.

-class TwoPhaseDropCollectionTest {
+export class TwoPhaseDropCollectionTest {
     constructor(testName, dbName) {
         this.testName = testName;
         this.dbName = dbName;
@@ -8,10 +8,7 @@
  * @tags: [requires_majority_read_concern]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/replsets/rslib.js");

 const dbName = "test";
@@ -82,4 +79,3 @@ failPointAfterTransition.off();

 rollbackTest.transitionToSteadyStateOperations();
 rollbackTest.stop();
-}());

@@ -11,9 +11,7 @@
  * uses_transactions,
  * ]
  */
-(function() {
-"use strict";
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/core/txns/libs/prepare_helpers.js");

 const dbName = "test";
@@ -67,4 +65,3 @@ const newSession2 = new _DelegatingDriverSession(primary, session);
 assert.commandWorked(PrepareHelpers.commitTransaction(newSession2, prepareTimestamp));

 rollbackTest.stop();
-})();

@@ -2,11 +2,7 @@
  * Test that primary rollbacks before receiving any votes from the replica set should not
  * make createIndexes command's commit quorum value to be lost.
  */
-(function() {
-
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load('jstests/noPassthrough/libs/index_build.js');

 const dbName = jsTest.name();
@@ -60,4 +56,3 @@ awaitBuild();
 IndexBuildTest.assertIndexes(newPrimaryDB[collName], 2, ['_id_', 'i_1']);

 rollbackTest.stop();
-})();

@@ -6,10 +6,7 @@
  * ]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/replsets/rslib.js");

 const dbName = "test";
@@ -124,4 +121,3 @@ assert.commandFailedWithCode(

 // Check the replica set.
 rollbackTest.stop();
-}());

@@ -2,10 +2,7 @@
  * Test that a node in rollback state can safely be removed from the replica set
  * config via reconfig. See SERVER-48179.
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "rollbackColl";
@@ -87,4 +84,3 @@ rollbackTest.getTestFixture().waitForState(rollbackNode, ReplSetTest.State.SECON
 rollbackTest.transitionToSteadyStateOperations();

 rollbackTest.stop();
-})();

@@ -9,11 +9,8 @@
  * uses_transactions,
  * ]
  */
-(function() {
-"use strict";
-
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "recover_committed_aborted_prepared_transactions";
@@ -134,4 +131,3 @@ assert.sameMembers(testColl.find().toArray(), [{_id: 1}, {_id: 2}, {_id: 5}]);
 assert.eq(testColl.count(), 3);

 rollbackTest.stop();
-}());

@@ -17,11 +17,8 @@
  * uses_transactions,
  * ]
  */
-(function() {
-"use strict";
-
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "recover_prepared_transaction_state_after_rollback";
@@ -195,4 +192,3 @@ assert.sameMembers(testColl.find().toArray(), [{_id: 1, a: 1}, {_id: 2}, {_id: 3
 assert.eq(testColl.count(), 3);

 rollbackTest.stop();
-}());

@@ -7,10 +7,8 @@
  * requires_persistence,
  * ]
  */
-(function() {
-"use strict";
-
 load("jstests/replsets/libs/rollback_resumable_index_build.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";

@@ -20,4 +18,3 @@ RollbackResumableIndexBuildTest.runResumeInterruptedByRollback(
     rollbackTest, dbName, [{a: 1}, {a: 2}], {a: 1}, [{a: 3}], [{a: 4}]);

 rollbackTest.stop();
-})();

@@ -7,11 +7,8 @@
  *
  * @tags: [uses_transactions, uses_prepare_transaction]
  */
-(function() {
-"use strict";
-
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "rollback_aborted_prepared_transaction";
@@ -105,4 +102,3 @@ assert.eq(testColl.find().itcount(), 2);
 assert.eq(testColl.count(), 2);

 rollbackTest.stop();
-}());

@@ -11,10 +11,7 @@
  * @tags: [requires_fcv_53]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test_deluxe.js");
+import {RollbackTestDeluxe} from "jstests/replsets/libs/rollback_test_deluxe.js";

 let noOp = () => {};

@@ -380,4 +377,3 @@ rollbackTest.transitionToSteadyStateOperations();

 // Check the replica set.
 rollbackTest.stop();
-})();

@@ -1,10 +1,7 @@
 /**
  * Tests that capped collections get the correct fastcounts after rollback.
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = 'rollback_capped_deletions';
 const dbName = testName;
@@ -45,4 +42,3 @@ try {
 }

 rollbackTest.stop();
-})();

@@ -4,10 +4,7 @@
  * requires_replication,
  * ]
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load('jstests/replsets/libs/rollback_files.js');
 load("jstests/libs/uuid_util.js");

@@ -67,4 +64,3 @@ const uuid = getUUIDFromListCollections(rollbackTest.getPrimary().getDB(dbName),
 checkRollbackFiles(replTest.getDbPath(rollbackNode), collName, uuid, rollbackDocs);

 rollbackTest.stop();
-})();

@@ -5,10 +5,7 @@
  * @tags: [requires_fcv_53]
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test_deluxe.js");
+import {RollbackTestDeluxe} from "jstests/replsets/libs/rollback_test_deluxe.js";

 const testName = "rollback_collmods";
 const dbName = testName;
@@ -107,4 +104,3 @@ SteadyStateOps(rollbackTest.getPrimary());
 printCollectionOptions(rollbackTest, "at completion");

 rollbackTest.stop();
-})();
@@ -1,12 +1,9 @@
 /*
  * Basic test of a succesful replica set rollback for CRUD operations.
  */
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/replsets/rslib.js");

-(function() {
-"use strict";
-
 // Helper function for verifying contents at the end of the test.
 const checkFinalResults = function(db) {
     assert.eq(0, db.bar.count({q: 70}));
@@ -77,4 +74,3 @@ checkFinalResults(rollbackNodeDB);
 checkFinalResults(syncSourceDB);

 rollbackTest.stop();
-}());
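This file already had its `load()` call above the IIFE, which makes the conversion mechanics explicit: `import` declarations are only legal at the top level of a module, never inside a function body, so the import replaces the load in place and the now-pointless wrapper is dropped. Compact sketch:

    // Illegal: an import declaration may not appear inside a function.
    // (function() {
    //     import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
    // }());

    // Legal: the declaration sits at module top level and the test body
    // follows unwrapped.
    import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
    const rollbackTest = new RollbackTest(jsTestName());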
@@ -19,9 +19,7 @@
  * ]
  */
-
-(function() {
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "rollback_drop_database";

@@ -104,4 +102,3 @@ assert.commandWorked(rollbackNode.getDB(conflictingDbName)["afterRollback"].inse
     {"num": 2}, {writeConcern: {w: 2}}));

 rollbackTest.stop();
-})();

@@ -5,10 +5,7 @@
  * then renaming that collection and rolling back a drop on that index.
  */
-
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "rollback_drop_index_after_rename";
 const dbName = testName;
@@ -55,4 +52,3 @@ rollbackTest.transitionToSyncSourceOperationsDuringRollback();
 rollbackTest.transitionToSteadyStateOperations();

 rollbackTest.stop();
-})();

@@ -1,10 +1,7 @@
 // When run with --majorityReadConcern=off, this test reproduces the bug described in SERVER-38925,
 // where rolling back a delete followed by a restart produces documents with duplicate _id.
 // @tags: [requires_persistence]
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 TestData.rollbackShutdowns = true;
 TestData.allowUncleanShutdowns = true;
@@ -40,4 +37,3 @@ rollbackTest.restartNode(0, 9);

 // Check the replica set.
 rollbackTest.stop();
-}());

@@ -6,10 +6,7 @@
 //
 // @tags: [requires_persistence]
 //
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 TestData.rollbackShutdowns = true;
 let dbName = "test";
@@ -52,4 +49,3 @@ rollbackTest.transitionToSteadyStateOperations();

 // Check the replica set.
 rollbackTest.stop();
-}());
@@ -7,10 +7,8 @@
  *
  * @tags: [uses_transactions, uses_prepare_transaction]
  */
-(function() {
-"use strict";
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const name = "rollback_files_no_prepare_conflicts";
 const dbName = "test";
@@ -53,4 +51,3 @@ testColl = testDB.getCollection(collName);
 assert.docEq({_id: 42, a: "one"}, testColl.findOne({_id: 42}));

 rollbackTest.stop();
-})();

@@ -1,11 +1,7 @@
 /**
  * Test that rolling back an index build and collection creation behaves correctly.
  */
-(function() {
-"use strict";
-
-// For RollbackIndexBuildsTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest([ErrorCodes.InterruptedDueToReplStateChange]);

@@ -25,4 +21,3 @@ const schedule = [

 rollbackIndexTest.runSchedules([schedule]);
 rollbackIndexTest.stop();
-})();

@@ -2,11 +2,7 @@
  * Test that an index build aborted due to rollback restarts correctly, even if the none of the
  * associated oplog entries are rolled-back.
  */
-(function() {
-"use strict";
-
-// For RollbackIndexBuildsTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest([ErrorCodes.InterruptedDueToReplStateChange]);

@@ -27,4 +23,3 @@ const schedule = [

 rollbackIndexTest.runSchedules([schedule]);
 rollbackIndexTest.stop();
-})();

@@ -1,11 +1,7 @@
 /**
  * Tests different permutations of rolling-back index build start and abort oplog entries.
  */
-(function() {
-"use strict";
-
-// for RollbackIndexBuildTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest([ErrorCodes.Interrupted]);

@@ -17,4 +13,3 @@ const indexBuildOps = ["start", "abort"];
 const schedules = RollbackIndexBuildsTest.makeSchedules(rollbackOps, indexBuildOps);
 rollbackIndexTest.runSchedules(schedules);
 rollbackIndexTest.stop();
-})();

@@ -2,11 +2,7 @@
  * Test that rolling back an index build, but not collection creation, behaves correctly even when
  * the index build is aborted.
  */
-(function() {
-"use strict";
-
-// For RollbackIndexBuildsTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest(
     [ErrorCodes.InterruptedDueToReplStateChange, ErrorCodes.Interrupted]);
@@ -26,4 +22,3 @@ const schedule = [

 rollbackIndexTest.runSchedules([schedule]);
 rollbackIndexTest.stop();
-})();

@@ -1,11 +1,7 @@
 /**
  * Tests different permutations of rolling-back index build start and commit oplog entries.
  */
-(function() {
-"use strict";
-
-// for RollbackIndexBuildTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest();

@@ -17,4 +13,3 @@ const indexBuildOps = ["start", "commit"];
 const schedules = RollbackIndexBuildsTest.makeSchedules(rollbackOps, indexBuildOps);
 rollbackIndexTest.runSchedules(schedules);
 rollbackIndexTest.stop();
-})();

@@ -1,11 +1,7 @@
 /**
  * Tests different permutations of rolling-back index build start, commit, and drop oplog entries.
  */
-(function() {
-"use strict";
-
-// for RollbackIndexBuildTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest();

@@ -17,4 +13,3 @@ const indexBuildOps = ["start", "commit", "drop"];
 const schedules = RollbackIndexBuildsTest.makeSchedules(rollbackOps, indexBuildOps);
 rollbackIndexTest.runSchedules(schedules);
 rollbackIndexTest.stop();
-})();

@@ -1,11 +1,7 @@
 /**
  * Test that rolling back an index build, but not collection creation, behaves correctly.
  */
-(function() {
-"use strict";
-
-// For RollbackIndexBuildsTest
-load('jstests/replsets/libs/rollback_index_builds_test.js');
+import {RollbackIndexBuildsTest} from "jstests/replsets/libs/rollback_index_builds_test.js";

 const rollbackIndexTest = new RollbackIndexBuildsTest();

@@ -24,4 +20,3 @@ const schedule = [

 rollbackIndexTest.runSchedules([schedule]);
 rollbackIndexTest.stop();
-})();
@@ -5,8 +5,8 @@
  * requires_replication,
  * ]
  */
-load('jstests/replsets/libs/rollback_test.js');
+import {FeatureFlagUtil} from "jstests/libs/feature_flag_util.js";
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 // Operations that will be present on both nodes, before the common point.
 const dbName = 'test';

@@ -6,11 +6,8 @@
  * uses_transactions,
  * ]
  */
-(function() {
-"use strict";
-
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load("jstests/replsets/libs/rollback_files.js");
 load("jstests/libs/uuid_util.js");

@@ -102,4 +99,3 @@ assert.commandWorked(adminDB.adminCommand({
 }));

 rollbackTest.stop();
-})();

@@ -8,10 +8,8 @@
  * ]
  */
-
-(function() {
-"use strict";
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "rollback_reconstructs_transactions_prepared_before_stable";
@@ -134,4 +132,3 @@ assert.sameMembers(testColl.find().toArray(), [{_id: 0, a: 1}, {_id: 1}, {_id: 2
 assert.eq(testColl.count(), 3);

 rollbackTest.stop();
-}());

@@ -12,11 +12,8 @@
  * ]
  */
-
-(function() {
-"use strict";
-
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "commit_transaction_rollback_recovery_data_already_applied";
@@ -102,4 +99,3 @@ assert.commandWorked(PrepareHelpers.commitTransaction(session, prepareTimestamp)
 assert.eq(testDB[collName].findOne({_id: 1}), {_id: 1, a: 1});

 rollbackTest.stop();
-}());

@@ -5,10 +5,8 @@
  * third attempt.
  */
-
-(function() {
-"use strict";
 load("jstests/libs/fail_point_util.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "rollback_remote_cursor_retry";
 const dbName = testName;
@@ -45,4 +43,3 @@ rollbackHangBeforeStartFailPoint.off();

 rollbackTest.transitionToSteadyStateOperations();
 rollbackTest.stop();
-})();
@@ -4,10 +4,7 @@
  * corruption on the rollback node.
  */
-
-(function() {
-'use strict';
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 let dbName = "rollback_rename_collection_on_sync_source";
 let otherDbName = "rollback_rename_collection_on_sync_source_other";
@@ -74,4 +71,3 @@ rollbackTestAcrossDBs.transitionToSteadyStateOperations();

 // Check the replica set.
 rollbackTestAcrossDBs.stop();
-}());

@@ -1,10 +1,7 @@
 /**
  * Tests that rollback corrects fastcounts even when collections are renamed.
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "rollback_rename_count";
 const dbName = testName;
@@ -56,4 +53,3 @@ assert.eq(coll1.find().itcount(), 2);
 assert.eq(coll2.find().itcount(), 2);

 rollbackTest.stop();
-})();

@@ -10,10 +10,8 @@
  * incompatible_with_gcov,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";

@@ -66,4 +64,3 @@ runRollbackTo(
 runRollbackTo({name: "hangIndexBuildDuringBulkLoadPhaseSecond", logIdWithIndexName: 4924400});

 rollbackTest.stop();
-})();

@@ -10,10 +10,8 @@
  * incompatible_with_gcov,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";

@@ -63,4 +61,3 @@ runRollbackTo(
 runRollbackTo({name: "hangIndexBuildDuringBulkLoadPhaseSecond", logIdWithIndexName: 4924400});

 rollbackTest.stop();
-})();

@@ -10,10 +10,8 @@
  * incompatible_with_gcov,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";

@@ -68,4 +66,3 @@ runRollbackTo("hangAfterSettingUpIndexBuild", 20387);
 runRollbackTo("hangIndexBuildDuringCollectionScanPhaseAfterInsertion", 20386);

 rollbackTest.stop();
-})();

@@ -11,10 +11,8 @@
  * requires_persistence,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";

@@ -69,4 +67,3 @@ runRollbackTo("hangAfterSettingUpIndexBuild", 20387);
 runRollbackTo("hangIndexBuildDuringCollectionScanPhaseAfterInsertion", 20386);

 rollbackTest.stop();
-})();

@@ -10,10 +10,8 @@
  * incompatible_with_gcov,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const rollbackTest = new RollbackTest(jsTestName());
@@ -81,4 +79,3 @@ runRollbackTo({name: "hangIndexBuildDuringBulkLoadPhase", logIdWithIndexName: 49
 runRollbackTo({name: "hangIndexBuildDuringDrainWritesPhaseSecond", logIdWithIndexName: 4841800});

 rollbackTest.stop();
-})();

@@ -10,10 +10,8 @@
  * incompatible_with_gcov,
  * ]
  */
-(function() {
-"use strict";
-
 load('jstests/replsets/libs/rollback_resumable_index_build.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const rollbackTest = new RollbackTest(jsTestName());
@@ -137,4 +135,3 @@ runRollbackTo(
     [{skippedPhaseLogID: 20391}, {skippedPhaseLogID: 20392}]);

 rollbackTest.stop();
-})();
@@ -9,7 +9,7 @@
  * @tags: [multiversion_incompatible]
  */
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load('jstests/libs/parallel_shell_helpers.js');
 load("jstests/libs/fail_point_util.js");
 load("jstests/replsets/rslib.js");

@@ -2,10 +2,7 @@
  * This test serves as a baseline for measuring the performance of the RollbackTest fixture.
  */
-
-(function() {
-'use strict';
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 let rollbackTest = new RollbackTest();
 rollbackTest.transitionToRollbackOperations();
@@ -13,4 +10,3 @@ rollbackTest.transitionToSyncSourceOperationsBeforeRollback();
 rollbackTest.transitionToSyncSourceOperationsDuringRollback();
 rollbackTest.transitionToSteadyStateOperations();
 rollbackTest.stop();
-}());

@@ -4,10 +4,7 @@
  *
  * @tags: [uses_transactions]
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const testName = "rollback_transactions_count";
 const dbName = testName;
@@ -63,4 +60,3 @@ assert.eq(sessionColl1.find().itcount(), 3);
 assert.eq(primary.getDB('config')['transactions'].find().itcount(), 2);

 rollbackTest.stop();
-})();

@@ -5,10 +5,7 @@
  *
  * @tags: [requires_persistence]
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 TestData.rollbackShutdowns = true;
 // Only clean shutdowns should be allowed.
@@ -36,4 +33,3 @@ rollbackTest.transitionToSteadyStateOperations();
 assert.eq(rawMongoProgramOutput().search(/Detected unclean shutdown/), -1);

 rollbackTest.stop();
-}());

@@ -4,10 +4,7 @@
  * requires_replication,
  * ]
  */
-(function() {
-'use strict';
-
-load('jstests/replsets/libs/rollback_test.js');
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";
 load('jstests/replsets/libs/rollback_files.js');
 load("jstests/libs/uuid_util.js");

@@ -60,4 +57,3 @@ const uuid = getUUIDFromListCollections(rollbackTest.getPrimary().getDB(dbName),
 checkRollbackFiles(replTest.getDbPath(rollbackNode), collName, uuid, expectedDocs);

 rollbackTest.stop();
-})();
@@ -12,10 +12,8 @@

 TestData.skipCheckDBHashes = true;

-(function() {
-"use strict";
 load("jstests/core/txns/libs/prepare_helpers.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "rollback_via_refetch_commit_transaction";
@@ -82,4 +80,3 @@ assert.soon(function() {

 // Transaction is still in prepared state and validation will be blocked, so skip it.
 rst.stopSet(undefined, undefined, {skipValidation: true});
-}());

@@ -1,11 +1,8 @@
 /**
  * Test of the RollbackTest helper library.
  */
-(function() {
-"use strict";
-
 load("jstests/replsets/rslib.js");
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 let checkDataConsistencyCallCount = 0;
 let stopSetCallCount = 0;
@@ -36,4 +33,3 @@ rollbackTest.stop();

 assert.eq(checkDataConsistencyCallCount, 1);
 assert.eq(stopSetCallCount, 1);
-})();

@@ -27,7 +27,7 @@ import {
 load("jstests/libs/fail_point_util.js");
 load("jstests/libs/parallelTester.js");
 load("jstests/libs/uuid_util.js");
-load('jstests/replsets/libs/two_phase_drops.js');
+import {TwoPhaseDropCollectionTest} from "jstests/replsets/libs/two_phase_drops.js";
 load("jstests/replsets/rslib.js");  // 'createRstArgs'

 function runDropTest({failPointName, failPointData, expectedLog, createNew}) {

@@ -14,10 +14,7 @@
  * requires_persistence,
  * ]
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 let rst = new ReplSetTest({
     name: "history_rollback_test",
@@ -61,4 +58,3 @@ pinnedTs = serverStatus["wiredTiger"]["snapshot-window-settings"]["min pinned ti
 assert.eq(maxTimestampValue, pinnedTs);

 rst.stopSet();
-})();

@@ -1,10 +1,7 @@
 /*
  * This test makes sure the 'validate' command fails correctly during rollback.
  */
-(function() {
-"use strict";
-
-load("jstests/replsets/libs/rollback_test.js");
+import {RollbackTest} from "jstests/replsets/libs/rollback_test.js";

 const dbName = "test";
 const collName = "coll";
@@ -38,4 +35,3 @@ rollbackTest.transitionToSteadyStateOperations();

 // Check the replica set.
 rollbackTest.stop();
-}());