mirror of https://github.com/mongodb/mongo
SERVER-73485 Remove jstests/parallel/basic* and associated dead code
(cherry picked from commit 456f73fc1f)
GitOrigin-RevId: 390d3bdce8d5b6fe2302bf7c44152910bc20cd18
parent b30b5ef2f8
commit fe88dcb51a
@@ -1,10 +1,3 @@
-# If the failure here is due to a test unexpected being run,
-# it may be due to the parallel suite not honoring feature flag tags.
-# If you want to skip such tests in parallel suite,
-# please add them to the exclusion list at
-# https://github.com/mongodb/mongo/blob/eb75b6ccc62f7c8ea26a57c1b5eb96a41809396a/jstests/libs/parallelTester.js#L149.
-
-
 test_kind: js_test
 
 selector:
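
For context, the removed comment described the workaround for the parallel suite not honoring feature flag tags: a test had to be excluded by filename in the skipTests map inside jstests/libs/parallelTester.js (the map removed in the next hunk). A minimal, self-contained sketch of that mechanism follows; the test name my_feature_flag_test.js is hypothetical.

    // Sketch of the exclusion mechanism: makeKeys turns an array of filenames into an
    // object whose keys support fast "in" lookups; skipTests held the excluded names.
    var makeKeys = function(a) {
        var ret = {};
        for (var i in a) {
            ret[a[i]] = 1;
        }
        return ret;
    };
    var skipTests = makeKeys(["my_feature_flag_test.js"]);  // hypothetical excluded test
    assert("my_feature_flag_test.js" in skipTests);
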
@@ -128,248 +128,6 @@ if (typeof _threadInject != "undefined") {
         assert.parallelTests(this.params, msg);
     };
 
-    // creates lists of tests from jstests dir in a format suitable for use by
-    // ParallelTester.fileTester. The lists will be in random order.
-    // n: number of lists to split these tests into
-    ParallelTester.createJstestsLists = function(n) {
-        var params = new Array();
-        for (var i = 0; i < n; ++i) {
-            params.push([]);
-        }
-
-        var makeKeys = function(a) {
-            var ret = {};
-            for (var i in a) {
-                ret[a[i]] = 1;
-            }
-            return ret;
-        };
-
-        // some tests can't run in parallel with most others
-        var skipTests = makeKeys([
-            "indexb.js",
-
-            // Tests that set a parameter that causes the server to ignore
-            // long index keys.
-            "index_bigkeys_nofail.js",
-            "index_bigkeys_validation.js",
-
-            // Tests that set the notablescan parameter, which makes queries fail rather than use a
-            // non-indexed plan.
-            "notablescan.js",
-            "notablescan_capped.js",
-
-            "mr_fail_invalid_js.js",
-            "run_program1.js",
-            "bench_test1.js",
-
-            // These tests use getLog to examine the logs. Tests which do so shouldn't be run in
-            // this suite because any test being run at the same time could conceivably spam the
-            // logs so much that the line they are looking for has been rotated off the server's
-            // in-memory buffer of log messages, which only stores the 1024 most recent operations.
-            "comment_field.js",
-            "getlog2.js",
-            "logprocessdetails.js",
-            "queryoptimizera.js",
-            "log_remote_op_wait.js",
-
-            "connections_opened.js", // counts connections, globally
-            "opcounters_write_cmd.js",
-            "set_param1.js", // changes global state
-            "geo_update_btree2.js", // SERVER-11132 test disables table scans
-            "update_setOnInsert.js", // SERVER-9982
-            "max_time_ms.js", // Sensitive to query execution time, by design
-            "autocomplete.js", // Likewise.
-
-            // This overwrites MinKey/MaxKey's singleton which breaks
-            // any other test that uses MinKey/MaxKey
-            "type6.js",
-
-            // Assumes that other tests are not creating cursors.
-            "kill_cursors.js",
-
-            // Assumes that other tests are not starting operations.
-            "currentop_shell.js",
-
-            // These tests check global command counters.
-            "find_and_modify_metrics.js",
-            "update_metrics.js",
-
-            // Views tests
-            "views/invalid_system_views.js", // Puts invalid view definitions in system.views.
-            "views/views_all_commands.js", // Drops test DB.
-            "views/view_with_invalid_dbname.js", // Puts invalid view definitions in system.views.
-
-            // This test causes collMod commands to hang, which interferes with other tests running
-            // collMod.
-            "crud_ops_do_not_throw_locktimeout.js",
-
-            // Can fail if isMaster takes too long on a loaded machine.
-            "dbadmin.js",
-
-            // Other tests will fail while the requireApiVersion server parameter is set.
-            "require_api_version.js",
-
-            // This test updates global memory usage counters in the bucket catalog in a way that
-            // may affect other time-series tests running concurrently.
-            "timeseries/timeseries_idle_buckets.js",
-
-            // Assumes that other tests are not creating API version 1 incompatible data.
-            "validate_db_metadata_command.js",
-
-            // The tests in 'bench_test*.js' files use 'benchRun()'. The main purpose of
-            // 'benchRun()' is for performance testing and the 'benchRun()' implementation itself
-            // launches multiple threads internally, it's not necessary to keep 'bench_test*.js'
-            // within the parallel test job.
-            "bench_test1.js",
-            "bench_test2.js",
-
-            // These tests cause deletes and updates to hang, which may affect other tests running
-            // concurrently.
-            "timeseries/timeseries_delete_hint.js",
-            "timeseries/timeseries_update_hint.js",
-            "timeseries/timeseries_delete_concurrent.js",
-            "timeseries/timeseries_update_concurrent.js",
-
-            // These tests rely on no writes happening that would force oplog truncation.
-            "write_change_stream_pit_preimage_in_transaction.js",
-            "write_change_stream_pit_preimage.js",
-
-            // These tests convert a non-unique index to a unique one, which is not compatible
-            // when running against inMemory storage engine variants. Since this test only fails
-            // in the parallel tester, which does not respect test tags, we omit the tests
-            // instead of manually checking TestData values in the mongo shell for the Evergreen
-            // variant.
-            "collmod_convert_index_uniqueness.js",
-            "collmod_convert_to_unique_apply_ops.js",
-            "collmod_convert_to_unique_violations.js",
-            "collmod_convert_to_unique_violations_size_limit.js",
-
-            // This test sets a server parameter.
-            "group_lookup_with_canonical_query_prefix.js",
-
-            // This test requires latches to be enabled, which isn't true for all variants.
-            "latch_analyzer.js",
-            "currentop_waiting_for_latch.js"
-        ]);
-
-        // Get files, including files in subdirectories.
-        var getFilesRecursive = function(dir) {
-            var files = listFiles(dir);
-            var fileList = [];
-            files.forEach(file => {
-                if (file.isDirectory) {
-                    getFilesRecursive(file.name).forEach(subDirFile => fileList.push(subDirFile));
-                } else {
-                    fileList.push(file);
-                }
-            });
-            return fileList;
-        };
-
-        // Transactions are not supported on standalone nodes so we do not run them here.
-        // NOTE: We need to take substring of the full test path to ensure that 'jstests/core/' is
-        // not included.
-        const txnsTestFiles =
-            getFilesRecursive("jstests/core/txns/")
-                .map(fullPathToTest => fullPathToTest.name.substring("jstests/core/".length));
-        Object.assign(skipTests, makeKeys(txnsTestFiles));
-
-        var parallelFilesDir = "jstests/core";
-
-        // some tests can't be run in parallel with each other
-        var serialTestsArr = [
-            // These tests use fsyncLock.
-            parallelFilesDir + "/fsync.js",
-            parallelFilesDir + "/currentop.js",
-            parallelFilesDir + "/killop_drop_collection.js",
-
-            // These tests expect the profiler to be on or off at specific points. They should not
-            // be run in parallel with tests that perform fsyncLock. User operations skip writing to
-            // the system.profile collection while the server is fsyncLocked.
-            //
-            // Most profiler tests can be run in parallel with each other as they use test-specific
-            // databases, with the exception of tests which modify slowms or the profiler's sampling
-            // rate, since those affect profile settings globally.
-            parallelFilesDir + "/apitest_db_profile_level.js",
-            parallelFilesDir + "/geo_s2cursorlimitskip.js",
-            parallelFilesDir + "/profile1.js",
-            parallelFilesDir + "/profile2.js",
-            parallelFilesDir + "/profile3.js",
-            parallelFilesDir + "/profile_agg.js",
-            parallelFilesDir + "/profile_count.js",
-            parallelFilesDir + "/profile_delete.js",
-            parallelFilesDir + "/profile_distinct.js",
-            parallelFilesDir + "/profile_find.js",
-            parallelFilesDir + "/profile_findandmodify.js",
-            parallelFilesDir + "/profile_getmore.js",
-            parallelFilesDir + "/profile_hide_index.js",
-            parallelFilesDir + "/profile_insert.js",
-            parallelFilesDir + "/profile_list_collections.js",
-            parallelFilesDir + "/profile_list_indexes.js",
-            parallelFilesDir + "/profile_mapreduce.js",
-            parallelFilesDir + "/profile_no_such_db.js",
-            parallelFilesDir + "/profile_query_hash.js",
-            parallelFilesDir + "/profile_sampling.js",
-            parallelFilesDir + "/profile_update.js",
-            parallelFilesDir + "/cached_plan_trial_does_not_discard_work.js",
-            parallelFilesDir + "/timeseries/bucket_unpacking_with_sort_plan_cache.js",
-
-            // These tests rely on a deterministically refreshable logical session cache. If they
-            // run in parallel, they could interfere with the cache and cause failures.
-            parallelFilesDir + "/list_all_local_sessions.js",
-            parallelFilesDir + "/list_all_sessions.js",
-            parallelFilesDir + "/list_sessions.js",
-        ];
-        var serialTests = makeKeys(serialTestsArr);
-
-        // prefix the first thread with the serialTests
-        // (which we will exclude from the rest of the threads below)
-        params[0] = serialTestsArr;
-        var files = getFilesRecursive(parallelFilesDir);
-        files = Array.shuffle(files);
-
-        var i = 0;
-        files.forEach(function(x) {
-            if ((/[\/\\]_/.test(x.name)) || (!/\.js$/.test(x.name)) ||
-                (x.name.match(parallelFilesDir + "/(.*\.js)")[1] in skipTests) ||  //
-                (x.name in serialTests)) {
-                print(" >>>>>>>>>>>>>>> skipping " + x.name);
-                return;
-            }
-            // add the test to run in one of the threads.
-            params[i % n].push(x.name);
-            ++i;
-        });
-
-        // randomize ordering of the serialTests
-        params[0] = Array.shuffle(params[0]);
-
-        for (var i in params) {
-            params[i].unshift(i);
-        }
-
-        return params;
-    };
-
-    // runs a set of test files
-    // first argument is an identifier for this tester, remaining arguments are file names
-    ParallelTester.fileTester = function() {
-        var args = Array.from(arguments);
-        var suite = args.shift();
-        args.forEach(function(x) {
-            print(" S" + suite + " Test : " + x + " ...");
-            var time = Date.timeFunc(function() {
-                // Create a new connection to the db for each file. If tests share the same
-                // connection it can create difficult to debug issues.
-                db = new Mongo(db.getMongo().host).getDB(db.getName());
-                gc();
-                load(x);
-            }, 1);
-            print(" S" + suite + " Test : " + x + " " + time + "ms");
-        });
-    };
-
     // params: array of arrays, each element of which consists of a function followed
     // by zero or more arguments to that function. Each function and its arguments will
     // be called in a separate thread.
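
The trailing comment above documents the contract that survives the removal: each entry handed to the tester is a function followed by the arguments it should be called with, and each entry runs in its own thread. A rough, self-contained sketch of driving ParallelTester directly under that contract; the worker function and the collection name "parallel_demo" are illustrative, not part of the original file.

    load('jstests/libs/parallelTester.js');

    var t = new ParallelTester();
    // Queue one worker: per the comment above, add(fn, args) queues the function
    // together with the arguments it will be called with in a separate thread.
    t.add(function(host, collName, n) {
        // Create a dedicated connection inside the worker, mirroring how fileTester
        // reconnected rather than reusing a shared connection.
        var coll = new Mongo(host).getDB("test")[collName];
        for (var i = 0; i < n; ++i) {
            coll.insert({_id: i});
        }
    }, [db.getMongo().host, "parallel_demo", 100]);
    t.run("parallel insert worker failed");  // dispatches every queued entry in its own thread
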
@@ -1,22 +0,0 @@
-// perform basic js tests in parallel
-
-load('jstests/libs/parallelTester.js');
-
-Random.setRandomSeed();
-
-var params = ParallelTester.createJstestsLists(4);
-var t = new ParallelTester();
-for (i in params) {
-    t.add(ParallelTester.fileTester, params[i]);
-}
-
-try {
-    t.run("one or more tests failed");
-} finally {
-    print(
-        "If the failure here is due to a test unexpected being run, " +
-        "it may be due to the parallel suite not honoring feature flag tags. " +
-        "If you want to skip such tests in parallel suite, " +
-        "please add them to the exclusion list at " +
-        "https://github.com/mongodb/mongo/blob/eb75b6ccc62f7c8ea26a57c1b5eb96a41809396a/jstests/libs/parallelTester.js#L149.");
-}
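
For reference on the removed wiring: createJstestsLists(4) produced four lists of test paths, placed all serial-only tests in the first list, and prefixed every list with its thread index (a string, since it comes from a for-in loop), which fileTester then shifted off as the suite identifier. A rough illustration of the resulting shape; the specific file paths are examples only and the real order was randomized.

    // Approximate shape of createJstestsLists(4); paths are illustrative.
    var params = [
        ["0", "jstests/core/fsync.js", "jstests/core/profile1.js" /* serial tests land here */],
        ["1", "jstests/core/andor.js" /* , ... */],
        ["2", "jstests/core/basic1.js" /* , ... */],
        ["3", "jstests/core/count.js" /* , ... */]
    ];
    // So t.add(ParallelTester.fileTester, params[i]) ultimately called
    // fileTester("0", "jstests/core/fsync.js", ...), one invocation per thread.
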
@@ -1,39 +0,0 @@
-// perform basic js tests in parallel & some other tasks as well
-load('jstests/libs/parallelTester.js');
-
-var c = db.jstests_parallel_basicPlus;
-c.drop();
-
-Random.setRandomSeed();
-
-var params = ParallelTester.createJstestsLists(4);
-var t = new ParallelTester();
-for (i in params) {
-    t.add(ParallelTester.fileTester, params[i]);
-}
-
-for (var i = 4; i < 8; ++i) {
-    var g = new EventGenerator(i, "jstests_parallel_basicPlus", Random.randInt(20));
-    for (var j = (i - 4) * 3000; j < (i - 3) * 3000; ++j) {
-        var expected = j - ((i - 4) * 3000);
-        g.addCheckCount(expected,
-                        {_id: {$gte: ((i - 4) * 3000), $lt: ((i - 3) * 3000)}},
-                        expected % 1000 == 0,
-                        expected % 500 == 0);
-        g.addInsert({_id: j});
-        // Add currentOp commands running in parallel. Historically there have been many race
-        // conditions between various commands and the currentOp command.
-        g.addCurrentOp();
-    }
-    t.add(EventGenerator.dispatch, g.getEvents());
-}
-try {
-    t.run("one or more tests failed");
-} finally {
-    print(
-        "If the failure here is due to a test unexpected being run, " +
-        "it may be due to the parallel suite not honoring feature flag tags. " +
-        "If you want to skip such tests in parallel suite, " +
-        "please add them to the exclusion list at " +
-        "https://github.com/mongodb/mongo/blob/eb75b6ccc62f7c8ea26a57c1b5eb96a41809396a/jstests/libs/parallelTester.js#L149.");
-}
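
The EventGenerator portion of the deleted test interleaved a scripted single-collection workload (inserts, count checks, and currentOp commands) with the file-based test threads. Assuming EventGenerator is still provided by jstests/libs/parallelTester.js, a minimal sketch of that pattern looks roughly like this; the collection name "jstests_parallel_demo" is an example, and the trailing addCheckCount flags are passed through the same way the deleted test passed them.

    load('jstests/libs/parallelTester.js');
    Random.setRandomSeed();

    var t = new ParallelTester();
    var g = new EventGenerator(4, "jstests_parallel_demo", Random.randInt(20));
    g.addInsert({_id: 0});                              // queue an insert event
    g.addCheckCount(1, {_id: {$gte: 0}}, true, false);  // then expect one matching document
    g.addCurrentOp();                                   // interleave a currentOp command
    t.add(EventGenerator.dispatch, g.getEvents());      // replay the queued events in their own thread
    t.run("EventGenerator workload failed");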