153 files changed, 505 insertions, 505 deletions
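This commit appears to be formatting-only: it turns clang-format on for JavaScript in jstests/.clang-format (dropping the temporary DisableFormat: true added under SERVER-22338 and enabling AlignTrailingComments), and every other hunk below only re-spaces trailing // comments in the test files. As a rough, hypothetical illustration of the effect (the exact columns are chosen by clang-format and are not preserved in the flattened hunks below), a pair of lines such as

load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config

would be rewritten so that the trailing comments start in the same column:

load('jstests/concurrency/fsm_libs/extend_workload.js');  // for extendWorkload
load('jstests/concurrency/fsm_workloads/agg_base.js');    // for $config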
diff --git a/jstests/.clang-format b/jstests/.clang-format
index a3b01b7f47d..04a485af039 100644
--- a/jstests/.clang-format
+++ b/jstests/.clang-format
@@ -63,15 +63,13 @@ UseTab: Never
 ---
 Language: JavaScript
-# Disable JS formatting until the JS linter is in place (SERVER-22338)
-DisableFormat: true
 # BasedOnStyle: Google
 # ---
 AccessModifierOffset: -1
 AlignAfterOpenBracket: true
 AlignEscapedNewlinesLeft: true
 AlignOperands: false
-AlignTrailingComments: false
+AlignTrailingComments: true
 AllowAllParametersOfDeclarationOnNextLine: true
 AllowShortBlocksOnASingleLine: false
 AllowShortCaseLabelsOnASingleLine: false
@@ -94,7 +92,7 @@ ConstructorInitializerIndentWidth: 4
 ContinuationIndentWidth: 4
 Cpp11BracedListStyle: true
 DerivePointerAlignment: true
-#DisableFormat: false
+DisableFormat: false
 ExperimentalAutoDetectBinPacking: false
 ForEachMacros: [ ]
 IndentCaseLabels: true
diff --git a/jstests/aggregation/bugs/reverseArray.js b/jstests/aggregation/bugs/reverseArray.js
index 8cece4bdcb9..0fa4010654b 100644
--- a/jstests/aggregation/bugs/reverseArray.js
+++ b/jstests/aggregation/bugs/reverseArray.js
@@ -1,7 +1,7 @@
 // SERVER-23029 added a new expression, $reverseArray, which consumes an array or a nullish value
 // and produces either the reversed version of that array, or null. In this test file, we check the
 // behavior and error cases.
-load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
+load("jstests/aggregation/extras/utils.js"); // For assertErrorCode.
 (function() {
 "use strict";
diff --git a/jstests/aggregation/bugs/server3253.js b/jstests/aggregation/bugs/server3253.js
index fe47bba565e..1adab9ca977 100644
--- a/jstests/aggregation/bugs/server3253.js
+++ b/jstests/aggregation/bugs/server3253.js
@@ -30,7 +30,7 @@ function test(input, pipeline, expected) {
 var cursor = input.aggregate(pipeline);
- assert.eq(cursor.itcount(), 0); // empty cursor returned
+ assert.eq(cursor.itcount(), 0); // empty cursor returned
 assert.eq(output.find().toArray(), expected); // correct results
 var outputIndexes = getOutputIndexes();
 assert.eq(outputIndexes.length, indexes.length); // number of indexes maintained
diff --git a/jstests/aggregation/bugs/server6189.js b/jstests/aggregation/bugs/server6189.js
index 72f3e002a5a..4cb615f0659 100644
--- a/jstests/aggregation/bugs/server6189.js
+++ b/jstests/aggregation/bugs/server6189.js
@@ -91,9 +91,9 @@ test(ISODate('1960-01-02 03:04:05.006Z'), true);
 // Testing special rounding rules for seconds
 test(ISODate('1960-01-02 03:04:04.999Z'), false); // second = 4
-test(ISODate('1960-01-02 03:04:05.000Z'), true); // second = 5
-test(ISODate('1960-01-02 03:04:05.001Z'), true); // second = 5
-test(ISODate('1960-01-02 03:04:05.999Z'), true); // second = 5
+test(ISODate('1960-01-02 03:04:05.000Z'), true); // second = 5
+test(ISODate('1960-01-02 03:04:05.001Z'), true); // second = 5
+test(ISODate('1960-01-02 03:04:05.999Z'), true); // second = 5
 // Test date before 1900 (negative tm_year values from gmtime)
 test(ISODate('1860-01-02 03:04:05.006Z'), false);
diff --git a/jstests/aggregation/bugs/server9840.js b/jstests/aggregation/bugs/server9840.js
index 130066d8af4..b3a73cfc3d7 100644
--- a/jstests/aggregation/bugs/server9840.js
+++ b/jstests/aggregation/bugs/server9840.js
@@ -78,7 +78,7 @@ test(
 }
 }
 }
- }, // not commutative!
 4); // 10-6 not 6-10 or 6-6
 // unicode is allowed
diff --git a/jstests/aggregation/testall.js b/jstests/aggregation/testall.js
index 2f17d955566..11f2936abfc 100644
--- a/jstests/aggregation/testall.js
+++ b/jstests/aggregation/testall.js
@@ -802,7 +802,7 @@ var g6 = db.runCommand({
 {$sort: {author: -1}},
 {
 $group: {
- _id: "authors", /* constant string, *not* a field reference */
+ _id: "authors", /* constant string, *not* a field reference */
 firstAuthor: {$last: "$author"}, /* note reverse sort above */
 lastAuthor: {$first: "$author"}, /* note reverse sort above */
 count: {$sum: 1}
diff --git a/jstests/auth/copyauth.js b/jstests/auth/copyauth.js
index a8083df400e..3627038abbd 100644
--- a/jstests/auth/copyauth.js
+++ b/jstests/auth/copyauth.js
@@ -1,7 +1,7 @@
 // Test copyDatabase command with various combinations of authed/unauthed and single node/replica
 // set source and dest.
-TestData.authMechanism = "SCRAM-SHA-1"; // SERVER-11428
+TestData.authMechanism = "SCRAM-SHA-1"; // SERVER-11428
 DB.prototype._defaultAuthenticationMechanism = "SCRAM-SHA-1"; // SERVER-11428
 var baseName = "jstests_clone_copyauth";
diff --git a/jstests/auth/log_user_basic.js b/jstests/auth/log_user_basic.js
index 06e74ea3109..817d83ac519 100644
--- a/jstests/auth/log_user_basic.js
+++ b/jstests/auth/log_user_basic.js
@@ -59,9 +59,9 @@ if (0) {
 */
 var doTest = function(conn1, conn2) {
 var connInfo1 = {
- id: null, // thread id of this connection
+ id: null, // thread id of this connection
 mongo: conn1, // connection object
- users: {} // contains authenticated users represented as a map of db to user names.
+ users: {} // contains authenticated users represented as a map of db to user names.
 };
 var connInfo2 = {
diff --git a/jstests/concurrency/fsm_all_replication.js b/jstests/concurrency/fsm_all_replication.js
index 88b01bc2231..5850c3054da 100644
--- a/jstests/concurrency/fsm_all_replication.js
+++ b/jstests/concurrency/fsm_all_replication.js
@@ -7,7 +7,7 @@ var dir = 'jstests/concurrency/fsm_workloads';
 var blacklist = [
 // Disabled due to MongoDB restrictions and/or workload restrictions
 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
 ].map(function(file) {
 return dir + '/' + file;
 });
diff --git a/jstests/concurrency/fsm_all_sharded_replication.js b/jstests/concurrency/fsm_all_sharded_replication.js
index b925a868b50..e1b3741583e 100644
--- a/jstests/concurrency/fsm_all_sharded_replication.js
+++ b/jstests/concurrency/fsm_all_sharded_replication.js
@@ -6,10 +6,10 @@ var dir = 'jstests/concurrency/fsm_workloads';
 var blacklist = [
 // Disabled due to known bugs
- 'distinct.js', // SERVER-13116 distinct isn't sharding aware
- 'distinct_noindex.js', // SERVER-13116 distinct isn't sharding aware
+ 'distinct.js', // SERVER-13116 distinct isn't sharding aware
+ 'distinct_noindex.js', // SERVER-13116 distinct isn't sharding aware
 'distinct_projection.js', // SERVER-13116 distinct isn't sharding aware
- 'drop_database.js', // SERVER-17397 Drops of sharded namespaces may not fully succeed
+ 'drop_database.js', // SERVER-17397 Drops of sharded namespaces may not fully succeed
 // Disabled due to SERVER-3645, '.count() can be wrong on sharded collections'.
 // This bug is problematic for these workloads because they assert on count() values:
@@ -39,22 +39,22 @@ var blacklist = [
 'auth_drop_role.js',
 'auth_drop_user.js',
- 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'compact.js', // compact can only be run against a standalone mongod
+ 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'compact.js', // compact can only be run against a standalone mongod
 'compact_simultaneous_padding_bytes.js', // compact can only be run against a mongod
- 'convert_to_capped_collection.js', // convertToCapped can't be run on mongos processes
+ 'convert_to_capped_collection.js', // convertToCapped can't be run on mongos processes
 'convert_to_capped_collection_index.js', // convertToCapped can't be run on mongos processes
- 'findAndModify_remove_queue.js', // remove cannot be {} for findAndModify
- 'findAndModify_update_collscan.js', // findAndModify requires a shard key
- 'findAndModify_update_queue.js', // findAndModify requires a shard key
- 'group.js', // the group command cannot be issued against a sharded cluster
- 'group_cond.js', // the group command cannot be issued against a sharded cluster
+ 'findAndModify_remove_queue.js', // remove cannot be {} for findAndModify
+ 'findAndModify_update_collscan.js', // findAndModify requires a shard key
+ 'findAndModify_update_queue.js', // findAndModify requires a shard key
+ 'group.js', // the group command cannot be issued against a sharded cluster
+ 'group_cond.js', // the group command cannot be issued against a sharded cluster
 'indexed_insert_eval.js', // eval doesn't work with sharded collections
 'indexed_insert_eval_nolock.js', // eval doesn't work with sharded collections
 'plan_cache_drop_database.js', // cannot ensureIndex after dropDatabase without sharding first
- 'remove_single_document.js', // our .remove(query, {justOne: true}) calls lack shard keys
- 'remove_single_document_eval.js', // eval doesn't work with sharded collections
+ 'remove_single_document.js', // our .remove(query, {justOne: true}) calls lack shard keys
+ 'remove_single_document_eval.js', // eval doesn't work with sharded collections
 'remove_single_document_eval_nolock.js', // eval doesn't work with sharded collections
 // The rename_* workloads are disabled since renameCollection doesn't work with sharded
@@ -68,11 +68,11 @@ var blacklist = [
 'rename_collection_dbname_droptarget.js',
 'rename_collection_droptarget.js',
- 'update_simple_eval.js', // eval doesn't work with sharded collections
- 'update_simple_eval_nolock.js', // eval doesn't work with sharded collections
- 'update_upsert_multi.js', // our update queries lack shard keys
+ 'update_simple_eval.js', // eval doesn't work with sharded collections
+ 'update_simple_eval_nolock.js', // eval doesn't work with sharded collections
+ 'update_upsert_multi.js', // our update queries lack shard keys
 'update_upsert_multi_noindex.js', // our update queries lack shard keys
- 'upsert_where.js', // cannot use upsert command with $where with sharded collections
+ 'upsert_where.js', // cannot use upsert command with $where with sharded collections
 'yield_and_hashed.js', // stagedebug can only be run against a standalone mongod
 'yield_and_sorted.js', // stagedebug can only be run against a standalone mongod
 ].map(function(file) {
diff --git a/jstests/concurrency/fsm_all_sharded_replication_with_balancer.js b/jstests/concurrency/fsm_all_sharded_replication_with_balancer.js
index 20f4fca18ba..c4437fdd7dd 100644
--- a/jstests/concurrency/fsm_all_sharded_replication_with_balancer.js
+++ b/jstests/concurrency/fsm_all_sharded_replication_with_balancer.js
@@ -6,10 +6,10 @@ var dir = 'jstests/concurrency/fsm_workloads';
 var blacklist = [
 // Disabled due to known bugs
- 'distinct.js', // SERVER-13116 distinct isn't sharding aware
- 'distinct_noindex.js', // SERVER-13116 distinct isn't sharding aware
+ 'distinct.js', // SERVER-13116 distinct isn't sharding aware
+ 'distinct_noindex.js', // SERVER-13116 distinct isn't sharding aware
 'distinct_projection.js', // SERVER-13116 distinct isn't sharding aware
- 'drop_database.js', // SERVER-17397 Drops of sharded namespaces may not fully succeed
+ 'drop_database.js', // SERVER-17397 Drops of sharded namespaces may not fully succeed
 'remove_where.js', // SERVER-14669 Multi-removes that use $where miscount removed documents
 // Disabled due to SERVER-3645, '.count() can be wrong on sharded collections'.
@@ -44,22 +44,22 @@ var blacklist = [
 'auth_drop_role.js',
 'auth_drop_user.js',
- 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'compact.js', // compact can only be run against a standalone mongod
+ 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'compact.js', // compact can only be run against a standalone mongod
 'compact_simultaneous_padding_bytes.js', // compact can only be run against a mongod
- 'convert_to_capped_collection.js', // convertToCapped can't be run on mongos processes
+ 'convert_to_capped_collection.js', // convertToCapped can't be run on mongos processes
 'convert_to_capped_collection_index.js', // convertToCapped can't be run on mongos processes
- 'findAndModify_remove_queue.js', // remove cannot be {} for findAndModify
- 'findAndModify_update_collscan.js', // findAndModify requires a shard key
- 'findAndModify_update_queue.js', // findAndModify requires a shard key
- 'group.js', // the group command cannot be issued against a sharded cluster
- 'group_cond.js', // the group command cannot be issued against a sharded cluster
+ 'findAndModify_remove_queue.js', // remove cannot be {} for findAndModify
+ 'findAndModify_update_collscan.js', // findAndModify requires a shard key
+ 'findAndModify_update_queue.js', // findAndModify requires a shard key
+ 'group.js', // the group command cannot be issued against a sharded cluster
+ 'group_cond.js', // the group command cannot be issued against a sharded cluster
 'indexed_insert_eval.js', // eval doesn't work with sharded collections
 'indexed_insert_eval_nolock.js', // eval doesn't work with sharded collections
 'plan_cache_drop_database.js', // cannot ensureIndex after dropDatabase without sharding first
- 'remove_single_document.js', // our .remove(query, {justOne: true}) calls lack shard keys
- 'remove_single_document_eval.js', // eval doesn't work with sharded collections
+ 'remove_single_document.js', // our .remove(query, {justOne: true}) calls lack shard keys
+ 'remove_single_document_eval.js', // eval doesn't work with sharded collections
 'remove_single_document_eval_nolock.js', // eval doesn't work with sharded collections
 // The rename_* workloads are disabled since renameCollection doesn't work with sharded
@@ -73,11 +73,11 @@ var blacklist = [
 'rename_collection_dbname_droptarget.js',
 'rename_collection_droptarget.js',
- 'update_simple_eval.js', // eval doesn't work with sharded collections
- 'update_simple_eval_nolock.js', // eval doesn't work with sharded collections
- 'update_upsert_multi.js', // our update queries lack shard keys
+ 'update_simple_eval.js', // eval doesn't work with sharded collections
+ 'update_simple_eval_nolock.js', // eval doesn't work with sharded collections
+ 'update_upsert_multi.js', // our update queries lack shard keys
 'update_upsert_multi_noindex.js', // our update queries lack shard keys
- 'upsert_where.js', // cannot use upsert command with $where with sharded collections
+ 'upsert_where.js', // cannot use upsert command with $where with sharded collections
 'yield_and_hashed.js', // stagedebug can only be run against a standalone mongod
 'yield_and_sorted.js', // stagedebug can only be run against a standalone mongod
 ].map(function(file) {
diff --git a/jstests/concurrency/fsm_all_simultaneous.js b/jstests/concurrency/fsm_all_simultaneous.js
index 4eada69c5c0..bf4fdfe3d02 100644
--- a/jstests/concurrency/fsm_all_simultaneous.js
+++ b/jstests/concurrency/fsm_all_simultaneous.js
@@ -13,7 +13,7 @@ var blacklist = [
 'update_inc_capped.js',
 'agg_group_external.js', // uses >100MB of data, which can overwhelm test hosts
- 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
+ 'agg_sort_external.js', // uses >100MB of data, which can overwhelm test hosts
 ].map(function(file) {
 return dir + '/' + file;
 });
diff --git a/jstests/concurrency/fsm_example.js b/jstests/concurrency/fsm_example.js
index 79cc891609c..45b8ac5b47a 100644
--- a/jstests/concurrency/fsm_example.js
+++ b/jstests/concurrency/fsm_example.js
@@ -77,9 +77,9 @@ var $config = (function() {
 startState: 'init', // optional, default 'init'
 states: states,
 transitions: transitions,
- setup: setup, // optional, default empty function
+ setup: setup, // optional, default empty function
 teardown: teardown, // optional, default empty function
- data: data // optional, default empty object
+ data: data // optional, default empty object
 };
 })();
diff --git a/jstests/concurrency/fsm_example_inheritance.js b/jstests/concurrency/fsm_example_inheritance.js
index 702f0208dda..01d95a0d9d6 100644
--- a/jstests/concurrency/fsm_example_inheritance.js
+++ b/jstests/concurrency/fsm_example_inheritance.js
@@ -1,7 +1,7 @@
 'use strict';
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_example.js'); // for $config
+load('jstests/concurrency/fsm_example.js'); // for $config
 // extendWorkload takes a $config object and a callback, and returns an extended $config object.
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_libs/thread_mgr.js b/jstests/concurrency/fsm_libs/thread_mgr.js
index 283da7ab7ee..145fb57146e 100644
--- a/jstests/concurrency/fsm_libs/thread_mgr.js
+++ b/jstests/concurrency/fsm_libs/thread_mgr.js
@@ -1,6 +1,6 @@
 'use strict';
-load('jstests/libs/parallelTester.js'); // for ScopedThread and CountDownLatch
+load('jstests/libs/parallelTester.js'); // for ScopedThread and CountDownLatch
 load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread
 /**
@@ -209,7 +209,7 @@ var ThreadManager = function(clusterOptions, executionMode = {composed: false})
 workerThread.fsm = function(workloads, args, options) {
 load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread.main
- load('jstests/concurrency/fsm_libs/fsm.js'); // for fsm.run
+ load('jstests/concurrency/fsm_libs/fsm.js'); // for fsm.run
 return workerThread.main(workloads,
 args,
@@ -222,7 +222,7 @@ workerThread.fsm = function(workloads, args, options) {
 workerThread.composed = function(workloads, args, options) {
 load('jstests/concurrency/fsm_libs/worker_thread.js'); // for workerThread.main
- load('jstests/concurrency/fsm_libs/composer.js'); // for composer.run
+ load('jstests/concurrency/fsm_libs/composer.js'); // for composer.run
 return workerThread.main(workloads,
 args,
diff --git a/jstests/concurrency/fsm_libs/worker_thread.js b/jstests/concurrency/fsm_libs/worker_thread.js
index 64c7750a6f4..5c6dd771509 100644
--- a/jstests/concurrency/fsm_libs/worker_thread.js
+++ b/jstests/concurrency/fsm_libs/worker_thread.js
@@ -1,7 +1,7 @@
 'use strict';
 load('jstests/concurrency/fsm_libs/assert.js');
-load('jstests/concurrency/fsm_libs/cluster.js'); // for Cluster.isStandalone
+load('jstests/concurrency/fsm_libs/cluster.js'); // for Cluster.isStandalone
 load('jstests/concurrency/fsm_libs/parse_config.js'); // for parseConfig
 var workerThread = (function() {
@@ -40,7 +40,7 @@ var workerThread = (function() {
 }
 workloads.forEach(function(workload) {
- load(workload); // for $config
+ load(workload); // for $config
 var config = parseConfig($config); // to normalize
 // Copy any modifications that were made to $config.data
diff --git a/jstests/concurrency/fsm_workloads/agg_group_external.js b/jstests/concurrency/fsm_workloads/agg_group_external.js
index 38c47d79f13..22d71d4564d 100644
--- a/jstests/concurrency/fsm_workloads/agg_group_external.js
+++ b/jstests/concurrency/fsm_workloads/agg_group_external.js
@@ -8,8 +8,8 @@
 * The data passed to the $group is greater than 100MB, which should force
 * disk to be used.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/agg_match.js b/jstests/concurrency/fsm_workloads/agg_match.js
index d93c4cdddd5..a685096155b 100644
--- a/jstests/concurrency/fsm_workloads/agg_match.js
+++ b/jstests/concurrency/fsm_workloads/agg_match.js
@@ -6,7 +6,7 @@
 * Runs an aggregation with a $match that returns half the documents.
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
+load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/agg_sort.js b/jstests/concurrency/fsm_workloads/agg_sort.js
index 03de9a1aeea..936ae2cf71b 100644
--- a/jstests/concurrency/fsm_workloads/agg_sort.js
+++ b/jstests/concurrency/fsm_workloads/agg_sort.js
@@ -6,8 +6,8 @@
 * Runs an aggregation with a $match that returns half the documents followed
 * by a $sort on a field containing a random float.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/agg_sort_external.js b/jstests/concurrency/fsm_workloads/agg_sort_external.js
index c2bda97e8cd..8a7b6b22495 100644
--- a/jstests/concurrency/fsm_workloads/agg_sort_external.js
+++ b/jstests/concurrency/fsm_workloads/agg_sort_external.js
@@ -8,8 +8,8 @@
 *
 * The data returned by the $match is greater than 100MB, which should force an external sort.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/agg_base.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/collmod_separate_collections.js b/jstests/concurrency/fsm_workloads/collmod_separate_collections.js
index 5f9490dbaba..5233733eb2d 100644
--- a/jstests/concurrency/fsm_workloads/collmod_separate_collections.js
+++ b/jstests/concurrency/fsm_workloads/collmod_separate_collections.js
@@ -9,8 +9,8 @@
 *
 * Each thread updates a TTL index on a separate collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/collmod.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/collmod.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/compact.js b/jstests/concurrency/fsm_workloads/compact.js
index b80e46c0d65..afea3f8a28f 100644
--- a/jstests/concurrency/fsm_workloads/compact.js
+++ b/jstests/concurrency/fsm_workloads/compact.js
@@ -8,13 +8,13 @@
 * for each thread.
 */
-load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
+load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isEphemeral
 var $config = (function() {
 var data = {
 nDocumentsToInsert: 1000,
- nIndexes: 3 + 1, // The number of indexes created in createIndexes + 1 for { _id: 1 }
+ nIndexes: 3 + 1, // The number of indexes created in createIndexes + 1 for { _id: 1 }
 prefix: 'compact' // Use filename for prefix because filename is assumed unique
 };
diff --git a/jstests/concurrency/fsm_workloads/compact_simultaneous_padding_bytes.js b/jstests/concurrency/fsm_workloads/compact_simultaneous_padding_bytes.js
index 22eef359b87..47ad30cedab 100644
--- a/jstests/concurrency/fsm_workloads/compact_simultaneous_padding_bytes.js
+++ b/jstests/concurrency/fsm_workloads/compact_simultaneous_padding_bytes.js
@@ -8,8 +8,8 @@
 * for all threads. Uses paddingBytes as a parameter for compact.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/compact.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/compact.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isEphemeral
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/convert_to_capped_collection_index.js b/jstests/concurrency/fsm_workloads/convert_to_capped_collection_index.js
index 2eaa8e261b2..01342b9b603 100644
--- a/jstests/concurrency/fsm_workloads/convert_to_capped_collection_index.js
+++ b/jstests/concurrency/fsm_workloads/convert_to_capped_collection_index.js
@@ -13,7 +13,7 @@
 * but that only the _id index remains after (re-)converting
 * to a capped collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/convert_to_capped_collection.js'); // for $config
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/count_indexed.js b/jstests/concurrency/fsm_workloads/count_indexed.js
index d7a49c6fb40..b9c09020042 100644
--- a/jstests/concurrency/fsm_workloads/count_indexed.js
+++ b/jstests/concurrency/fsm_workloads/count_indexed.js
@@ -10,8 +10,8 @@
 * and then inserts 'modulus * countPerNum' documents. [250, 1000]
 * Each thread inserts docs into a unique collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/count.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/count.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/count_limit_skip.js b/jstests/concurrency/fsm_workloads/count_limit_skip.js
index 999fc941f8b..59cc5db835f 100644
--- a/jstests/concurrency/fsm_workloads/count_limit_skip.js
+++ b/jstests/concurrency/fsm_workloads/count_limit_skip.js
@@ -10,8 +10,8 @@
 * and then inserts 'modulus * countPerNum' documents. [250, 1000]
 * Each thread inserts docs into a unique collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/count.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/count.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/create_capped_collection.js b/jstests/concurrency/fsm_workloads/create_capped_collection.js
index 43cf7fe2b54..96fdda8ebda 100644
--- a/jstests/concurrency/fsm_workloads/create_capped_collection.js
+++ b/jstests/concurrency/fsm_workloads/create_capped_collection.js
@@ -6,7 +6,7 @@
 * Repeatedly creates a capped collection. Also verifies that truncation
 * occurs once the collection reaches a certain size.
 */
-load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
+load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isMongod and isMMAPv1
 var $config = (function() {
diff --git a/jstests/concurrency/fsm_workloads/create_capped_collection_maxdocs.js b/jstests/concurrency/fsm_workloads/create_capped_collection_maxdocs.js
index 53bc9554904..b3a836b8b0b 100644
--- a/jstests/concurrency/fsm_workloads/create_capped_collection_maxdocs.js
+++ b/jstests/concurrency/fsm_workloads/create_capped_collection_maxdocs.js
@@ -7,7 +7,7 @@
 * occurs once the collection reaches a certain size or contains a
 * certain number of documents.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/create_capped_collection.js'); // for $config
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/distinct_projection.js b/jstests/concurrency/fsm_workloads/distinct_projection.js
index cf8d5ab9501..3f9c4c3192d 100644
--- a/jstests/concurrency/fsm_workloads/distinct_projection.js
+++ b/jstests/concurrency/fsm_workloads/distinct_projection.js
@@ -8,7 +8,7 @@
 * Each thread operates on a separate collection.
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/distinct.js'); // for $config
+load('jstests/concurrency/fsm_workloads/distinct.js'); // for $config
 var $config = extendWorkload($config,
 function($config, $super) {
diff --git a/jstests/concurrency/fsm_workloads/explain_aggregate.js b/jstests/concurrency/fsm_workloads/explain_aggregate.js
index 82542be4cc4..e0bbccb7683 100644
--- a/jstests/concurrency/fsm_workloads/explain_aggregate.js
+++ b/jstests/concurrency/fsm_workloads/explain_aggregate.js
@@ -7,7 +7,7 @@
 *
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/explain_count.js b/jstests/concurrency/fsm_workloads/explain_count.js
index 05cfcc5ba87..b6a9f0fa8b8 100644
--- a/jstests/concurrency/fsm_workloads/explain_count.js
+++ b/jstests/concurrency/fsm_workloads/explain_count.js
@@ -5,10 +5,10 @@
 *
 * Runs explain() and count() on a collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isMongos
-load('jstests/libs/analyze_plan.js'); // for planHasStage
+load('jstests/libs/analyze_plan.js'); // for planHasStage
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/explain_distinct.js b/jstests/concurrency/fsm_workloads/explain_distinct.js
index b772ac3ac25..86c30f3aca9 100644
--- a/jstests/concurrency/fsm_workloads/explain_distinct.js
+++ b/jstests/concurrency/fsm_workloads/explain_distinct.js
@@ -6,8 +6,8 @@
 * Runs explain() and distinct() on a collection.
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
-load('jstests/libs/analyze_plan.js'); // for planHasStage
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/libs/analyze_plan.js'); // for planHasStage
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/explain_find.js b/jstests/concurrency/fsm_workloads/explain_find.js
index 0712c94f483..f0b6f099c63 100644
--- a/jstests/concurrency/fsm_workloads/explain_find.js
+++ b/jstests/concurrency/fsm_workloads/explain_find.js
@@ -7,8 +7,8 @@
 *
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
-load('jstests/libs/analyze_plan.js'); // for planHasStage and isIxscan
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/libs/analyze_plan.js'); // for planHasStage and isIxscan
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/explain_group.js b/jstests/concurrency/fsm_workloads/explain_group.js
index 007c703c648..f379bf4e608 100644
--- a/jstests/concurrency/fsm_workloads/explain_group.js
+++ b/jstests/concurrency/fsm_workloads/explain_group.js
@@ -7,8 +7,8 @@
 *
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
-load('jstests/libs/analyze_plan.js'); // for planHasStage
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/libs/analyze_plan.js'); // for planHasStage
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/explain_remove.js b/jstests/concurrency/fsm_workloads/explain_remove.js
index 37b451994d9..c5c05a9af69 100644
--- a/jstests/concurrency/fsm_workloads/explain_remove.js
+++ b/jstests/concurrency/fsm_workloads/explain_remove.js
@@ -6,7 +6,7 @@
 * Runs explain() and remove() on a collection.
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
 var $config = extendWorkload(
 $config,
diff --git a/jstests/concurrency/fsm_workloads/explain_update.js b/jstests/concurrency/fsm_workloads/explain_update.js
index 89876439bc2..e63f5948fef 100644
--- a/jstests/concurrency/fsm_workloads/explain_update.js
+++ b/jstests/concurrency/fsm_workloads/explain_update.js
@@ -5,8 +5,8 @@
 *
 * Runs explain() and update() on a collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/explain.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isMongos
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/findAndModify_update_collscan.js b/jstests/concurrency/fsm_workloads/findAndModify_update_collscan.js
index ed874f1bd81..f9e40b6b467 100644
--- a/jstests/concurrency/fsm_workloads/findAndModify_update_collscan.js
+++ b/jstests/concurrency/fsm_workloads/findAndModify_update_collscan.js
@@ -10,7 +10,7 @@
 *
 * Attempts to force a collection scan by not creating an index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/findAndModify_update.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/findAndModify_update_queue.js b/jstests/concurrency/fsm_workloads/findAndModify_update_queue.js
index 8ed1a148afa..9489f44708b 100644
--- a/jstests/concurrency/fsm_workloads/findAndModify_update_queue.js
+++ b/jstests/concurrency/fsm_workloads/findAndModify_update_queue.js
@@ -11,7 +11,7 @@
 * This workload was designed to reproduce an issue similar to SERVER-18304 for update operations
 * using the findAndModify command where the old version of the document is returned.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/findAndModify_remove_queue.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isMongod and isMMAPv1
diff --git a/jstests/concurrency/fsm_workloads/findAndModify_upsert_collscan.js b/jstests/concurrency/fsm_workloads/findAndModify_upsert_collscan.js
index e9cca5d6d8f..aad1fbc644c 100644
--- a/jstests/concurrency/fsm_workloads/findAndModify_upsert_collscan.js
+++ b/jstests/concurrency/fsm_workloads/findAndModify_upsert_collscan.js
@@ -10,7 +10,7 @@
 *
 * Forces 'sort' to perform a collection scan by using $natural.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/findAndModify_upsert.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/group_cond.js b/jstests/concurrency/fsm_workloads/group_cond.js
index 226b9a9afad..1ab6aa827c6 100644
--- a/jstests/concurrency/fsm_workloads/group_cond.js
+++ b/jstests/concurrency/fsm_workloads/group_cond.js
@@ -14,7 +14,7 @@
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/group.js'); // for $config
+load('jstests/concurrency/fsm_workloads/group.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_1char.js b/jstests/concurrency/fsm_workloads/indexed_insert_1char.js
index 3d90da7470a..5330bd9191e 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_1char.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_1char.js
@@ -7,7 +7,7 @@
 * documents appear in both a collection scan and an index scan. The indexed
 * value is a 1-character string based on the thread's id.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_1char_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_1char_noindex.js
index cdbba38b172..1a8165cf82e 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_1char_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_1char_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_1char.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/indexed_insert_1char.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/indexed_insert_1char.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_2d.js b/jstests/concurrency/fsm_workloads/indexed_insert_2d.js
index c8abb257745..674f229f53b 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_2d.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_2d.js
@@ -7,7 +7,7 @@
 * appear in both a collection scan and an index scan. The indexed value is a
 * legacy coordinate pair, indexed with a 2d index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_2dsphere.js b/jstests/concurrency/fsm_workloads/indexed_insert_2dsphere.js
index 6c8fd86c104..a0fb5613ef6 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_2dsphere.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_2dsphere.js
@@ -7,7 +7,7 @@
 * appear in both a collection scan and an index scan. The indexed value is a
 * legacy coordinate pair, indexed with a 2dsphere index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_2d.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_base_capped.js b/jstests/concurrency/fsm_workloads/indexed_insert_base_capped.js
index bdffdce6c8c..5bb153e5890 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_base_capped.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_base_capped.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_base.js workload on a capped collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/make_capped.js'); // for makeCapped
 var $config = extendWorkload($config, makeCapped);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_base_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_base_noindex.js
index aaa3b2e0e07..5e4a2f0f609 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_base_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_base_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_base.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_compound.js b/jstests/concurrency/fsm_workloads/indexed_insert_compound.js
index fe9641502b0..a32fc084215 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_compound.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_compound.js
@@ -7,7 +7,7 @@
 * appear in both a collection scan and an index scan. The collection is indexed
 * with a compound index on three different fields.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config =
@@ -21,7 +21,7 @@ var $config =
 $config.data.getDoc = function getDoc() {
 return {
 indexed_insert_compound_x: this.tid & 0x0f, // lowest 4 bits
- indexed_insert_compound_y: this.tid >> 4, // high bits
+ indexed_insert_compound_y: this.tid >> 4, // high bits
 indexed_insert_compound_z: String.fromCharCode(33 + this.tid)
 };
 };
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_eval.js b/jstests/concurrency/fsm_workloads/indexed_insert_eval.js
index a7a4797efef..ccb3696ffeb 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_eval.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_eval.js
@@ -7,7 +7,7 @@
 * Asserts that all documents appear in both a collection scan and an index
 * scan. The indexed value is the thread id.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_eval_nolock.js b/jstests/concurrency/fsm_workloads/indexed_insert_eval_nolock.js
index d1d2727c0d7..33e8ef41d56 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_eval_nolock.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_eval_nolock.js
@@ -7,7 +7,7 @@
 * with the option { nolock: true }. Asserts that all documents appear in both a
 * collection scan and an index scan. The indexed value is the thread id.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_eval.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous.js b/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous.js
index c34b986bb7b..ddf2a0c0ead 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous.js
@@ -7,7 +7,7 @@
 * documents appear in both a collection scan and an index scan. The indexed
 * value is a different BSON type, depending on the thread's id.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload($config,
@@ -41,12 +41,12 @@ var $config = extendWorkload($config,
 }
 var choices = [
- this.tid, // int
 this.tid.toString(), // string
- this.tid * 0.0001, // float
- {tid: this.tid}, // subdocument
- makeOID(this.tid), // objectid
- makeDate(this.tid), // date
+ this.tid, // int
+ this.tid.toString(), // string
+ this.tid * 0.0001, // float
+ {tid: this.tid}, // subdocument
+ makeOID(this.tid), // objectid
+ makeDate(this.tid), // date
 new Function('', 'return ' + this.tid + ';') // function
 ];
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous_noindex.js
index 56aac8ff2ca..f8adab70ffc 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_heterogeneous.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_heterogeneous.js'); // for $config
-load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
+load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_large.js b/jstests/concurrency/fsm_workloads/indexed_insert_large.js
index 50317368aa6..d7bedb22ade 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_large.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_large.js
@@ -8,7 +8,7 @@
 * value is a string large enough to make the whole index key be 1K, which is
 * the maximum.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_large_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_large_noindex.js
index 98c75cab734..625de8a387e 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_large_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_large_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_large.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/indexed_insert_large.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/indexed_insert_large.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname.js b/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname.js
index 47867362aac..3c8c2f70223 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname.js
@@ -7,7 +7,7 @@
 * documents appear in both a collection scan and an index scan. The indexed
 * field name is a long string.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname_noindex.js
index 4466d57efd0..f8960c40b4d 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_long_fieldname.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_long_fieldname.js'); // for $config
-load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
+load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_multikey.js b/jstests/concurrency/fsm_workloads/indexed_insert_multikey.js
index bff99ae85c7..e49b5356760 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_multikey.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_multikey.js
@@ -7,7 +7,7 @@
 * documents appear in both a collection scan and an index scan. The indexed
 * value is an array of numbers.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_multikey_noindex.js b/jstests/concurrency/fsm_workloads/indexed_insert_multikey_noindex.js
index 9f8e491d2da..8995d209a47 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_multikey_noindex.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_multikey_noindex.js
@@ -5,8 +5,8 @@
 *
 * Executes the indexed_insert_multikey.js workload after dropping its index.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/indexed_insert_multikey.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/indexed_insert_multikey.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/indexed_noindex.js'); // for indexedNoindex
 var $config = extendWorkload($config, indexedNoindex);
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_ordered_bulk.js b/jstests/concurrency/fsm_workloads/indexed_insert_ordered_bulk.js
index 17ffec0bb40..eb70c850488 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_ordered_bulk.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_ordered_bulk.js
@@ -8,7 +8,7 @@
 *
 * Uses an ordered, bulk operation to perform the inserts.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_text_multikey.js b/jstests/concurrency/fsm_workloads/indexed_insert_text_multikey.js
index bacaff869e4..0cc7b590684 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_text_multikey.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_text_multikey.js
@@ -5,7 +5,7 @@
 *
 * like indexed_insert_text.js but the indexed value is an array of strings
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_text.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_unordered_bulk.js b/jstests/concurrency/fsm_workloads/indexed_insert_unordered_bulk.js
index f1d00d7cf64..aa64e8d21e5 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_unordered_bulk.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_unordered_bulk.js
@@ -8,7 +8,7 @@
 *
 * Uses an unordered, bulk operation to perform the inserts.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/indexed_insert_upsert.js b/jstests/concurrency/fsm_workloads/indexed_insert_upsert.js
index a3d0bd2c8cd..cc26d364ace 100644
--- a/jstests/concurrency/fsm_workloads/indexed_insert_upsert.js
+++ b/jstests/concurrency/fsm_workloads/indexed_insert_upsert.js
@@ -10,7 +10,7 @@
 *
 * Instead of inserting via coll.insert(), this workload inserts using an
 * upsert.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_base.js'); // for $config
 var $config = extendWorkload(
@@ -30,7 +30,7 @@ var $config = extendWorkload(
 $config.states.insert = function insert(db, collName) {
 var doc = this.getDoc();
 doc.counter = this.counter++; // ensure doc is unique to guarantee an upsert occurs
- doc._id = new ObjectId(); // _id is required for shard targeting
+ doc._id = new ObjectId(); // _id is required for shard targeting
 var res = db[collName].update(doc, {$inc: {unused: 0}}, {upsert: true});
 assertAlways.eq(0, res.nMatched, tojson(res));
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_merge.js b/jstests/concurrency/fsm_workloads/map_reduce_merge.js
index fd892dc72d9..4f96c229eac 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_merge.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_merge.js
@@ -13,7 +13,7 @@
 *
 * Writes the results of each thread to the same collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_merge_nonatomic.js b/jstests/concurrency/fsm_workloads/map_reduce_merge_nonatomic.js
index 49897e20548..8f0804b365d 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_merge_nonatomic.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_merge_nonatomic.js
@@ -13,7 +13,7 @@
 *
 * Specifies nonAtomic=true.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropDatabases
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_reduce.js b/jstests/concurrency/fsm_workloads/map_reduce_reduce.js
index 7f6ff6d535f..b11ccf3614f 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_reduce.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_reduce.js
@@ -11,7 +11,7 @@
 * Uses the "reduce" action to combine the results with the contents
 * of the output collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_reduce_nonatomic.js b/jstests/concurrency/fsm_workloads/map_reduce_reduce_nonatomic.js
index b566f9db39f..5953c7c2a07 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_reduce_nonatomic.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_reduce_nonatomic.js
@@ -14,7 +14,7 @@
 * Specifies nonAtomic=true and writes the results of each thread to
 * the same collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_replace.js b/jstests/concurrency/fsm_workloads/map_reduce_replace.js
index 4f22bd225b2..ce268bf5e20 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_replace.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_replace.js
@@ -11,7 +11,7 @@
 * Uses the "replace" action to overwrite the entire contents of the
 * collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
diff --git a/jstests/concurrency/fsm_workloads/map_reduce_replace_nonexistent.js b/jstests/concurrency/fsm_workloads/map_reduce_replace_nonexistent.js
index 3ee8af21409..3bfdb6086de 100644
--- a/jstests/concurrency/fsm_workloads/map_reduce_replace_nonexistent.js
+++ b/jstests/concurrency/fsm_workloads/map_reduce_replace_nonexistent.js
@@ -10,7 +10,7 @@
 * Uses the "replace" action to write the results to a nonexistent
 * output collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/map_reduce_inline.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/drop_utils.js'); // for dropCollections
diff --git a/jstests/concurrency/fsm_workloads/reindex.js b/jstests/concurrency/fsm_workloads/reindex.js
index 7d71e4ea7be..cec33eddc5f 100644
--- a/jstests/concurrency/fsm_workloads/reindex.js
+++ b/jstests/concurrency/fsm_workloads/reindex.js
@@ -13,7 +13,7 @@ var $config = (function() {
 var data = {
 nIndexes: 3 + 1, // 3 created and 1 for _id
 nDocumentsToInsert: 1000,
- maxInteger: 100, // Used for document values. Must be a factor of nDocumentsToInsert
+ maxInteger: 100, // Used for document values. Must be a factor of nDocumentsToInsert
 prefix: 'reindex' // Use filename for prefix because filename is assumed unique
 };
diff --git a/jstests/concurrency/fsm_workloads/reindex_background.js b/jstests/concurrency/fsm_workloads/reindex_background.js
index 7a5c25679f1..22db164ae6a 100644
--- a/jstests/concurrency/fsm_workloads/reindex_background.js
+++ b/jstests/concurrency/fsm_workloads/reindex_background.js
@@ -10,7 +10,7 @@
 */
 load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/reindex.js'); // for $config
+load('jstests/concurrency/fsm_workloads/reindex.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/remove_single_document_eval.js b/jstests/concurrency/fsm_workloads/remove_single_document_eval.js
index 97dca4e242f..e90eaa63114 100644
--- a/jstests/concurrency/fsm_workloads/remove_single_document_eval.js
+++ b/jstests/concurrency/fsm_workloads/remove_single_document_eval.js
@@ -5,7 +5,7 @@
 *
 * Runs remove_single_document using the eval command.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/remove_single_document.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/remove_single_document_eval_nolock.js b/jstests/concurrency/fsm_workloads/remove_single_document_eval_nolock.js
index 1663f808fdb..e88868c3345 100644
--- a/jstests/concurrency/fsm_workloads/remove_single_document_eval_nolock.js
+++ b/jstests/concurrency/fsm_workloads/remove_single_document_eval_nolock.js
@@ -5,7 +5,7 @@
 *
 * Runs remove_single_document_eval with the eval option { nolock: true }.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/remove_single_document_eval.js'); // for $config
 var $config = extendWorkload($config,
diff --git a/jstests/concurrency/fsm_workloads/remove_where.js b/jstests/concurrency/fsm_workloads/remove_where.js
index 0ef4f3d9931..36e228ebd9a 100644
--- a/jstests/concurrency/fsm_workloads/remove_where.js
+++ b/jstests/concurrency/fsm_workloads/remove_where.js
@@ -8,7 +8,7 @@
 * counts.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_where.js'); // for $config
 var $config = extendWorkload(
diff --git a/jstests/concurrency/fsm_workloads/touch_base.js b/jstests/concurrency/fsm_workloads/touch_base.js
index 6e2cce202ed..df419e17db7 100644
--- a/jstests/concurrency/fsm_workloads/touch_base.js
+++ b/jstests/concurrency/fsm_workloads/touch_base.js
@@ -7,7 +7,7 @@
 * and queries to verify the number of documents inserted by the thread.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/indexed_insert_where.js'); // for $config
 // For isMongod, isMMAPv1, and isEphemeral.
 load('jstests/concurrency/fsm_workload_helpers/server_types.js');
diff --git a/jstests/concurrency/fsm_workloads/update_array_noindex.js b/jstests/concurrency/fsm_workloads/update_array_noindex.js
index 2e99c5a709b..c6111684380 100644
--- a/jstests/concurrency/fsm_workloads/update_array_noindex.js
+++ b/jstests/concurrency/fsm_workloads/update_array_noindex.js
@@ -6,8 +6,8 @@
 * Executes the update_array.js workload after dropping all non-_id indexes on
 * the collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/update_array.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/update_array.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes
 var $config = extendWorkload($config, dropAllIndexes);
diff --git a/jstests/concurrency/fsm_workloads/update_inc_capped.js b/jstests/concurrency/fsm_workloads/update_inc_capped.js
index 19588195f07..146c1f83fe9 100644
--- a/jstests/concurrency/fsm_workloads/update_inc_capped.js
+++ b/jstests/concurrency/fsm_workloads/update_inc_capped.js
@@ -5,8 +5,8 @@
 *
 * Executes the update_inc.js workload on a capped collection.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/update_inc.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/update_inc.js'); // for $config
 load('jstests/concurrency/fsm_workload_modifiers/make_capped.js'); // for makeCapped
 var $config = extendWorkload($config, makeCapped);
diff --git a/jstests/concurrency/fsm_workloads/update_multifield_isolated_multiupdate.js b/jstests/concurrency/fsm_workloads/update_multifield_isolated_multiupdate.js
index a8debf271e7..3dd8b584d98 100644
--- a/jstests/concurrency/fsm_workloads/update_multifield_isolated_multiupdate.js
+++ b/jstests/concurrency/fsm_workloads/update_multifield_isolated_multiupdate.js
@@ -6,7 +6,7 @@
 * Does updates that affect multiple fields on multiple documents, using $isolated.
 * The collection has an index for each field, and a multikey index for all fields.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
 load('jstests/concurrency/fsm_workloads/update_multifield.js'); // for $config
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/update_multifield_multiupdate.js b/jstests/concurrency/fsm_workloads/update_multifield_multiupdate.js
index 46532c8db47..799d104a323 100644
--- a/jstests/concurrency/fsm_workloads/update_multifield_multiupdate.js
+++ b/jstests/concurrency/fsm_workloads/update_multifield_multiupdate.js
@@ -6,8 +6,8 @@
 * Does updates that affect multiple fields on multiple documents.
 * The collection has an index for each field, and a multikey index for all fields.
 */
-load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
-load('jstests/concurrency/fsm_workloads/update_multifield.js'); // for $config
+load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload
+load('jstests/concurrency/fsm_workloads/update_multifield.js'); // for $config
 load('jstests/concurrency/fsm_workload_helpers/server_types.js'); // for isMongod and isMMAPv1
 var $config =
diff --git a/jstests/concurrency/fsm_workloads/update_multifield_multiupdate_noindex.js b/jstests/concurrency/fsm_workloads/update_multifield_multiupdate_noindex.js
index f2739e329dd..ea0cc4fc4b7 100644
--- a/jstests/concurrency/fsm_workloads/update_multifield_multiupdate_noindex.js
+++ b/jstests/concurrency/fsm_workloads/update_multifield_multiupdate_noindex.js
@@ -6,8 +6,8 @@
 * Executes the update_multifield_multiupdate.js workload after dropping all
 * non-_id indexes on the collection.
*/ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/update_multifield_multiupdate.js'); // for $config -load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes +load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_multifield_noindex.js b/jstests/concurrency/fsm_workloads/update_multifield_noindex.js index 22b230d7c9e..0f15037c568 100644 --- a/jstests/concurrency/fsm_workloads/update_multifield_noindex.js +++ b/jstests/concurrency/fsm_workloads/update_multifield_noindex.js @@ -6,8 +6,8 @@ * Executes the update_multifield.js workload after dropping all non-_id indexes * on the collection. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/update_multifield.js'); // for $config +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_workloads/update_multifield.js'); // for $config load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_rename_noindex.js b/jstests/concurrency/fsm_workloads/update_rename_noindex.js index 0bcb0cd9145..96af5a8f1cc 100644 --- a/jstests/concurrency/fsm_workloads/update_rename_noindex.js +++ b/jstests/concurrency/fsm_workloads/update_rename_noindex.js @@ -6,8 +6,8 @@ * Executes the update_rename.js workload after dropping all non-_id indexes on * the collection. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/update_rename.js'); // for $config +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_workloads/update_rename.js'); // for $config load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_replace_noindex.js b/jstests/concurrency/fsm_workloads/update_replace_noindex.js index a10323fb455..14dc0b16e2a 100644 --- a/jstests/concurrency/fsm_workloads/update_replace_noindex.js +++ b/jstests/concurrency/fsm_workloads/update_replace_noindex.js @@ -6,8 +6,8 @@ * Executes the update_replace.js workload after dropping all non-_id indexes * on the collection. 
*/ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/update_replace.js'); // for $config +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_workloads/update_replace.js'); // for $config load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_simple_eval.js b/jstests/concurrency/fsm_workloads/update_simple_eval.js index cf2b10f897a..988c2e44ab3 100644 --- a/jstests/concurrency/fsm_workloads/update_simple_eval.js +++ b/jstests/concurrency/fsm_workloads/update_simple_eval.js @@ -9,7 +9,7 @@ * - what value to $set the field to * and then applies the update using db.runCommand({ eval: ... }) */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/update_simple.js'); // for $config var $config = diff --git a/jstests/concurrency/fsm_workloads/update_simple_eval_nolock.js b/jstests/concurrency/fsm_workloads/update_simple_eval_nolock.js index 87e24965a7a..282e98a461d 100644 --- a/jstests/concurrency/fsm_workloads/update_simple_eval_nolock.js +++ b/jstests/concurrency/fsm_workloads/update_simple_eval_nolock.js @@ -5,7 +5,7 @@ * * Runs update_simple_eval with the eval option { nolock: true }. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/update_simple_eval.js'); // for $config var $config = extendWorkload($config, diff --git a/jstests/concurrency/fsm_workloads/update_simple_noindex.js b/jstests/concurrency/fsm_workloads/update_simple_noindex.js index 65bad2855ab..f255967b614 100644 --- a/jstests/concurrency/fsm_workloads/update_simple_noindex.js +++ b/jstests/concurrency/fsm_workloads/update_simple_noindex.js @@ -6,8 +6,8 @@ * Executes the update_simple.js workload after dropping all non-_id indexes on * the collection. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/update_simple.js'); // for $config +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_workloads/update_simple.js'); // for $config load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_upsert_multi_noindex.js b/jstests/concurrency/fsm_workloads/update_upsert_multi_noindex.js index 14b6c02d61f..9841a2e91af 100644 --- a/jstests/concurrency/fsm_workloads/update_upsert_multi_noindex.js +++ b/jstests/concurrency/fsm_workloads/update_upsert_multi_noindex.js @@ -6,8 +6,8 @@ * Executes the update_upsert_multi.js workload after dropping all non-_id * indexes on the collection. 
*/ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/update_upsert_multi.js'); // for $config +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_workloads/update_upsert_multi.js'); // for $config load('jstests/concurrency/fsm_workload_modifiers/drop_all_indexes.js'); // for dropAllIndexes var $config = extendWorkload($config, dropAllIndexes); diff --git a/jstests/concurrency/fsm_workloads/update_where.js b/jstests/concurrency/fsm_workloads/update_where.js index ac0bb893160..d7ef045131f 100644 --- a/jstests/concurrency/fsm_workloads/update_where.js +++ b/jstests/concurrency/fsm_workloads/update_where.js @@ -7,7 +7,7 @@ * thread and updates them. Also queries by the thread that created the documents to verify counts. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/indexed_insert_where.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/upsert_where.js b/jstests/concurrency/fsm_workloads/upsert_where.js index 35430ccfa2f..72ff542c572 100644 --- a/jstests/concurrency/fsm_workloads/upsert_where.js +++ b/jstests/concurrency/fsm_workloads/upsert_where.js @@ -6,7 +6,7 @@ * Bulk inserts documents in batches of 100, randomly selects a document that doesn't exist and * updates it, and queries by the thread that created the documents to verify counts. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/indexed_insert_where.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_and_hashed.js b/jstests/concurrency/fsm_workloads/yield_and_hashed.js index d0eef4c8d4f..48a14d706e2 100644 --- a/jstests/concurrency/fsm_workloads/yield_and_hashed.js +++ b/jstests/concurrency/fsm_workloads/yield_and_hashed.js @@ -6,7 +6,7 @@ * Intersperse queries which use the AND_HASH stage with updates and deletes of documents they may * match. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/yield_rooted_or.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_and_sorted.js b/jstests/concurrency/fsm_workloads/yield_and_sorted.js index 42bd94b4acd..2bea4226ba0 100644 --- a/jstests/concurrency/fsm_workloads/yield_and_sorted.js +++ b/jstests/concurrency/fsm_workloads/yield_and_sorted.js @@ -6,7 +6,7 @@ * Intersperse queries which use the AND_SORTED stage with updates and deletes of documents they * may match. 
*/ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/yield_rooted_or.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_fetch.js b/jstests/concurrency/fsm_workloads/yield_fetch.js index 0e1073f774a..b3f47a5fe5d 100644 --- a/jstests/concurrency/fsm_workloads/yield_fetch.js +++ b/jstests/concurrency/fsm_workloads/yield_fetch.js @@ -6,7 +6,7 @@ * Intersperse queries which use the FETCH stage with updates and deletes of documents they may * match. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/yield_rooted_or.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_geo_near.js b/jstests/concurrency/fsm_workloads/yield_geo_near.js index 324c384636e..fd13bd31014 100644 --- a/jstests/concurrency/fsm_workloads/yield_geo_near.js +++ b/jstests/concurrency/fsm_workloads/yield_geo_near.js @@ -6,7 +6,7 @@ * Intersperse geo $near queries with updates and deletes of documents they may match. */ load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/yield.js'); // for $config +load('jstests/concurrency/fsm_workloads/yield.js'); // for $config var $config = extendWorkload( $config, diff --git a/jstests/concurrency/fsm_workloads/yield_geo_near_dedup.js b/jstests/concurrency/fsm_workloads/yield_geo_near_dedup.js index e2a63f8c546..5efd9cf7242 100644 --- a/jstests/concurrency/fsm_workloads/yield_geo_near_dedup.js +++ b/jstests/concurrency/fsm_workloads/yield_geo_near_dedup.js @@ -5,7 +5,7 @@ * * Intersperse geo $near queries with updates of non-geo fields to test deduplication. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/yield_geo_near.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_id_hack.js b/jstests/concurrency/fsm_workloads/yield_id_hack.js index 81a5acbb0fd..0d50eb7d350 100644 --- a/jstests/concurrency/fsm_workloads/yield_id_hack.js +++ b/jstests/concurrency/fsm_workloads/yield_id_hack.js @@ -7,7 +7,7 @@ * match. */ load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/yield.js'); // for $config +load('jstests/concurrency/fsm_workloads/yield.js'); // for $config var $config = extendWorkload($config, function($config, $super) { diff --git a/jstests/concurrency/fsm_workloads/yield_rooted_or.js b/jstests/concurrency/fsm_workloads/yield_rooted_or.js index 4f8415b4fb0..f7dd0dcffdf 100644 --- a/jstests/concurrency/fsm_workloads/yield_rooted_or.js +++ b/jstests/concurrency/fsm_workloads/yield_rooted_or.js @@ -8,7 +8,7 @@ * Other workloads that need an index on c and d can inherit from this. 
*/ load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/yield.js'); // for $config +load('jstests/concurrency/fsm_workloads/yield.js'); // for $config var $config = extendWorkload($config, diff --git a/jstests/concurrency/fsm_workloads/yield_sort.js b/jstests/concurrency/fsm_workloads/yield_sort.js index 628314fd36b..d0d905177f8 100644 --- a/jstests/concurrency/fsm_workloads/yield_sort.js +++ b/jstests/concurrency/fsm_workloads/yield_sort.js @@ -6,7 +6,7 @@ * Intersperse queries which use the SORT stage with updates and deletes of documents they may * match. */ -load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload +load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload load('jstests/concurrency/fsm_workloads/yield_sort_merge.js'); // for $config var $config = extendWorkload( diff --git a/jstests/concurrency/fsm_workloads/yield_sort_merge.js b/jstests/concurrency/fsm_workloads/yield_sort_merge.js index ee63b0d8298..c46163df492 100644 --- a/jstests/concurrency/fsm_workloads/yield_sort_merge.js +++ b/jstests/concurrency/fsm_workloads/yield_sort_merge.js @@ -8,7 +8,7 @@ * Other workloads that need an index { a: 1, b: 1 } can extend this */ load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/yield.js'); // for $config +load('jstests/concurrency/fsm_workloads/yield.js'); // for $config var $config = extendWorkload( $config, diff --git a/jstests/concurrency/fsm_workloads/yield_text.js b/jstests/concurrency/fsm_workloads/yield_text.js index 67d7c618319..9291c25e527 100644 --- a/jstests/concurrency/fsm_workloads/yield_text.js +++ b/jstests/concurrency/fsm_workloads/yield_text.js @@ -7,7 +7,7 @@ * match. */ load('jstests/concurrency/fsm_libs/extend_workload.js'); // for extendWorkload -load('jstests/concurrency/fsm_workloads/yield.js'); // for $config +load('jstests/concurrency/fsm_workloads/yield.js'); // for $config var $config = extendWorkload( $config, diff --git a/jstests/core/bulk_insert_capped.js b/jstests/core/bulk_insert_capped.js index 129c393dbfb..b7d21cee3f5 100644 --- a/jstests/core/bulk_insert_capped.js +++ b/jstests/core/bulk_insert_capped.js @@ -15,9 +15,9 @@ assert(res.valid, tojson(res)); // Ensure that various ways of iterating the collection only return one document. - assert.eq(t.find().itcount(), 1); // Table scan. + assert.eq(t.find().itcount(), 1); // Table scan. assert.eq(t.find({}, {_id: 1}).hint({_id: 1}).itcount(), 1); // Index only (covered). - assert.eq(t.find().hint({_id: 1}).itcount(), 1); // Index scan with fetch. + assert.eq(t.find().hint({_id: 1}).itcount(), 1); // Index scan with fetch. // Ensure that the second document is the one that is kept. 
assert.eq(t.findOne(), {_id: 2}); diff --git a/jstests/core/capped5.js b/jstests/core/capped5.js index 33d78c5e17f..930cbabb462 100644 --- a/jstests/core/capped5.js +++ b/jstests/core/capped5.js @@ -19,7 +19,7 @@ t.drop(); db.createCollection(tn, {capped: true, size: 1024 * 1024 * 1}); t.insert({_id: 5, x: 11}); t.insert({_id: 5, x: 12}); -assert.eq(1, t.getIndexes().length); // now we assume _id index +assert.eq(1, t.getIndexes().length); // now we assume _id index assert.eq(1, t.find().toArray().length); //_id index unique, so second insert fails t.drop(); diff --git a/jstests/core/counta.js b/jstests/core/counta.js index 0762769a088..26ce4d2269e 100644 --- a/jstests/core/counta.js +++ b/jstests/core/counta.js @@ -1,29 +1,29 @@ // Check that count returns 0 in some exception cases. (function() { -'use strict'; + 'use strict'; -var t = db.jstests_counta; -t.drop(); + var t = db.jstests_counta; + t.drop(); -for (var i = 0; i < 10; ++i) { - t.save({a: i}); -} + for (var i = 0; i < 10; ++i) { + t.save({a: i}); + } -// f() is undefined, causing an assertion -assert.throws(function() { - t.count({ - $where: function() { - if (this.a < 5) { - return true; - } else { - f(); + // f() is undefined, causing an assertion + assert.throws(function() { + t.count({ + $where: function() { + if (this.a < 5) { + return true; + } else { + f(); + } } - } + }); }); -}); -// count must return error if collection name is absent -var res = assert.commandFailed(db.runCommand("count")); -assert.eq(ErrorCodes.InvalidNamespace, res.code); + // count must return error if collection name is absent + var res = assert.commandFailed(db.runCommand("count")); + assert.eq(ErrorCodes.InvalidNamespace, res.code); })(); diff --git a/jstests/core/countc.js b/jstests/core/countc.js index ea4aed54903..39a297ec8c4 100644 --- a/jstests/core/countc.js +++ b/jstests/core/countc.js @@ -28,7 +28,7 @@ assert.eq(2, t.count({a: {$in: vals}})); t.drop(); t.ensureIndex({a: 1}); t.save({a: [1, 2]}); // Will match because 'a' is in range. -t.save({a: 9}); // Will not match because 'a' is not in range. +t.save({a: 9}); // Will not match because 'a' is not in range. // Only one document matches. assert.eq(1, t.count({a: {$gt: 0, $lt: 5}})); @@ -43,7 +43,7 @@ assert.eq(0, t.count({'a.b': 2, 'a.c': 2})); t.drop(); t.ensureIndex({a: 1}); t.save({a: 'a'}); // Will match. -t.save({a: {}}); // Will not match because {} is not a string. +t.save({a: {}}); // Will not match because {} is not a string. // Only one document matches. assert.eq(1, t.count({a: {$gte: ''}})); @@ -51,7 +51,7 @@ assert.eq(1, t.count({a: {$gte: ''}})); t.drop(); t.ensureIndex({a: 1}); t.save({a: new Date(1)}); // Will match. -t.save({a: true}); // Will not match because 'true' is not a date. +t.save({a: true}); // Will not match because 'true' is not a date. // Only one document matches. 
assert.eq(1, t.count({a: {$lte: new Date(1)}})); diff --git a/jstests/core/drop3.js b/jstests/core/drop3.js index 1215d218e4f..a3807faab12 100644 --- a/jstests/core/drop3.js +++ b/jstests/core/drop3.js @@ -21,5 +21,5 @@ t.drop(); // should invalidate cursor, but not subcursor assert.throws(function() { cursor.itcount(); -}); // throws "cursor doesn't exist on server" error on getMore +}); // throws "cursor doesn't exist on server" error on getMore assert.eq(subcursor.itcount(), 9); // one already seen diff --git a/jstests/core/existsa.js b/jstests/core/existsa.js index 466a5e94a63..45adb5d3172 100644 --- a/jstests/core/existsa.js +++ b/jstests/core/existsa.js @@ -85,7 +85,7 @@ assertExistsUnindexed({a: {$not: {$exists: false}}}); // Nested $exists queries disallow the sparse index in some cases where it is not strictly // necessary to do so. (Descriptive tests.) assertExistsUnindexed({$nor: [{b: {$exists: false}}]}, 1); // Unindexed field. -assertExists({$or: [{a: {$exists: true}}]}); // $exists:true not $exists:false. +assertExists({$or: [{a: {$exists: true}}]}); // $exists:true not $exists:false. // Behavior is similar with $elemMatch. t.drop(); diff --git a/jstests/core/explain_distinct.js b/jstests/core/explain_distinct.js index 37d5a485516..bc5b3635c95 100644 --- a/jstests/core/explain_distinct.js +++ b/jstests/core/explain_distinct.js @@ -35,10 +35,10 @@ assert.writeOK(coll.insert({a: 2, c: 1})); } - assert.commandFailed(runDistinctExplain(coll, {}, {})); // Bad keyString. - assert.commandFailed(runDistinctExplain(coll, 'a', 'a')); // Bad query. - assert.commandFailed(runDistinctExplain(coll, 'b', {$not: 1})); // Bad query. - assert.commandFailed(runDistinctExplain(coll, 'a', {$not: 1})); // Bad query. + assert.commandFailed(runDistinctExplain(coll, {}, {})); // Bad keyString. + assert.commandFailed(runDistinctExplain(coll, 'a', 'a')); // Bad query. + assert.commandFailed(runDistinctExplain(coll, 'b', {$not: 1})); // Bad query. + assert.commandFailed(runDistinctExplain(coll, 'a', {$not: 1})); // Bad query. assert.commandFailed(runDistinctExplain(coll, '_id', {$not: 1})); // Bad query. // Ensure that server accepts a distinct command with no 'query' field. diff --git a/jstests/core/geo_center_sphere2.js b/jstests/core/geo_center_sphere2.js index ac8f09cbe77..f3dc465e350 100644 --- a/jstests/core/geo_center_sphere2.js +++ b/jstests/core/geo_center_sphere2.js @@ -31,7 +31,7 @@ for (var test = 0; test < numTests; test++) { Random.srand(1337 + test); var radius = 5000 * Random.rand(); // km - radius = radius / 6378.1; // radians; earth radius from geoconstants.h + radius = radius / 6378.1; // radians; earth radius from geoconstants.h var numDocs = Math.floor(400 * Random.rand()); // TODO: Wrapping uses the error value to figure out what would overlap... 
var bits = Math.floor(5 + Random.rand() * 28); diff --git a/jstests/core/geo_polygon1.js b/jstests/core/geo_polygon1.js index 487df91a167..d1dbf0c19dc 100644 --- a/jstests/core/geo_polygon1.js +++ b/jstests/core/geo_polygon1.js @@ -58,8 +58,8 @@ assert.commandWorked(t.ensureIndex({loc: "2d"})); assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman single point"); -t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening -t.save({loc: [3, 7]}); // Add a point above the center of the head +t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening +t.save({loc: [3, 7]}); // Add a point above the center of the head t.save({loc: [3, -1]}); // Add a point below the center of the bottom assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman double point"); diff --git a/jstests/core/geo_polygon1_noindex.js b/jstests/core/geo_polygon1_noindex.js index 672f53ebd90..22f90e7157c 100644 --- a/jstests/core/geo_polygon1_noindex.js +++ b/jstests/core/geo_polygon1_noindex.js @@ -53,8 +53,8 @@ assert.writeOK(t.save({loc: [1, 3]})); // Add a point that's in assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman single point"); -t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening -t.save({loc: [3, 7]}); // Add a point above the center of the head +t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening +t.save({loc: [3, 7]}); // Add a point above the center of the head t.save({loc: [3, -1]}); // Add a point below the center of the bottom assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).count(), "Pacman double point"); diff --git a/jstests/core/geo_polygon3.js b/jstests/core/geo_polygon3.js index 887e81701cd..ed8f040fa8d 100644 --- a/jstests/core/geo_polygon3.js +++ b/jstests/core/geo_polygon3.js @@ -56,8 +56,8 @@ for (var n = 0; n < numTests; n++) { assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).itcount(), "Pacman single point"); - t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening - t.save({loc: [3, 7]}); // Add a point above the center of the head + t.save({loc: [5, 3]}); // Add a point that's out right in the mouth opening + t.save({loc: [3, 7]}); // Add a point above the center of the head t.save({loc: [3, -1]}); // Add a point below the center of the bottom assert.eq(1, t.find({loc: {$within: {$polygon: pacman}}}).itcount(), "Pacman double point"); diff --git a/jstests/core/index_bigkeys_update.js b/jstests/core/index_bigkeys_update.js index a3074bfdfdd..73be9c77b94 100644 --- a/jstests/core/index_bigkeys_update.js +++ b/jstests/core/index_bigkeys_update.js @@ -14,5 +14,5 @@ assert.eq(1, t.count()); assert.writeError(t.update({}, {$set: {x: bigString}})); assert.eq(1, t.count()); -assert.eq("asd", t.findOne().x); // make sure doc is the old version +assert.eq("asd", t.findOne().x); // make sure doc is the old version assert.eq("asd", t.findOne({_id: 0}).x); // make sure doc is the old version diff --git a/jstests/core/index_plugins.js b/jstests/core/index_plugins.js index f32e1e4345a..0ec565d1d50 100644 --- a/jstests/core/index_plugins.js +++ b/jstests/core/index_plugins.js @@ -46,7 +46,7 @@ coll.dropIndexes(); assert.commandWorked(coll.ensureIndex({a: "text", b: "text"})); coll.dropIndexes(); -assert.commandFailed(coll.ensureIndex({a: "2d", b: "2d"})); // unsupported +assert.commandFailed(coll.ensureIndex({a: "2d", b: "2d"})); // unsupported assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "geoHaystack"}, // 
unsupported {bucketSize: 1})); @@ -54,9 +54,9 @@ assert.commandFailed(coll.ensureIndex({a: "hashed", b: "hashed"})); // unsuppor // Test compounding different special index types with each other. -assert.commandFailed(coll.ensureIndex({a: "2d", b: "hashed"})); // unsupported -assert.commandFailed(coll.ensureIndex({a: "hashed", b: "2dsphere"})); // unsupported -assert.commandFailed(coll.ensureIndex({a: "2dsphere", b: "text"})); // unsupported +assert.commandFailed(coll.ensureIndex({a: "2d", b: "hashed"})); // unsupported +assert.commandFailed(coll.ensureIndex({a: "hashed", b: "2dsphere"})); // unsupported +assert.commandFailed(coll.ensureIndex({a: "2dsphere", b: "text"})); // unsupported assert.commandFailed(coll.ensureIndex({a: "text", b: "geoHaystack"})); // unsupported -assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "2d"}, // unsupported +assert.commandFailed(coll.ensureIndex({a: "geoHaystack", b: "2d"}, // unsupported {bucketSize: 1})); diff --git a/jstests/core/indexp.js b/jstests/core/indexp.js index 0111f0cca35..71e9f8eadb9 100644 --- a/jstests/core/indexp.js +++ b/jstests/core/indexp.js @@ -17,7 +17,7 @@ assert.commandWorked(coll.ensureIndex({'a.b': 1})); assert.commandFailed(coll.ensureIndex({'$a': 1})); assert.commandFailed(coll.ensureIndex({'a.$b': 1})); assert.commandFailed(coll.ensureIndex({'$db': 1})); -assert.commandWorked(coll.ensureIndex({'a$ap': 1})); // $ in middle is ok +assert.commandWorked(coll.ensureIndex({'a$ap': 1})); // $ in middle is ok assert.commandWorked(coll.ensureIndex({'a.$id': 1})); // $id/$db/$ref are execptions coll.dropIndexes(); diff --git a/jstests/core/max_time_ms.js b/jstests/core/max_time_ms.js index e70ae8cb0fe..7762c8a439c 100644 --- a/jstests/core/max_time_ms.js +++ b/jstests/core/max_time_ms.js @@ -48,7 +48,7 @@ assert.doesNotThrow(function() { // t.drop(); -t.insert([{}, {}, {}]); // fast batch +t.insert([{}, {}, {}]); // fast batch t.insert([{slow: true}, {slow: true}, {slow: true}]); // slow batch cursor = t.find({ $where: function() { @@ -79,7 +79,7 @@ assert.throws(function() { // t.drop(); -t.insert([{}, {}, {}]); // fast batch +t.insert([{}, {}, {}]); // fast batch t.insert([{}, {}, {slow: true}]); // slow batch cursor = t.find({ $where: function() { @@ -348,7 +348,7 @@ assert.eq(1, t.getDB().adminCommand({configureFailPoint: "maxTimeAlwaysTimeOut", // maxTimeNeverTimeOut positive test for getmore. 
t.drop(); -t.insert([{}, {}, {}]); // fast batch +t.insert([{}, {}, {}]); // fast batch t.insert([{slow: true}, {slow: true}, {slow: true}]); // slow batch cursor = t.find({ $where: function() { diff --git a/jstests/core/mr_killop.js b/jstests/core/mr_killop.js index c4d8b666f11..52865eacbe4 100644 --- a/jstests/core/mr_killop.js +++ b/jstests/core/mr_killop.js @@ -111,7 +111,7 @@ function runMRTests(loop, childLoop) { test(loop, // map function(k, v) { return v[0]; - }, // reduce + }, // reduce null, // finalize null, // scope childLoop); @@ -120,7 +120,7 @@ function runMRTests(loop, childLoop) { test( function() { emit(this.a, 1); - }, // map + }, // map loop, // reduce null, // finalize null, // scope @@ -133,8 +133,8 @@ function runMRTests(loop, childLoop) { }, // map function(k, v) { return v[0]; - }, // reduce - null, // finalize + }, // reduce + null, // finalize {loop: loop}, // scope childLoop); } @@ -148,7 +148,7 @@ function runFinalizeTests(loop, childLoop) { }, // map function(k, v) { return v[0]; - }, // reduce + }, // reduce loop, // finalize null, // scope childLoop); @@ -163,7 +163,7 @@ function runFinalizeTests(loop, childLoop) { }, // reduce function(a, b) { loop(); - }, // finalize + }, // finalize {loop: loop}, // scope childLoop); } diff --git a/jstests/core/regex5.js b/jstests/core/regex5.js index 36274f8b3ca..6d11fce5578 100644 --- a/jstests/core/regex5.js +++ b/jstests/core/regex5.js @@ -18,17 +18,17 @@ doit = function() { assert.eq(1, t.find({x: a}).count(), "A"); assert.eq(2, t.find({x: x}).count(), "B"); - assert.eq(2, t.find({x: {$in: [x]}}).count(), "C"); // SERVER-322 - assert.eq(1, t.find({x: {$in: [a, "xyz1"]}}).count(), "D"); // SERVER-322 - assert.eq(2, t.find({x: {$in: [a, "xyz2"]}}).count(), "E"); // SERVER-322 - assert.eq(1, t.find({x: {$all: [a, x]}}).count(), "F"); // SERVER-505 - assert.eq(1, t.find({x: {$all: [a, "abc"]}}).count(), "G"); // SERVER-505 - assert.eq(0, t.find({x: {$all: [a, "ac"]}}).count(), "H"); // SERVER-505 - assert.eq(10, t.find({x: {$nin: [x]}}).count(), "I"); // SERVER-322 + assert.eq(2, t.find({x: {$in: [x]}}).count(), "C"); // SERVER-322 + assert.eq(1, t.find({x: {$in: [a, "xyz1"]}}).count(), "D"); // SERVER-322 + assert.eq(2, t.find({x: {$in: [a, "xyz2"]}}).count(), "E"); // SERVER-322 + assert.eq(1, t.find({x: {$all: [a, x]}}).count(), "F"); // SERVER-505 + assert.eq(1, t.find({x: {$all: [a, "abc"]}}).count(), "G"); // SERVER-505 + assert.eq(0, t.find({x: {$all: [a, "ac"]}}).count(), "H"); // SERVER-505 + assert.eq(10, t.find({x: {$nin: [x]}}).count(), "I"); // SERVER-322 assert.eq(11, t.find({x: {$nin: [a, "xyz1"]}}).count(), "J"); // SERVER-322 assert.eq(10, t.find({x: {$nin: [a, "xyz2"]}}).count(), "K"); // SERVER-322 - assert.eq(2, t.find({x: {$not: {$nin: [x]}}}).count(), "L"); // SERVER-322 - assert.eq(11, t.find({x: {$nin: [/^a.c/]}}).count(), "M"); // SERVER-322 + assert.eq(2, t.find({x: {$not: {$nin: [x]}}}).count(), "L"); // SERVER-322 + assert.eq(11, t.find({x: {$nin: [/^a.c/]}}).count(), "M"); // SERVER-322 }; doit(); diff --git a/jstests/core/rename.js b/jstests/core/rename.js index 3287159f850..e9646091877 100644 --- a/jstests/core/rename.js +++ b/jstests/core/rename.js @@ -53,7 +53,7 @@ printjson(b.stats()); // while (res.hasNext()) printjson(res.next()); assert.eq(1, b.count({i: i - 1})); // make sure last is there -assert.eq(0, b.count({i: 9.1})); // make sure early one is gone +assert.eq(0, b.count({i: 9.1})); // make sure early one is gone assert(db.getCollectionNames().indexOf("jstests_rename_b") >= 0); 
assert(db.getCollectionNames().indexOf("jstests_rename_a") < 0); diff --git a/jstests/dur/closeall.js b/jstests/dur/closeall.js index f20449a5ef1..014bacc0423 100644 --- a/jstests/dur/closeall.js +++ b/jstests/dur/closeall.js @@ -96,7 +96,7 @@ if (_isWindows() && getBuildInfo().bits == 32) { } else { for (var variant = 0; variant < 4; variant++) { for (var quickCommits = 0; quickCommits <= 1; quickCommits++) { // false then true - for (var paranoid = 0; paranoid <= 1; paranoid++) { // false then true + for (var paranoid = 0; paranoid <= 1; paranoid++) { // false then true f(variant, quickCommits, paranoid); sleep(500); } diff --git a/jstests/gle/updated_existing.js b/jstests/gle/updated_existing.js index ff485530e35..3838cb33b6f 100644 --- a/jstests/gle/updated_existing.js +++ b/jstests/gle/updated_existing.js @@ -19,7 +19,7 @@ while (bigString.length < 1024 * 50) for (var i = 0; i < 10000; ++i) { testDB[coll].update({"shardkey1": "test" + i, "shardkey2": "test" + i}, {$set: {"test_upsert": bigString}}, - true, // upsert + true, // upsert false); // multi assert.eq(testDB.getLastErrorObj().updatedExisting, false); } diff --git a/jstests/libs/chunk_manipulation_util.js b/jstests/libs/chunk_manipulation_util.js index f03adb1714c..a334cbe8aec 100644 --- a/jstests/libs/chunk_manipulation_util.js +++ b/jstests/libs/chunk_manipulation_util.js @@ -57,7 +57,7 @@ function moveChunkParallel(staticMongod, mongosURL, findCriteria, bounds, ns, to var moveChunkStepNames = { parsedOptions: 1, gotDistLock: 2, - startedMoveChunk: 3, // called _recvChunkStart on recipient + startedMoveChunk: 3, // called _recvChunkStart on recipient reachedSteadyState: 4, // recipient reports state is "steady" committed: 5, done: 6 diff --git a/jstests/libs/election_timing_test.js b/jstests/libs/election_timing_test.js index f462d7f2dc5..f40ad5f931b 100644 --- a/jstests/libs/election_timing_test.js +++ b/jstests/libs/election_timing_test.js @@ -218,7 +218,7 @@ ElectionTimingTest.calculateElectionTimeoutLimitMillis = function(primary) { getParameterResult.replElectionTimeoutOffsetLimitFraction; } var assertSoonIntervalMillis = 200; // from assert.js - var applierDrainWaitMillis = 1000; // from SyncTail::tryPopAndWaitForMore() + var applierDrainWaitMillis = 1000; // from SyncTail::tryPopAndWaitForMore() var electionTimeoutLimitMillis = (1 + electionTimeoutOffsetLimitFraction) * electionTimeoutMillis + applierDrainWaitMillis + assertSoonIntervalMillis; diff --git a/jstests/libs/parallelTester.js b/jstests/libs/parallelTester.js index 3639ab84bc1..2bd0f48d012 100644 --- a/jstests/libs/parallelTester.js +++ b/jstests/libs/parallelTester.js @@ -150,11 +150,11 @@ if (typeof _threadInject != "undefined") { // this has a chance to see the message "connections_opened.js", // counts connections, globally "opcounters_write_cmd.js", - "currentop.js", // SERVER-8673, plus rwlock yielding issues - "set_param1.js", // changes global state - "geo_update_btree2.js", // SERVER-11132 test disables table scans - "update_setOnInsert.js", // SERVER-9982 - "max_time_ms.js", // Sensitive to query execution time, by design + "currentop.js", // SERVER-8673, plus rwlock yielding issues + "set_param1.js", // changes global state + "geo_update_btree2.js", // SERVER-11132 test disables table scans + "update_setOnInsert.js", // SERVER-9982 + "max_time_ms.js", // Sensitive to query execution time, by design "collection_info_cache_race.js", // Requires collection exists // This overwrites MinKey/MaxKey's singleton which breaks diff --git 
a/jstests/libs/test_background_ops.js b/jstests/libs/test_background_ops.js index 7c8ebc16883..ce21a636ff6 100644 --- a/jstests/libs/test_background_ops.js +++ b/jstests/libs/test_background_ops.js @@ -276,7 +276,7 @@ var RandomFunctionContext = function(context) { Random.randCluster = function() { var numShards = 2; // Random.randInt( 1, 10 ) - var rs = false; // Random.randBool() + var rs = false; // Random.randBool() var st = new ShardingTest({shards: numShards, mongos: 4, other: {rs: rs}}); return st; diff --git a/jstests/multiVersion/2_test_launching_cluster.js b/jstests/multiVersion/2_test_launching_cluster.js index 579ec45187d..875ce292ecd 100644 --- a/jstests/multiVersion/2_test_launching_cluster.js +++ b/jstests/multiVersion/2_test_launching_cluster.js @@ -7,8 +7,8 @@ load('./jstests/multiVersion/libs/verify_versions.js'); (function() { "use strict"; // Check our latest versions - var versionsToCheck = [ "last-stable", "latest" ]; - var versionsToCheckMongos = [ "last-stable" ]; + var versionsToCheck = ["last-stable", "latest"]; + var versionsToCheckMongos = ["last-stable"]; jsTest.log("Testing legacy versions..."); diff --git a/jstests/noPassthrough/indexbg1.js b/jstests/noPassthrough/indexbg1.js index c876851bdc1..62e3617fe9f 100644 --- a/jstests/noPassthrough/indexbg1.js +++ b/jstests/noPassthrough/indexbg1.js @@ -70,7 +70,7 @@ while (1) { // if indexing finishes before we can run checks, try indexing w/ m assert(ex.executionStats.totalKeysExamined < 1000, "took too long to find 100: " + tojson(ex)); - assert.writeOK(t.remove({i: 40}, true)); // table scan + assert.writeOK(t.remove({i: 40}, true)); // table scan assert.writeOK(t.update({i: 10}, {i: -10})); // should scan 10 var id = t.find().hint({$natural: -1}).next()._id; diff --git a/jstests/noPassthrough/shard_does_not_hang_on_bad_config_server.js b/jstests/noPassthrough/shard_does_not_hang_on_bad_config_server.js index bd2ff322169..d589186b37d 100644 --- a/jstests/noPassthrough/shard_does_not_hang_on_bad_config_server.js +++ b/jstests/noPassthrough/shard_does_not_hang_on_bad_config_server.js @@ -11,8 +11,8 @@ configdb: 'localhost:1',
fromShard: 'DummyFromShard',
toShard: 'DummyToShard',
- min: { Key: -1 },
- max: { Key: 1 },
+ min: {Key: -1},
+ max: {Key: 1},
maxChunkSizeBytes: 1024,
maxTimeMS: 10000
}));
@@ -24,8 +24,8 @@ configdb: 'localhost:1',
fromShard: 'DummyFromShard',
toShard: 'DummyToShard',
- min: { Key: -1 },
- max: { Key: 1 },
+ min: {Key: -1},
+ max: {Key: 1},
maxChunkSizeBytes: 1024,
maxTimeMS: 10000
}));
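The hunk above is representative of the object-literal spacing rule the formatter now applies across jstests: padding spaces inside braces are dropped while the surrounding layout is left alone. A minimal sketch of the reformatted shape, using a hypothetical command document named `cmd` (the enclosing command in this test sits outside the hunk shown, so only the fields visible above are reproduced):

// Formatted per the new style: no padding spaces inside object-literal braces.
// (Before the change these bounds read `min: { Key: -1 }` and `max: { Key: 1 }`.)
var cmd = {
    min: {Key: -1},
    max: {Key: 1},
    maxChunkSizeBytes: 1024,
    maxTimeMS: 10000
};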
diff --git a/jstests/noPassthroughWithMongod/index_multi.js b/jstests/noPassthroughWithMongod/index_multi.js index 8d728fac8d9..68004f27678 100644 --- a/jstests/noPassthroughWithMongod/index_multi.js +++ b/jstests/noPassthroughWithMongod/index_multi.js @@ -51,7 +51,7 @@ for (var i = 90; i < 93; i++) { setupDBStr + "printjson(db.index_multi.createIndex(" + tojson(spec) + "," + "{ background: true }));" + "db.results.insert(Object.extend(" + "db.runCommand({ getlasterror: 1 }), " + tojson(spec) + ") );", - null, // port + null, // port true)); // noconnect specs.push(spec); multikey.push(i % 10 == 0 || (i + 1) % 10 == 0 || (i + 2) % 10 == 0); @@ -66,7 +66,7 @@ for (var i = 30; i < 90; i += 2) { setupDBStr + "printjson(db.index_multi.createIndex(" + tojson(spec) + ", " + "{ background: true }));" + "db.results.insert(Object.extend(" + "db.runCommand({ getlasterror: 1 }), " + tojson(spec) + ") );", - null, // port + null, // port true)); // noconnect specs.push(spec); multikey.push(i % 10 == 0 || (i + 1) % 10 == 0); @@ -80,7 +80,7 @@ for (var i = 0; i < 30; i++) { setupDBStr + "printjson(db.index_multi.createIndex(" + tojson(spec) + ", " + "{ background: true }));" + "db.results.insert(Object.extend(" + "db.runCommand({ getlasterror: 1 }), " + tojson(spec) + ") );", - null, // port + null, // port true)); // noconnect specs.push(spec); multikey.push(i % 10 == 0); diff --git a/jstests/noPassthroughWithMongod/ttl1.js b/jstests/noPassthroughWithMongod/ttl1.js index 60df6537023..6ab11e06f94 100644 --- a/jstests/noPassthroughWithMongod/ttl1.js +++ b/jstests/noPassthroughWithMongod/ttl1.js @@ -29,11 +29,11 @@ for (i = 0; i < 24; i++) { var past = new Date(now - (3600 * 1000 * i)); t.insert({x: past, y: past, z: past}); } -t.insert({a: 1}); // no x value -t.insert({x: null}); // non-date value -t.insert({x: true}); // non-date value -t.insert({x: "yo"}); // non-date value -t.insert({x: 3}); // non-date value +t.insert({a: 1}); // no x value +t.insert({x: null}); // non-date value +t.insert({x: true}); // non-date value +t.insert({x: "yo"}); // non-date value +t.insert({x: 3}); // non-date value t.insert({x: /foo/}); // non-date value assert.eq(30, t.count()); diff --git a/jstests/perf/mr_bench.js b/jstests/perf/mr_bench.js index c48a9ddbe36..ea506578d86 100644 --- a/jstests/perf/mr_bench.js +++ b/jstests/perf/mr_bench.js @@ -10,7 +10,7 @@ function getRandomStr(L) { return n; // 1-10 if (n < 36) return String.fromCharCode(n + 55); // A-Z - return String.fromCharCode(n + 61); // a-z + return String.fromCharCode(n + 61); // a-z }; while (s.length < L) s += randomchar(); diff --git a/jstests/readonly/aggregate.js b/jstests/readonly/aggregate.js index aba6a7aa3a3..d16f71ca001 100644 --- a/jstests/readonly/aggregate.js +++ b/jstests/readonly/aggregate.js @@ -69,17 +69,13 @@ runReadOnlyTest(function() { // Find titles nominated for the most awards. var mostAwardsPipeline = [ {$unwind: "$nominations"}, - {$group: { - _id: "$nominations.title", - count: {$sum: 1}}}, + {$group: {_id: "$nominations.title", count: {$sum: 1}}}, {$sort: {count: -1, _id: 1}}, {$limit: 2}, ]; - assert.docEq(readableCollection.aggregate(mostAwardsPipeline).toArray(), [ - {_id: "Spotlight", count: 3}, - {_id: "The Revenant", count: 3} - ]); + assert.docEq(readableCollection.aggregate(mostAwardsPipeline).toArray(), + [{_id: "Spotlight", count: 3}, {_id: "The Revenant", count: 3}]); // Check that pipelines fail with allowDiskUse true. We use runCommand manually because // the helper has conflicting error handling logic. 
diff --git a/jstests/readonly/catalog_ops.js b/jstests/readonly/catalog_ops.js index e41293b1d96..e219e2eb0c1 100644 --- a/jstests/readonly/catalog_ops.js +++ b/jstests/readonly/catalog_ops.js @@ -12,7 +12,8 @@ runReadOnlyTest(function() { // Catalog guarantees are neccessarily weaker in sharded systems since mongos is not // read-only aware. - if (TestData.fixture === "sharded") return; + if (TestData.fixture === "sharded") + return; var db = writableCollection.getDB(); @@ -31,16 +32,18 @@ runReadOnlyTest(function() { // Catalog guarantees are neccessarily weaker in sharded systems since mongos is not // read-only aware. - if (TestData.fixture === "sharded") return; + if (TestData.fixture === "sharded") + return; // Check that we can read our collections out. var db = readableCollection.getDB(); - var collections = db.getCollectionNames(); // runs listCollections internally. + var collections = db.getCollectionNames(); // runs listCollections internally. for (var collectionName of this.collectionNames) { - assert.contains(collectionName, collections, + assert.contains(collectionName, + collections, "expected to have a collection '" + collectionName + - "' in the output of listCollections, which was " + - tojson(collections)); + "' in the output of listCollections, which was " + + tojson(collections)); } assert.gte(collections.length, this.collectionNames.length); @@ -55,17 +58,16 @@ runReadOnlyTest(function() { // Check that we can read our indexes out. var indexes = readableCollection.getIndexes(); - var actualIndexes = indexes.map((fullSpec) => { return fullSpec.key; }); + var actualIndexes = indexes.map((fullSpec) => { + return fullSpec.key; + }); var expectedIndexes = Array.concat([{_id: 1}], this.indexSpecs); assert.docEq(actualIndexes, expectedIndexes); // Check that createIndexes fails. 
- assert.commandFailed(db.runCommand({createIndexes: this.name, - indexes: [ - {key: {d : 1}, - name: "foo"} - ]})); + assert.commandFailed( + db.runCommand({createIndexes: this.name, indexes: [{key: {d: 1}, name: "foo"}]})); } }; }()); diff --git a/jstests/readonly/geo.js b/jstests/readonly/geo.js index f8113dabb06..73e91c64eeb 100644 --- a/jstests/readonly/geo.js +++ b/jstests/readonly/geo.js @@ -35,10 +35,7 @@ runReadOnlyTest(function() { exec: function(readableCollection) { var res = readableCollection.runCommand({ geoNear: readableCollection.getName(), - near: { - type: "Point", - coordinates: [40.7211404, -73.9591494] - }, + near: {type: "Point", coordinates: [40.7211404, -73.9591494]}, spherical: true, limit: 1 }); diff --git a/jstests/readonly/lib/read_only_test.js b/jstests/readonly/lib/read_only_test.js index 0b9b1d44398..dd0b6229ace 100644 --- a/jstests/readonly/lib/read_only_test.js +++ b/jstests/readonly/lib/read_only_test.js @@ -52,11 +52,7 @@ function ShardedFixture() { } ShardedFixture.prototype.runLoadPhase = function runLoadPhase(test) { - this.shardingTest = new ShardingTest({ - nopreallocj: true, - mongos: 1, - shards: this.nShards - }); + this.shardingTest = new ShardingTest({nopreallocj: true, mongos: 1, shards: this.nShards}); this.paths = this.shardingTest.getDBPaths(); @@ -77,9 +73,11 @@ ShardedFixture.prototype.runExecPhase = function runExecPhase(test) { dbpath: this.paths[i] }; - this.shardingTest.restartMongod(i, opts, () => { - makeDirectoryReadOnly(this.paths[i]); - }); + this.shardingTest.restartMongod(i, + opts, + () => { + makeDirectoryReadOnly(this.paths[i]); + }); } jsTest.log("restarting mongos..."); @@ -101,7 +99,6 @@ ShardedFixture.prototype.runExecPhase = function runExecPhase(test) { }; function runReadOnlyTest(test) { - printjson(test); assert.eq(typeof(test.exec), "function"); diff --git a/jstests/repl/repl14.js b/jstests/repl/repl14.js index c9d39686034..5bd806ef92f 100644 --- a/jstests/repl/repl14.js +++ b/jstests/repl/repl14.js @@ -27,7 +27,7 @@ function testWithCollectionIndexIds(capped, sparse, useIds) { } assert.eq(mc.count(), 1); - s = rt.start(false); // slave + s = rt.start(false); // slave sc = s.getDB('d')['c']; // slave collection // Wait for the document to be cloned. 
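The geo.js hunk above collapses the `near` document of a geoNear command onto a single line. A minimal sketch of the reformatted command shape, assuming a hypothetical collection named `places` with a 2dsphere index on its location field (both the name and the index are assumptions for illustration; the coordinates are the ones in the hunk):

// geoNear with a GeoJSON point, spherical distance calculation, and a single result.
var res = db.runCommand({
    geoNear: "places",
    near: {type: "Point", coordinates: [40.7211404, -73.9591494]},
    spherical: true,
    limit: 1
});
assert.commandWorked(res);
printjson(res.results);  // nearest matching document plus its computed distance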
diff --git a/jstests/repl/repl2.js b/jstests/repl/repl2.js index c23c2a994c4..37071fc1960 100644 --- a/jstests/repl/repl2.js +++ b/jstests/repl/repl2.js @@ -66,5 +66,5 @@ doTest = function(signal, extraOpts) { }; -doTest(15, {"vv": null}); // SIGTERM +doTest(15, {"vv": null}); // SIGTERM doTest(9, {"vv": null, journal: null}); // SIGKILL diff --git a/jstests/repl/repl3.js b/jstests/repl/repl3.js index fb31e9208f0..f75a0dd8299 100644 --- a/jstests/repl/repl3.js +++ b/jstests/repl/repl3.js @@ -47,6 +47,6 @@ doTest = function(signal) { }; doTest(15); // SIGTERM -doTest(9); // SIGKILL +doTest(9); // SIGKILL print("repl3.js OK"); diff --git a/jstests/repl/repl5.js b/jstests/repl/repl5.js index cb26c686b15..d6aa8c3789d 100644 --- a/jstests/repl/repl5.js +++ b/jstests/repl/repl5.js @@ -3,7 +3,7 @@ soonCountAtLeast = function(db, coll, count) { assert.soon(function() { try { - // print( "count: " + s.getDB( db )[ coll ].find().count() ); + // print( "count: " + s.getDB( db )[ coll ].find().count() ); return s.getDB(db)[coll].find().itcount() >= count; } catch (e) { return false; @@ -33,5 +33,5 @@ doTest = function(signal, extraOpts) { rt.stop(); }; -doTest(15); // SIGTERM +doTest(15); // SIGTERM doTest(9, {journal: null}); // SIGKILL diff --git a/jstests/repl/repl7.js b/jstests/repl/repl7.js index 790aef03420..b82e89cf5f7 100644 --- a/jstests/repl/repl7.js +++ b/jstests/repl/repl7.js @@ -51,5 +51,5 @@ doTest = function(signal, extraOpts) { rt.stop(); }; -doTest(15); // SIGTERM +doTest(15); // SIGTERM doTest(9, {journal: null}); // SIGKILL diff --git a/jstests/replsets/oplog_format.js b/jstests/replsets/oplog_format.js index 5dc60e33434..e597197fed0 100644 --- a/jstests/replsets/oplog_format.js +++ b/jstests/replsets/oplog_format.js @@ -99,8 +99,8 @@ var lastTS = assertLastOplog({$set: {"a.0": 3}}, {_id: 1}, msg); var msg = "bad $setOnInsert"; res = assert.writeOK(coll.update({}, {$setOnInsert: {a: -1}})); assert.eq(res.nMatched, 1, "update failed for '" + msg + "': " + res.toString()); -assert.docEq({_id: 1, a: [3]}, coll.findOne({}), msg); // No-op -var otherTS = assertLastOplog({$set: {"a.0": 3}}, {_id: 1}, msg); // Nothing new +assert.docEq({_id: 1, a: [3]}, coll.findOne({}), msg); // No-op +var otherTS = assertLastOplog({$set: {"a.0": 3}}, {_id: 1}, msg); // Nothing new assert.eq(lastTS, otherTS, "new oplog was not expected -- " + msg); // No new oplog entry coll.remove({}); @@ -111,7 +111,7 @@ res = assert.writeOK(coll.update({}, {$setOnInsert: {a: 200}}, {upsert: true})); assert.eq(res.nUpserted, 1, "update failed for '" + msg + "': " + res.toString()); var id = res.getUpsertedId()._id; assert.docEq({_id: id, a: 200}, coll.findOne({}), msg); // No-op -assertLastOplog({_id: id, a: 200}, null, msg); // No new oplog entry +assertLastOplog({_id: id, a: 200}, null, msg); // No new oplog entry coll.remove({}); assert.eq(coll.count(), 0, "collection not empty-2"); diff --git a/jstests/replsets/priority_takeover_two_nodes_equal_priority.js b/jstests/replsets/priority_takeover_two_nodes_equal_priority.js index 717e9b945e7..42134f9430e 100644 --- a/jstests/replsets/priority_takeover_two_nodes_equal_priority.js +++ b/jstests/replsets/priority_takeover_two_nodes_equal_priority.js @@ -30,7 +30,7 @@ load('jstests/replsets/rslib.js'); }, 'neither of the priority 3 nodes was elected primary', 60000, // timeout - 1000 // interval + 1000 // interval ); try { diff --git a/jstests/replsets/replsetprio1.js b/jstests/replsets/replsetprio1.js index c6c8b1f93da..a68bba37028 100644 --- 
a/jstests/replsets/replsetprio1.js +++ b/jstests/replsets/replsetprio1.js @@ -27,11 +27,11 @@ // do some writes on 1 var master = replTest.getPrimary(); for (var i = 0; i < 1000; i++) { - assert.writeOK(master.getDB("foo").bar.insert({i: i}, { writeConcern: { w: 'majority' } })); + assert.writeOK(master.getDB("foo").bar.insert({i: i}, {writeConcern: {w: 'majority'}})); } for (i = 0; i < 1000; i++) { - assert.writeOK(master.getDB("bar").baz.insert({i: i}, { writeConcern: { w: 'majority' } })); + assert.writeOK(master.getDB("bar").baz.insert({i: i}, {writeConcern: {w: 'majority'}})); } // bring 2 back up, 2 should wait until caught up and then become master diff --git a/jstests/sharding/all_config_hosts_down.js b/jstests/sharding/all_config_hosts_down.js index 3abd0d14feb..2376ca063b8 100644 --- a/jstests/sharding/all_config_hosts_down.js +++ b/jstests/sharding/all_config_hosts_down.js @@ -30,7 +30,7 @@ printjson(e); // Make sure we get a transport error, and not a no-primary error - assert(e.code == 8002 || // SCCC config down, for v3.0 compatibility. + assert(e.code == 8002 || // SCCC config down, for v3.0 compatibility. e.code == 10276 || // Transport error e.code == 13328 || // Connect error e.code == ErrorCodes.HostUnreachable || diff --git a/jstests/sharding/auth_no_config_primary.js b/jstests/sharding/auth_no_config_primary.js index 4c6d04d8b1e..a4be8806f66 100644 --- a/jstests/sharding/auth_no_config_primary.js +++ b/jstests/sharding/auth_no_config_primary.js @@ -6,45 +6,44 @@ * @tags: [requires_persistence] */ (function() { -"use strict"; + "use strict"; -var st = new ShardingTest({ shards: 1, keyFile: 'jstests/libs/key1' }); + var st = new ShardingTest({shards: 1, keyFile: 'jstests/libs/key1'}); -st.s.getDB('admin').createUser({ user: 'root', pwd: 'pass', roles: ['root']}); -st.s.getDB('admin').auth('root', 'pass'); -var testDB = st.s.getDB('test'); -testDB.user.insert({ hello: 'world' }); + st.s.getDB('admin').createUser({user: 'root', pwd: 'pass', roles: ['root']}); + st.s.getDB('admin').auth('root', 'pass'); + var testDB = st.s.getDB('test'); + testDB.user.insert({hello: 'world'}); -// Kill all secondaries, forcing the current primary to step down. -st.configRS.getSecondaries().forEach(function(secondaryConn) { - MongoRunner.stopMongod(secondaryConn.port); -}); + // Kill all secondaries, forcing the current primary to step down. + st.configRS.getSecondaries().forEach(function(secondaryConn) { + MongoRunner.stopMongod(secondaryConn.port); + }); -// Test authenticate through a fresh connection. -var newConn = new Mongo(st.s.host); + // Test authenticate through a fresh connection. + var newConn = new Mongo(st.s.host); -assert.commandFailedWithCode(newConn.getDB('test').runCommand({ find: 'user' }), - ErrorCodes.Unauthorized); + assert.commandFailedWithCode(newConn.getDB('test').runCommand({find: 'user'}), + ErrorCodes.Unauthorized); -newConn.getDB('admin').auth('root', 'pass'); + newConn.getDB('admin').auth('root', 'pass'); -var res = newConn.getDB('test').user.findOne(); -assert.neq(null, res); -assert.eq('world', res.hello); + var res = newConn.getDB('test').user.findOne(); + assert.neq(null, res); + assert.eq('world', res.hello); -// Test authenticate through new mongos. -var otherMongos = MongoRunner.runMongos({ keyFile : "jstests/libs/key1", - configdb : st.s.savedOptions.configdb }); + // Test authenticate through new mongos. 
+ var otherMongos = MongoRunner.runMongos( + {keyFile: "jstests/libs/key1", configdb: st.s.savedOptions.configdb}); -assert.commandFailedWithCode(otherMongos.getDB('test').runCommand({ find: 'user' }), - ErrorCodes.Unauthorized); + assert.commandFailedWithCode(otherMongos.getDB('test').runCommand({find: 'user'}), + ErrorCodes.Unauthorized); -otherMongos.getDB('admin').auth('root', 'pass'); + otherMongos.getDB('admin').auth('root', 'pass'); -var res = otherMongos.getDB('test').user.findOne(); -assert.neq(null, res); -assert.eq('world', res.hello); + var res = otherMongos.getDB('test').user.findOne(); + assert.neq(null, res); + assert.eq('world', res.hello); -st.stop(); + st.stop(); })(); - diff --git a/jstests/sharding/coll_epoch_test1.js b/jstests/sharding/coll_epoch_test1.js index 3f6f3160b34..fdbe164c2be 100644 --- a/jstests/sharding/coll_epoch_test1.js +++ b/jstests/sharding/coll_epoch_test1.js @@ -29,7 +29,7 @@ admin.runCommand({enableSharding: coll.getDB() + ""}); // TODO(PM-85): Make sure we *always* move the primary after collection lifecyle project is complete st.ensurePrimaryShard(coll.getDB().getName(), 'shard0001'); admin.runCommand({shardCollection: coll + "", key: {_id: 1}}); -st.configRS.awaitLastOpCommitted(); // TODO: Remove after collection lifecyle project (PM-85) +st.configRS.awaitLastOpCommitted(); // TODO: Remove after collection lifecyle project (PM-85) var bulk = insertMongos.getCollection(coll + "").initializeUnorderedBulkOp(); for (var i = 0; i < 100; i++) { @@ -78,7 +78,7 @@ var getOtherShard = function(shard) { var otherShard = getOtherShard(config.databases.findOne({_id: coll.getDB() + ""}).primary); assert.commandWorked(admin.runCommand({movePrimary: coll.getDB() + "", to: otherShard})); -st.configRS.awaitLastOpCommitted(); // TODO: Remove after collection lifecyle project (PM-85) +st.configRS.awaitLastOpCommitted(); // TODO: Remove after collection lifecyle project (PM-85) jsTest.log("moved primary..."); diff --git a/jstests/sharding/merge_chunks_compound_shard_key.js b/jstests/sharding/merge_chunks_compound_shard_key.js index f6a2ef81ceb..9d6fc3aac14 100644 --- a/jstests/sharding/merge_chunks_compound_shard_key.js +++ b/jstests/sharding/merge_chunks_compound_shard_key.js @@ -4,90 +4,92 @@ // (function() { -'use strict'; - -var getShardVersion = function() { - var res = st.shard0.adminCommand({ getShardVersion: coll + "" }) - assert.commandWorked(res); - var version = res.global; - assert(version); - return version; -} - -// Merge two neighboring chunks and check post conditions. 
-var checkMergeWorked = function(lowerBound, upperBound) { - var oldVersion = getShardVersion(); - var numChunksBefore = chunks.find().itcount(); - - assert.commandWorked(admin.runCommand({ mergeChunks: coll + "", - bounds: [lowerBound, upperBound] })); - - assert.eq(numChunksBefore - 1, chunks.find().itcount()); - assert.eq(1, chunks.find({ min: lowerBound, max: upperBound }).itcount()); - - var newVersion = getShardVersion(); - assert.eq(newVersion.t, oldVersion.t); - assert.gt(newVersion.i, oldVersion.i); -} - -var st = new ShardingTest({ shards: 2, mongos: 1 }); - -var mongos = st.s; -var admin = mongos.getDB( "admin" ); -var shards = mongos.getCollection( "config.shards" ).find().toArray(); -var chunks = mongos.getCollection( "config.chunks" ); -var coll = mongos.getCollection( "foo.bar" ); - -jsTest.log("Create a sharded collection with a compound shard key."); -assert.commandWorked(admin.runCommand({ enableSharding: coll.getDB() + "" })); -printjson( admin.runCommand({ movePrimary: coll.getDB() + "", to: st.shard0.shardName }) ); -assert.commandWorked(admin.runCommand({ shardCollection: coll + "", key: { x: 1, y: 1 } })); - -// Chunks after splits: -// (MinKey, { x: 0, y: 1 }) -// ({ x: 0, y: 1 }, { x: 1, y: 0 }) -// ({ x: 1, y: 0 }, { x: 2, y: 0 }) -// ({ x: 2, y: 0 }, { x: 2, y: 1 }) -// ({ x: 2, y: 1 }, MaxKey) -jsTest.log("Create chunks."); -assert.commandWorked(admin.runCommand({ split: coll + "", middle: { x: 0, y: 1 } })); -assert.commandWorked(admin.runCommand({ split: coll + "", middle: { x: 1, y: 0 } })); -assert.commandWorked(admin.runCommand({ split: coll + "", middle: { x: 2, y: 0 } })); -assert.commandWorked(admin.runCommand({ split: coll + "", middle: { x: 2, y: 1 } })); - -jsTest.log("Insert some data into each of the chunk ranges."); -assert.writeOK(coll.insert({ x: -1, y: 2 })); -assert.writeOK(coll.insert({ x: 0, y: 2 })); -assert.writeOK(coll.insert({ x: 1, y: 2 })); -assert.writeOK(coll.insert({ x: 2, y: 1 })); -assert.writeOK(coll.insert({ x: 2, y: 3 })); - -// Chunks after merge: -// (MinKey, { x: 0, y: 1 }) -// ({ x: 0, y: 1 }, { x: 2, y: 0 }) -// ({ x: 2, y: 0 }, { x: 2, y: 1 }) -// ({ x: 2, y: 1 }, MaxKey) -jsTest.log("Merge chunks whose upper and lower bounds are compound shard keys."); -checkMergeWorked({ x: 0, y: 1 }, { x: 2, y: 0 }); - -// Chunks after merge: -// (MinKey, { x: 2, y: 0 }) -// ({ x: 2, y: 0 }, { x: 2, y: 1 }) -// ({ x: 2, y: 1 }, MaxKey) -jsTest.log("Merge chunks whose upper bound contains a compound shard key, lower bound is MinKey"); -checkMergeWorked({ x: MinKey, y: MinKey }, { x: 2, y: 0 }); - -// Chunks after merge: -// (MinKey, { x: 2, y: 0 }) -// ({ x: 2, y: 0 }, MaxKey) -jsTest.log("Merge chunks whose lower bound contains a compound shard key, upper bound is MaxKey"); -checkMergeWorked({ x: 2, y: 0 }, { x: MaxKey, y: MaxKey }); - -// Chunks after merge: -// (MinKey, MaxKey) -jsTest.log("Merge chunks whos bounds are MinKey/MaxKey, but which have a compound shard key"); -checkMergeWorked({ x: MinKey, y: MinKey }, { x: MaxKey, y: MaxKey }); - -st.stop(); + 'use strict'; + + var getShardVersion = function() { + var res = st.shard0.adminCommand({getShardVersion: coll + ""}); + assert.commandWorked(res); + var version = res.global; + assert(version); + return version; + }; + + // Merge two neighboring chunks and check post conditions. 
+ var checkMergeWorked = function(lowerBound, upperBound) { + var oldVersion = getShardVersion(); + var numChunksBefore = chunks.find().itcount(); + + assert.commandWorked( + admin.runCommand({mergeChunks: coll + "", bounds: [lowerBound, upperBound]})); + + assert.eq(numChunksBefore - 1, chunks.find().itcount()); + assert.eq(1, chunks.find({min: lowerBound, max: upperBound}).itcount()); + + var newVersion = getShardVersion(); + assert.eq(newVersion.t, oldVersion.t); + assert.gt(newVersion.i, oldVersion.i); + }; + + var st = new ShardingTest({shards: 2, mongos: 1}); + + var mongos = st.s; + var admin = mongos.getDB("admin"); + var shards = mongos.getCollection("config.shards").find().toArray(); + var chunks = mongos.getCollection("config.chunks"); + var coll = mongos.getCollection("foo.bar"); + + jsTest.log("Create a sharded collection with a compound shard key."); + assert.commandWorked(admin.runCommand({enableSharding: coll.getDB() + ""})); + printjson(admin.runCommand({movePrimary: coll.getDB() + "", to: st.shard0.shardName})); + assert.commandWorked(admin.runCommand({shardCollection: coll + "", key: {x: 1, y: 1}})); + + // Chunks after splits: + // (MinKey, { x: 0, y: 1 }) + // ({ x: 0, y: 1 }, { x: 1, y: 0 }) + // ({ x: 1, y: 0 }, { x: 2, y: 0 }) + // ({ x: 2, y: 0 }, { x: 2, y: 1 }) + // ({ x: 2, y: 1 }, MaxKey) + jsTest.log("Create chunks."); + assert.commandWorked(admin.runCommand({split: coll + "", middle: {x: 0, y: 1}})); + assert.commandWorked(admin.runCommand({split: coll + "", middle: {x: 1, y: 0}})); + assert.commandWorked(admin.runCommand({split: coll + "", middle: {x: 2, y: 0}})); + assert.commandWorked(admin.runCommand({split: coll + "", middle: {x: 2, y: 1}})); + + jsTest.log("Insert some data into each of the chunk ranges."); + assert.writeOK(coll.insert({x: -1, y: 2})); + assert.writeOK(coll.insert({x: 0, y: 2})); + assert.writeOK(coll.insert({x: 1, y: 2})); + assert.writeOK(coll.insert({x: 2, y: 1})); + assert.writeOK(coll.insert({x: 2, y: 3})); + + // Chunks after merge: + // (MinKey, { x: 0, y: 1 }) + // ({ x: 0, y: 1 }, { x: 2, y: 0 }) + // ({ x: 2, y: 0 }, { x: 2, y: 1 }) + // ({ x: 2, y: 1 }, MaxKey) + jsTest.log("Merge chunks whose upper and lower bounds are compound shard keys."); + checkMergeWorked({x: 0, y: 1}, {x: 2, y: 0}); + + // Chunks after merge: + // (MinKey, { x: 2, y: 0 }) + // ({ x: 2, y: 0 }, { x: 2, y: 1 }) + // ({ x: 2, y: 1 }, MaxKey) + jsTest.log( + "Merge chunks whose upper bound contains a compound shard key, lower bound is MinKey"); + checkMergeWorked({x: MinKey, y: MinKey}, {x: 2, y: 0}); + + // Chunks after merge: + // (MinKey, { x: 2, y: 0 }) + // ({ x: 2, y: 0 }, MaxKey) + jsTest.log( + "Merge chunks whose lower bound contains a compound shard key, upper bound is MaxKey"); + checkMergeWorked({x: 2, y: 0}, {x: MaxKey, y: MaxKey}); + + // Chunks after merge: + // (MinKey, MaxKey) + jsTest.log("Merge chunks whos bounds are MinKey/MaxKey, but which have a compound shard key"); + checkMergeWorked({x: MinKey, y: MinKey}, {x: MaxKey, y: MaxKey}); + + st.stop(); })(); diff --git a/jstests/sharding/migration_failure.js b/jstests/sharding/migration_failure.js index 4ae6c880b84..d9e98581e5a 100644 --- a/jstests/sharding/migration_failure.js +++ b/jstests/sharding/migration_failure.js @@ -3,55 +3,57 @@ // when possible // (function() { -'use strict'; + 'use strict'; -var st = new ShardingTest({shards: 2, mongos: 1}); + var st = new ShardingTest({shards: 2, mongos: 1}); -var mongos = st.s0; -var admin = mongos.getDB("admin"); -var shards = 
mongos.getCollection("config.shards").find().toArray(); -var coll = mongos.getCollection("foo.bar"); + var mongos = st.s0; + var admin = mongos.getDB("admin"); + var shards = mongos.getCollection("config.shards").find().toArray(); + var coll = mongos.getCollection("foo.bar"); -assert(admin.runCommand({enableSharding: coll.getDB() + ""}).ok); -printjson(admin.runCommand({movePrimary: coll.getDB() + "", to: shards[0]._id})); -assert(admin.runCommand({shardCollection: coll + "", key: {_id: 1}}).ok); -assert(admin.runCommand({split: coll + "", middle: {_id: 0}}).ok); + assert(admin.runCommand({enableSharding: coll.getDB() + ""}).ok); + printjson(admin.runCommand({movePrimary: coll.getDB() + "", to: shards[0]._id})); + assert(admin.runCommand({shardCollection: coll + "", key: {_id: 1}}).ok); + assert(admin.runCommand({split: coll + "", middle: {_id: 0}}).ok); -st.printShardingStatus(); + st.printShardingStatus(); -jsTest.log("Testing failed migrations..."); + jsTest.log("Testing failed migrations..."); -var version = null; -var failVersion = null; + var version = null; + var failVersion = null; -// failMigrationCommit -assert.commandWorked(st.shard0.getDB("admin").runCommand( - {configureFailPoint: 'failMigrationCommit', mode: 'alwaysOn'})); + // failMigrationCommit + assert.commandWorked(st.shard0.getDB("admin").runCommand( + {configureFailPoint: 'failMigrationCommit', mode: 'alwaysOn'})); -version = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); + version = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); -assert.commandFailed(admin.runCommand({moveChunk: coll + "", find: {_id: 0}, to: shards[1]._id})); + assert.commandFailed( + admin.runCommand({moveChunk: coll + "", find: {_id: 0}, to: shards[1]._id})); -failVersion = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); + failVersion = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); -assert.commandWorked(st.shard0.getDB("admin") - .runCommand({configureFailPoint: 'failMigrationCommit', mode: 'off'})); + assert.commandWorked(st.shard0.getDB("admin").runCommand( + {configureFailPoint: 'failMigrationCommit', mode: 'off'})); -// failApplyChunkOps -assert.commandWorked(st.shard0.getDB("admin") - .runCommand({configureFailPoint: 'failApplyChunkOps', mode: 'alwaysOn'})); + // failApplyChunkOps + assert.commandWorked(st.shard0.getDB("admin").runCommand( + {configureFailPoint: 'failApplyChunkOps', mode: 'alwaysOn'})); -version = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); + version = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); -assert.commandWorked(admin.runCommand({moveChunk: coll + "", find: {_id: 0}, to: shards[1]._id})); + assert.commandWorked( + admin.runCommand({moveChunk: coll + "", find: {_id: 0}, to: shards[1]._id})); -failVersion = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); + failVersion = st.shard0.getDB("admin").runCommand({getShardVersion: coll.toString()}); -assert.neq(version.global, failVersion.global); + assert.neq(version.global, failVersion.global); -assert.commandWorked(st.shard0.getDB("admin") - .runCommand({configureFailPoint: 'failApplyChunkOps', mode: 'off'})); + assert.commandWorked(st.shard0.getDB("admin") + .runCommand({configureFailPoint: 'failApplyChunkOps', mode: 'off'})); -st.stop(); + st.stop(); })(); diff --git a/jstests/sharding/move_stale_mongos.js b/jstests/sharding/move_stale_mongos.js index b38eb230870..5da7d1a3b56 100644 --- 
a/jstests/sharding/move_stale_mongos.js +++ b/jstests/sharding/move_stale_mongos.js @@ -16,8 +16,12 @@ var curShardIndex = 0; for (var i = 0; i < 100; i += 10) { assert.commandWorked(st.s0.getDB('admin').runCommand({split: testNs, middle: {_id: i}})); var nextShardIndex = (curShardIndex + 1) % shards.length; - assert.commandWorked(st.s1.getDB('admin').runCommand( - {moveChunk: testNs, find: {_id: i + 5}, to: shards[nextShardIndex], _waitForDelete: true})); + assert.commandWorked(st.s1.getDB('admin').runCommand({ + moveChunk: testNs, + find: {_id: i + 5}, + to: shards[nextShardIndex], + _waitForDelete: true + })); curShardIndex = nextShardIndex; } diff --git a/jstests/sharding/shard_targeting.js b/jstests/sharding/shard_targeting.js index 183adac8f1e..dddcb9035e7 100644 --- a/jstests/sharding/shard_targeting.js +++ b/jstests/sharding/shard_targeting.js @@ -20,7 +20,7 @@ s.shardColl("foo", {count: 1}, {count: ""}); for (var i = 0; i < 50; i++) { - db.foo.insert({count: i}); // chunk [MinKey, ""), including numbers + db.foo.insert({count: i}); // chunk [MinKey, ""), including numbers db.foo.insert({count: "" + i}); // chunk ["", MaxKey] } @@ -43,7 +43,7 @@ s.shardColl("foo", {mapReduce: 1}, {mapReduce: ""}); for (var i = 0; i < 50; i++) { - db.foo.insert({mapReduce: i}); // to the chunk including number + db.foo.insert({mapReduce: i}); // to the chunk including number db.foo.insert({mapReduce: "" + i}); // to the chunk including string } diff --git a/jstests/sharding/startup_with_all_configs_down.js b/jstests/sharding/startup_with_all_configs_down.js index f2eb4d67fd5..a2b47674777 100644 --- a/jstests/sharding/startup_with_all_configs_down.js +++ b/jstests/sharding/startup_with_all_configs_down.js @@ -56,7 +56,8 @@ st.restartConfigServer(i); } - // TODO: SERVER-23192 - restart mongos because it has deemend the CSRS config server set as unusable + // TODO: SERVER-23192 - restart mongos because it has deemend the CSRS config server set as + // unusable st.restartMongos(0); jsTestLog("Queries against the original mongos should work again"); diff --git a/jstests/tool/dumprestore4.js b/jstests/tool/dumprestore4.js index 58595f62383..5f0e63da52b 100644 --- a/jstests/tool/dumprestore4.js +++ b/jstests/tool/dumprestore4.js @@ -19,7 +19,7 @@ dbname2 = "NOT_" + dbname; db2 = db.getSisterDB(dbname2); -db.dropDatabase(); // make sure it's empty +db.dropDatabase(); // make sure it's empty db2.dropDatabase(); // make sure everybody's empty assert.eq(0, c.getIndexes().length, "setup1"); diff --git a/src/mongo/.clang-format b/src/mongo/.clang-format index a3b01b7f47d..04a485af039 100644 --- a/src/mongo/.clang-format +++ b/src/mongo/.clang-format @@ -63,15 +63,13 @@ UseTab: Never --- Language: JavaScript -# Disable JS formatting until the JS linter is in place (SERVER-22338) -DisableFormat: true # BasedOnStyle: Google # --- AccessModifierOffset: -1 AlignAfterOpenBracket: true AlignEscapedNewlinesLeft: true AlignOperands: false -AlignTrailingComments: false +AlignTrailingComments: true AllowAllParametersOfDeclarationOnNextLine: true AllowShortBlocksOnASingleLine: false AllowShortCaseLabelsOnASingleLine: false @@ -94,7 +92,7 @@ ConstructorInitializerIndentWidth: 4 ContinuationIndentWidth: 4 Cpp11BracedListStyle: true DerivePointerAlignment: true -#DisableFormat: false +DisableFormat: false ExperimentalAutoDetectBinPacking: false ForEachMacros: [ ] IndentCaseLabels: true diff --git a/src/mongo/shell/query.js b/src/mongo/shell/query.js index d18aeff12f6..b92f3b1f318 100644 --- a/src/mongo/shell/query.js +++ 
b/src/mongo/shell/query.js @@ -3,15 +3,15 @@ if (typeof DBQuery == "undefined") { DBQuery = function(mongo, db, collection, ns, query, fields, limit, skip, batchSize, options) { - this._mongo = mongo; // 0 - this._db = db; // 1 + this._mongo = mongo; // 0 + this._db = db; // 1 this._collection = collection; // 2 - this._ns = ns; // 3 + this._ns = ns; // 3 this._query = query || {}; // 4 - this._fields = fields; // 5 - this._limit = limit || 0; // 6 - this._skip = skip || 0; // 7 + this._fields = fields; // 5 + this._limit = limit || 0; // 6 + this._skip = skip || 0; // 7 this._batchSize = batchSize || 0; this._options = options || 0; diff --git a/src/mongo/shell/servers_misc.js b/src/mongo/shell/servers_misc.js index 8f3bff9d9e0..6f3defc54a3 100644 --- a/src/mongo/shell/servers_misc.js +++ b/src/mongo/shell/servers_misc.js @@ -201,7 +201,7 @@ allocatePorts = function(numPorts) { return ports; }; -function startParallelShell( jsCode, port, noConnect ) { +function startParallelShell(jsCode, port, noConnect) { var args = ["mongo"]; if (typeof db == "object") { diff --git a/src/mongo/shell/shardingtest.js b/src/mongo/shell/shardingtest.js index a167ac1c8e5..7f264dded30 100644 --- a/src/mongo/shell/shardingtest.js +++ b/src/mongo/shell/shardingtest.js @@ -31,7 +31,7 @@ * * config {number|Object|Array.<Object>}: number of config server or * config server configuration object(s)(*). @see MongoRunner.runMongod - * + * * (*) There are two ways For multiple configuration objects. * (1) Using the object format. Example: * @@ -57,7 +57,7 @@ * * shardOptions {Object}: same as the shards property above. * Can be used to specify options that are common all shards. - * + * * configOptions {Object}: same as the config property above. * Can be used to specify options that are common all config servers. * mongosOptions {Object}: same as the mongos property above. @@ -845,16 +845,18 @@ var ShardingTest = function(params) { this.stopMongod(n); if (otherParams.useBridge) { - var bridgeOptions = (opts !== mongod) ? opts.bridgeOptions - : mongod.fullOptions.bridgeOptions; + var bridgeOptions = + (opts !== mongod) ? opts.bridgeOptions : mongod.fullOptions.bridgeOptions; bridgeOptions = Object.merge(otherParams.bridgeOptions, bridgeOptions || {}); - bridgeOptions = Object.merge(bridgeOptions, { - hostName: otherParams.useHostname ? hostName : "localhost", - port: this._connections[n].port, - // The mongod processes identify themselves to mongobridge as host:port, where the - // host is the actual hostname of the machine and not localhost. - dest: hostName + ":" + opts.port, - }); + bridgeOptions = Object.merge( + bridgeOptions, + { + hostName: otherParams.useHostname ? hostName : "localhost", + port: this._connections[n].port, + // The mongod processes identify themselves to mongobridge as host:port, where the + // host is the actual hostname of the machine and not localhost. 
+ dest: hostName + ":" + opts.port, + }); this._connections[n] = new MongoBridge(bridgeOptions); } @@ -1182,21 +1184,23 @@ var ShardingTest = function(params) { this._configServers = []; // Using replica set for config servers - var rstOptions = { useHostName : otherParams.useHostname, - useBridge : otherParams.useBridge, - bridgeOptions : otherParams.bridgeOptions, - keyFile : keyFile, - name: testName + "-configRS", - }; + var rstOptions = { + useHostName: otherParams.useHostname, + useBridge: otherParams.useBridge, + bridgeOptions: otherParams.bridgeOptions, + keyFile: keyFile, + name: testName + "-configRS", + }; // when using CSRS, always use wiredTiger as the storage engine - var startOptions = { pathOpts: pathOpts, - // Ensure that journaling is always enabled for config servers. - journal : "", - configsvr : "", - noJournalPrealloc : otherParams.nopreallocj, - storageEngine : "wiredTiger", - }; + var startOptions = { + pathOpts: pathOpts, + // Ensure that journaling is always enabled for config servers. + journal: "", + configsvr: "", + noJournalPrealloc: otherParams.nopreallocj, + storageEngine: "wiredTiger", + }; if (otherParams.configOptions && otherParams.configOptions.binVersion) { otherParams.configOptions.binVersion = @@ -1222,7 +1226,7 @@ var ShardingTest = function(params) { var initiateTimeout = otherParams.rsOptions && otherParams.rsOptions.initiateTimeout; this.configRS.initiate(config, null, initiateTimeout); - this.configRS.getPrimary(); // Wait for master to be elected before starting mongos + this.configRS.getPrimary(); // Wait for master to be elected before starting mongos this._configDB = this.configRS.getURL(); this._configServers = this.configRS.nodes; diff --git a/src/mongo/shell/types.js b/src/mongo/shell/types.js index 74019720909..d932b212cf9 100644 --- a/src/mongo/shell/types.js +++ b/src/mongo/shell/types.js @@ -98,8 +98,8 @@ ISODate = function(isoDateStr) { if (res[11] && res[11] != 'Z') { var ofs = 0; ofs += (parseInt(res[13], 10) || 0) * 60 * 60 * 1000; // hours - ofs += (parseInt(res[14], 10) || 0) * 60 * 1000; // mins - if (res[12] == '+') // if ahead subtract + ofs += (parseInt(res[14], 10) || 0) * 60 * 1000; // mins + if (res[12] == '+') // if ahead subtract ofs *= -1; time += ofs; |
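The final hunk above only realigns the trailing comments in ISODate's timezone handling, but the sign convention those comments describe is easy to misread, so a small worked example follows. It is a standalone sketch of the same arithmetic, not a call into the shell's ISODate helper; the function name and the sample offsets are made up for illustration.

// Sketch of the offset math realigned above: a '+HH:MM' suffix means the
// local clock is ahead of UTC, so the offset is subtracted to reach UTC.
function offsetToUtcMillis(sign, hours, mins) {
    var ofs = 0;
    ofs += (parseInt(hours, 10) || 0) * 60 * 60 * 1000;  // hours
    ofs += (parseInt(mins, 10) || 0) * 60 * 1000;        // mins
    if (sign == '+')                                      // if ahead, subtract
        ofs *= -1;
    return ofs;
}

// A '+02:00' zone is two hours ahead of UTC, so two hours are subtracted.
assert.eq(-2 * 60 * 60 * 1000, offsetToUtcMillis('+', '02', '00'));
// A '-05:30' zone is behind UTC, so the offset is added instead.
assert.eq(5.5 * 60 * 60 * 1000, offsetToUtcMillis('-', '05', '30'));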
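Stepping back, the src/mongo/.clang-format hunk above removes the DisableFormat escape hatch for JavaScript and turns on AlignTrailingComments, which accounts for most of the churn across the JavaScript files in this diff. The skeleton below is a hypothetical test, not one taken from this change, illustrating the layout the updated configuration produces: an indented IIFE body under 'use strict', cuddled object literals, and trailing comments padded to a common column.

// Hypothetical test skeleton laid out per the re-enabled JavaScript rules.
(function() {
    'use strict';

    var states = [
        {_id: 0, state: 'new'},   // consecutive trailing comments
        {_id: 1, state: 'done'},  // are padded to a single column
    ];

    // Object literals keep their braces cuddled: {state: 'done'}, not { state: 'done' }.
    var query = {state: 'done'};

    var matching = states.filter(function(doc) {
        return doc.state === query.state;
    });

    assert.eq(1, matching.length);  // only {_id: 1} is in the 'done' state
    assert.eq(1, matching[0]._id);
})();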