diff options
author | Bikash Chandra <bikash.chandra@mongodb.com> | 2021-10-15 18:27:29 +0530 |
---|---|---|
committer | Evergreen Agent <no-reply@evergreen.mongodb.com> | 2021-10-26 12:47:36 +0000 |
commit | e1538a57f27bd6b9ae5e6606b89cf0bd158493c4 (patch) | |
tree | a9d601e4cd1f4c28fb58e1ed6ccbabdcadc5194f | |
parent | a5f9db790c87bd9bab410ee7d8daaa2b938c1d6f (diff) | |
download | mongo-e1538a57f27bd6b9ae5e6606b89cf0bd158493c4.tar.gz |
SERVER-60739 Reduce the chunk size in timeseries_balancer.js to avoid heavy resource consumption
(cherry picked from commit c9feb18654a0d3cf1040340777f5fc405dae5e04)
-rw-r--r-- | jstests/sharding/timeseries_balancer.js | 4 |
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/jstests/sharding/timeseries_balancer.js b/jstests/sharding/timeseries_balancer.js index b547f93a57d..fa044229d2c 100644 --- a/jstests/sharding/timeseries_balancer.js +++ b/jstests/sharding/timeseries_balancer.js @@ -21,7 +21,7 @@ const timeField = 'time'; const metaField = 'hostid'; // Connections. -const st = new ShardingTest({shards: 2, rs: {nodes: 2}}); +const st = new ShardingTest({shards: 2, rs: {nodes: 2}, other: {chunkSize: 1}}); const mongos = st.s0; // Sanity checks. @@ -65,7 +65,7 @@ function runTest(shardKey) { // Insert a large dataset so that the balancer is guranteed to split the chunks. let bulk = coll.initializeUnorderedBulkOp(); - const numDocs = 50000; + const numDocs = 1000; const firstBatch = generateBatch(numDocs); for (let doc of firstBatch) { bulk.insert(doc); |