summaryrefslogtreecommitdiff
path: root/jstests/sharding/shard_existing.js
diff options
context:
space:
mode:
authorclang-format-7.0.1 <adam.martin@10gen.com>2019-07-26 18:20:35 -0400
committerADAM David Alan Martin <adam.martin@10gen.com>2019-07-27 11:02:23 -0400
commit134a4083953270e8a11430395357fb70a29047ad (patch)
treedd428e1230e31d92b20b393dfdc17ffe7fa79cb6 /jstests/sharding/shard_existing.js
parent1e46b5049003f427047e723ea5fab15b5a9253ca (diff)
downloadmongo-134a4083953270e8a11430395357fb70a29047ad.tar.gz
SERVER-41772 Apply clang-format 7.0.1 to the codebase
Diffstat (limited to 'jstests/sharding/shard_existing.js')
-rw-r--r--jstests/sharding/shard_existing.js54
1 file changed, 27 insertions, 27 deletions
diff --git a/jstests/sharding/shard_existing.js b/jstests/sharding/shard_existing.js
index 1c8415662a7..8a5c19d1eb9 100644
--- a/jstests/sharding/shard_existing.js
+++ b/jstests/sharding/shard_existing.js
@@ -1,37 +1,37 @@
(function() {
'use strict';

// Regression test: shard an *already populated* collection and verify that the
// initial splitting produces roughly the expected number of chunks.
// chunkSize: 1 (MB) keeps chunks small so a few MB of data forces many splits.
const st = new ShardingTest({name: "shard_existing", shards: 2, mongos: 1, other: {chunkSize: 1}});
const testDB = st.getDB("test");

const stringSize = 10000;
const numDocs = 2000;

// Build one large filler string so each document is big.
// Note: new Array(n).toString() yields a string of (n - 1) commas.
const padding = new Array(stringSize).toString();
const docSize = Object.bsonsize({_id: numDocs, s: padding});
const totalSize = docSize * numDocs;
print(`NumDocs: ${numDocs} DocSize: ${docSize} TotalSize: ${totalSize}`);

// Populate the (still unsharded) collection with a single unordered bulk write.
const bulk = testDB.data.initializeUnorderedBulkOp();
for (let i = 0; i < numDocs; i++) {
    bulk.insert({_id: i, s: padding});
}
assert.writeOK(bulk.execute());

// The reported collection size must be at least the raw document bytes we wrote.
const avgObjSize = testDB.data.stats().avgObjSize;
const dataSize = testDB.data.stats().size;
assert.lte(totalSize, dataSize);

// Now enable sharding and shard the pre-existing collection on _id.
st.adminCommand({enablesharding: "test"});
st.ensurePrimaryShard('test', st.shard1.shardName);
const res = st.adminCommand({shardcollection: "test.data", key: {_id: 1}});
printjson(res);

// Expect roughly one chunk per half chunk size (1MB / 2 = 512KB) of data,
// allowing a tolerance of one chunk either way.
const numChunks = st.config.chunks.find({ns: 'test.data'}).itcount();
const guess = Math.ceil(dataSize / (512 * 1024 + avgObjSize));
assert(Math.abs(numChunks - guess) < 2, "not right number of chunks");

st.stop();
})();