summary refs log tree commit diff
path: root/jstests/sharding/return_partial_shards_down.js
diff options
context:
space:
mode:
Diffstat (limited to 'jstests/sharding/return_partial_shards_down.js')
-rw-r--r--  jstests/sharding/return_partial_shards_down.js  |  35
1 file changed, 11 insertions, 24 deletions
diff --git a/jstests/sharding/return_partial_shards_down.js b/jstests/sharding/return_partial_shards_down.js
index d2519f0ae5e..a8eca975283 100644
--- a/jstests/sharding/return_partial_shards_down.js
+++ b/jstests/sharding/return_partial_shards_down.js
@@ -2,9 +2,7 @@
// Tests that zero results are correctly returned with returnPartial and shards down
//
-var st = new ShardingTest({shards : 3,
- mongos : 1,
- other : {mongosOptions : {verbose : 2}}});
+var st = new ShardingTest({shards: 3, mongos: 1, other: {mongosOptions: {verbose: 2}}});
// Stop balancer, we're doing our own manual chunk distribution
st.stopBalancer();
@@ -14,42 +12,31 @@ var config = mongos.getDB("config");
var admin = mongos.getDB("admin");
var shards = config.shards.find().toArray();
-for ( var i = 0; i < shards.length; i++) {
+for (var i = 0; i < shards.length; i++) {
shards[i].conn = new Mongo(shards[i].host);
}
var collOneShard = mongos.getCollection("foo.collOneShard");
var collAllShards = mongos.getCollection("foo.collAllShards");
-printjson(admin.runCommand({enableSharding : collOneShard.getDB() + ""}));
-printjson(admin.runCommand({movePrimary : collOneShard.getDB() + "",
- to : shards[0]._id}));
+printjson(admin.runCommand({enableSharding: collOneShard.getDB() + ""}));
+printjson(admin.runCommand({movePrimary: collOneShard.getDB() + "", to: shards[0]._id}));
-printjson(admin.runCommand({shardCollection : collOneShard + "",
- key : {_id : 1}}));
-printjson(admin.runCommand({shardCollection : collAllShards + "",
- key : {_id : 1}}));
+printjson(admin.runCommand({shardCollection: collOneShard + "", key: {_id: 1}}));
+printjson(admin.runCommand({shardCollection: collAllShards + "", key: {_id: 1}}));
// Split and move the "both shard" collection to both shards
-printjson(admin.runCommand({split : collAllShards + "",
- middle : {_id : 0}}));
-printjson(admin.runCommand({split : collAllShards + "",
- middle : {_id : 1000}}));
-printjson(admin.runCommand({moveChunk : collAllShards + "",
- find : {_id : 0},
- to : shards[1]._id}));
-printjson(admin.runCommand({moveChunk : collAllShards + "",
- find : {_id : 1000},
- to : shards[2]._id}));
+printjson(admin.runCommand({split: collAllShards + "", middle: {_id: 0}}));
+printjson(admin.runCommand({split: collAllShards + "", middle: {_id: 1000}}));
+printjson(admin.runCommand({moveChunk: collAllShards + "", find: {_id: 0}, to: shards[1]._id}));
+printjson(admin.runCommand({moveChunk: collAllShards + "", find: {_id: 1000}, to: shards[2]._id}));
// Collections are now distributed correctly
jsTest.log("Collections now distributed correctly.");
st.printShardingStatus();
-var inserts = [{_id : -1},
- {_id : 1},
- {_id : 1000}];
+var inserts = [{_id: -1}, {_id: 1}, {_id: 1000}];
collOneShard.insert(inserts);
assert.writeOK(collAllShards.insert(inserts));