summaryrefslogtreecommitdiff
path: root/jstests/sharding/split_with_force_small.js
blob: 388f1e853006e5b85f1061bbfde7041e9386996e (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
//
// Tests splitVector locations with force : true, for small collections
//
(function() {
'use strict';

// One shard is enough: the test only exercises the splitVector/force path, not
// migration. chunkSize: 1 (MB) keeps split thresholds small-scale.
var st = new ShardingTest({shards: 1, mongos: 1, other: {chunkSize: 1}});

var mongos = st.s0;
var admin = mongos.getDB("admin");
var config = mongos.getDB("config");
var coll = mongos.getCollection("foo.bar");

// Shard on _id and pre-split at 0 so negative and non-negative keys land in
// separate chunks.
assert.commandWorked(admin.runCommand({enableSharding: coll.getDB() + ""}));
assert.commandWorked(admin.runCommand({shardCollection: coll + "", key: {_id: 1}}));
assert.commandWorked(admin.runCommand({split: coll + "", middle: {_id: 0}}));

jsTest.log("Insert a bunch of data into the low chunk of a collection," +
           " to prevent relying on stats.");

var bulk = coll.initializeUnorderedBulkOp();
for (var i = 0; i < 1024; i++) {
    bulk.insert({_id: -(i + 1)});
}
assert.commandWorked(bulk.execute());

jsTest.log("Insert 32 docs into the high chunk of a collection");

bulk = coll.initializeUnorderedBulkOp();
for (var i = 0; i < 32; i++) {
    bulk.insert({_id: i});
}
assert.commandWorked(bulk.execute());

jsTest.log("Split off MaxKey chunk...");

// Isolate the 32-doc range as the chunk [0, 32) so the forced splits below
// operate on a known document count.
assert.commandWorked(admin.runCommand({split: coll + "", middle: {_id: 32}}));

jsTest.log("Keep splitting chunk multiple times...");

st.printShardingStatus();

// Each forced split carves off roughly half of the chunk containing _id: 0,
// so 32 docs allow exactly five successful splits (32 -> 16 -> 8 -> 4 -> 2 -> 1).
for (var i = 0; i < 5; i++) {
    assert.commandWorked(admin.runCommand({split: coll + "", find: {_id: 0}}));
    st.printShardingStatus();
}

// Make sure we can't split further than 5 (2^5) times
assert.commandFailed(admin.runCommand({split: coll + "", find: {_id: 0}}));

var chunks = config.chunks.find({'min._id': {$gte: 0, $lt: 32}}).sort({min: 1}).toArray();
printjson(chunks);

// Make sure the chunks grow by 2x (except the first); expected doc counts in
// ascending min order are 1, 1, 2, 4, 8, 16.
// BUG FIX: the loop bound previously read `chunks.size`, which is undefined on
// a JS array (arrays expose `length`), so the loop body — and therefore every
// one of these assertions — never executed.
var nextSize = 1;
for (var i = 0; i < chunks.length; i++) {
    assert.eq(coll.count({_id: {$gte: chunks[i].min._id, $lt: chunks[i].max._id}}), nextSize);
    if (i != 0)
        nextSize += nextSize;
}

st.stop();
})();