// jstests/sharding/split_with_force_small.js
//
// Tests autosplit locations with force : true, for small collections
//

var options = { chunkSize: 1, // MB
                mongosOptions : { noAutoSplit : "" }
              };
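// Auto-splitting is turned off on the mongos, so every chunk split in this test
// comes from an explicit split command.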

var st = new ShardingTest({ shards : 1, mongos : 1, other : options });
st.stopBalancer();
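// With a single shard and the balancer stopped, no migrations can interfere with
// the chunk layout checked below.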

var mongos = st.s0;
var admin = mongos.getDB( "admin" );
var config = mongos.getDB( "config" );
var shardAdmin = st.shard0.getDB( "admin" );
var coll = mongos.getCollection( "foo.bar" );

assert( admin.runCommand({ enableSharding : coll.getDB() + "" }).ok );
assert( admin.runCommand({ shardCollection : coll + "", key : { _id : 1 } }).ok );
assert( admin.runCommand({ split : coll + "", middle : { _id : 0 } }).ok );
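// The manual split at { _id : 0 } leaves two chunks: (MinKey, 0) for the filler
// data below and [0, MaxKey) for the docs under test.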

jsTest.log( "Insert a bunch of data into the low chunk of a collection," +
            " to prevent relying on stats." );

// Build a 128-byte filler string ("x" doubled 7 times); attached to each of the
// 1024 low-chunk docs it adds roughly 128KB of data, skewing the collection's
// average object size away from the tiny docs inserted into the high chunk below.
var data128k = "x";
for ( var i = 0; i < 7; i++ ) data128k += data128k;

var bulk = coll.initializeUnorderedBulkOp();
for ( var i = 0; i < 1024; i++ ) {
    bulk.insert({ _id : -(i + 1), filler : data128k });
}
assert.writeOK(bulk.execute());

jsTest.log( "Insert 32 docs into the high chunk of a collection" );

bulk = coll.initializeUnorderedBulkOp();
for ( var i = 0; i < 32; i++ ) {
    bulk.insert({ _id : i });
}
assert.writeOK(bulk.execute());
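// All 32 docs ( _id 0 .. 31 ) land in the single high chunk [0, MaxKey).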

jsTest.log( "Split off MaxKey chunk..." );

assert( admin.runCommand({ split : coll + "", middle : { _id : 32 } }).ok );
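// [0, 32) now holds exactly the 32 small docs, with an empty [32, MaxKey) chunk above it.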

jsTest.log( "Keep splitting chunk multiple times..." );

st.printShardingStatus();

for ( var i = 0; i < 5; i++ ) {
    assert( admin.runCommand({ split : coll + "", find : { _id : 0 } }).ok );
    st.printShardingStatus();
}

// After 5 splits the chunk containing { _id : 0 } is down to a single document
// (2^5 = 32), so a sixth split must fail
assert( !admin.runCommand({ split : coll + "", find : { _id : 0 } }).ok );

// Pull the metadata for every chunk in [0, 32) out of config.chunks, ordered by min bound
var chunks = config.chunks.find({ 'min._id' : { $gte : 0, $lt : 32 } }).sort({ min : 1 }).toArray();
printjson( chunks );
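// Sanity check: five successful splits of the original [0, 32) chunk should
// leave six chunks covering that range.
assert.eq( 6, chunks.length );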

// Make sure the chunk sizes grow by 2x (except the first): 1, 1, 2, 4, 8, 16 docs
var nextSize = 1;
for ( var i = 0; i < chunks.length; i++ ) {
    assert.eq( coll.count({ _id : { $gte : chunks[i].min._id, $lt : chunks[i].max._id } }),
               nextSize );
    if ( i != 0 ) nextSize += nextSize;
}

st.stop();