summaryrefslogtreecommitdiff
path: root/jstests/sharding/defragment_large_collection.js
blob: 20210762258090a4b06512b2347ec9ec61629e50 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
/**
 * Creates one or more large sharded collections with a randomized number of
 * shards, chunks, zones, and document sizes, triggers defragmentation on each
 * of them via configureCollectionBalancing, and verifies the resulting chunk
 * layout once defragmentation completes.
 *
 * @tags: [
 *  requires_fcv_53,
 *  featureFlagPerCollBalancingSettings,
 * ]
 */

(function() {
'use strict';

load("jstests/sharding/libs/find_chunks_util.js");
load("jstests/sharding/libs/defragmentation_util.js");

Random.setRandomSeed();

// Test parameters
const numShards = Random.randInt(7) + 1;
const maxChunkFillMB = 20;
const maxChunkSizeMB = 30;
const chunkSpacing = 1000;

jsTest.log("Creating new sharding test with " + numShards + " shards.");

const st = new ShardingTest({
    mongos: 1,
    shards: numShards,
    other: {
        enableBalancer: true,
        configOptions: {
            setParameter: {
                logComponentVerbosity: tojson({sharding: {verbosity: 3}}),
                chunkDefragmentationThrottlingMS: 0
            }
        },
    }
});

// Creates `numCollections` randomly fragmented collections under `dbName`,
// enables defragmentation on all of them, then waits for each to finish and
// checks its post-defragmentation state.
let runTest = function(numCollections, dbName) {
    jsTest.log(`Running test with ${numCollections} collections.`);

    // Set up the database for the test.
    assert.commandWorked(st.s.adminCommand({enableSharding: dbName}));
    const testDB = st.getDB(dbName);
    const collPrefix = "testColl_";

    const collections = [];
    for (let collIdx = 0; collIdx < numCollections; ++collIdx) {
        // Randomize each collection's fragmentation profile.
        const numChunks = Random.randInt(28) + 2;  // between 2 and 29
        const numZones = Random.randInt(numChunks / 2);
        const docSizeBytes = Random.randInt(1024 * 1024) + 50;

        const coll = testDB[collPrefix + collIdx];
        defragmentationUtil.createFragmentedCollection(st.s,
                                                       coll.getFullName(),
                                                       numChunks,
                                                       maxChunkFillMB,
                                                       numZones,
                                                       docSizeBytes,
                                                       chunkSpacing,
                                                       true);
        collections.push(coll);
    }

    st.printShardingStatus();

    // Kick off defragmentation on every collection before waiting on any of
    // them, so all collections are being processed while we wait.
    for (const coll of collections) {
        assert.commandWorked(st.s.adminCommand({
            configureCollectionBalancing: coll.getFullName(),
            defragmentCollection: true,
            chunkSize: maxChunkSizeMB,
        }));
    }

    for (const coll of collections) {
        const ns = coll.getFullName();

        // Wait for defragmentation to end and check collection final state.
        defragmentationUtil.waitForEndOfDefragmentation(st.s, ns);
        const finalNumberChunks = findChunksUtil.countChunksForNs(st.s.getDB('config'), ns);
        jsTest.log("Finished defragmentation of collection " + coll + " with " + finalNumberChunks +
                   " chunks.");
        defragmentationUtil.checkPostDefragmentationState(
            st.configRS.getPrimary(), st.s, ns, maxChunkSizeMB, "key");
    }

    st.printShardingStatus();
};

// Exercise both the single-collection case and the concurrent multi-collection
// case, each against its own database.
runTest(1, "singleCollection");
runTest(3, "threeCollections");

st.stop();
})();