// jstests/sharding/findandmodify2.js
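// Tests that large updates on sharded collections trigger chunk splits on all four
// write paths: plain update, update with upsert, findAndModify, and findAndModify
// with upsert.
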
// chunkSize: 1 caps the maximum chunk size at 1MB so that splits occur with
// relatively little data.
var s = new ShardingTest(
    {name: "find_and_modify_sharded_2", shards: 2, mongos: 1, other: {chunkSize: 1}});
s.adminCommand({enablesharding: "test"});

var db = s.getDB("test");
s.ensurePrimaryShard('test', 'shard0001');
var primary = s.getPrimaryShard("test").getDB("test");
var secondary = s.getOther(primary).getDB("test");

var n = 100;
var collection = "stuff";
var minChunks = 2;
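// Each collection ends up with n documents whose 'big' field is ~32KB (~3.2MB per
// collection), well above the 1MB chunk size, so every collection is expected to
// split into at least minChunks chunks.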

var col_update = collection + '_col_update';
var col_update_upsert = col_update + '_upsert';
var col_fam = collection + '_col_fam';
var col_fam_upsert = col_fam + '_upsert';

var big = "x";

print("---------- Creating large payload...");
for (var i = 0; i < 15; i++) {
    big += big;
}
print("---------- Done.");

// drop the collections
db[col_update].drop();
db[col_update_upsert].drop();
db[col_fam].drop();
db[col_fam_upsert].drop();

// shard each collection on _id
s.adminCommand({shardcollection: 'test.' + col_update, key: {_id: 1}});
s.adminCommand({shardcollection: 'test.' + col_update_upsert, key: {_id: 1}});
s.adminCommand({shardcollection: 'test.' + col_fam, key: {_id: 1}});
s.adminCommand({shardcollection: 'test.' + col_fam_upsert, key: {_id: 1}});

// update via findAndModify
function via_fam() {
    for (var i = 0; i < n; i++) {
        db[col_fam].save({_id: i});
    }

    for (var i = 0; i < n; i++) {
        db[col_fam].findAndModify({query: {_id: i}, update: {$set: {big: big}}});
    }
}

// upsert via findAndModify
function via_fam_upsert() {
    for (var i = 0; i < n; i++) {
        db[col_fam_upsert].findAndModify(
            {query: {_id: i}, update: {$set: {big: big}}, upsert: true});
    }
}

// update data using basic update
function via_update() {
    for (var i = 0; i < n; i++) {
        db[col_update].save({_id: i});
    }

    for (var i = 0; i < n; i++) {
        db[col_update].update({_id: i}, {$set: {big: big}});
    }
}

// upsert data using basic update
function via_update_upsert() {
    for (var i = 0; i < n; i++) {
        db[col_update_upsert].update({_id: i}, {$set: {big: big}}, true /* upsert */);
    }
}

print("---------- Update via findAndModify...");
via_fam();
print("---------- Done.");

print("---------- Upsert via findAndModify...");
via_fam_upsert();
print("---------- Done.");

print("---------- Basic update...");
via_update();
print("---------- Done.");

print("---------- Basic update with upsert...");
via_update_upsert();
print("---------- Done.");

print("---------- Printing chunks:");
s.printChunks();

print("---------- Verifying that both codepaths resulted in splits...");
assert.gte(s.config.chunks.count({"ns": "test." + col_fam}),
           minChunks,
           "findAndModify update code path didn't result in splits");
assert.gte(s.config.chunks.count({"ns": "test." + col_fam_upsert}),
           minChunks,
           "findAndModify upsert code path didn't result in splits");
assert.gte(s.config.chunks.count({"ns": "test." + col_update}),
           minChunks,
           "update code path didn't result in splits");
assert.gte(s.config.chunks.count({"ns": "test." + col_update_upsert}),
           minChunks,
           "upsert code path didn't result in splits");

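// Dump the collection stats for debugging.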
printjson(db[col_update].stats());

// TODO:
// - ensure that all chunks stay smaller than the configured chunk size
// - make sure no chunk is trivially small
// - cover update both without and with upsert

s.stop();