blob: 6a1bb0294b0c3a8dc83982d572b39ee74dda1d41 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
|
/**
 * There is no limit for the total size or the number of unique terms for text index.
 *
 * Each insert involves creating big text index which runs slowly enough to get
 * killed by a stepdown before any attempt can finish on slower variants.
 * @tags: [does_not_support_stepdowns]
 */
(function() {
"use strict";

const replTest = new ReplSetTest({nodes: 3});
replTest.startSet();
replTest.initiate();

const db = replTest.getPrimary().getDB("test");
const t = db.text_index_limits;
t.drop();

assert.commandWorked(t.createIndex({comments: "text"}));
replTest.awaitReplication();

// Char codes for lowercase 'a' (inclusive) and one past 'z' (exclusive),
// used to enumerate all-lowercase words of a fixed length.
const kCharA = 97;
const kCharAfterZ = 123;

// 1. Test number of unique terms exceeds 400,000.
// All 4-letter lowercase words: 26^4 = 456,976 > 400,000 unique terms.
// Collect words in an array and join once, rather than repeated string
// concatenation across ~457k iterations.
const uniqueWords = [];
for (let ch1 = kCharA; ch1 < kCharAfterZ; ch1++) {
    for (let ch2 = kCharA; ch2 < kCharAfterZ; ch2++) {
        for (let ch3 = kCharA; ch3 < kCharAfterZ; ch3++) {
            for (let ch4 = kCharA; ch4 < kCharAfterZ; ch4++) {
                uniqueWords.push(String.fromCharCode(ch1, ch2, ch3, ch4));
            }
        }
    }
}
const commentsWithALotOfUniqueWords = uniqueWords.join(" ") + " ";

assert.commandWorked(db.runCommand(
    {insert: t.getName(), documents: [{_id: 1, comments: commentsWithALotOfUniqueWords}]}));

// 2. Test total size of index keys for unique terms exceeds 4MB.
// All 3-letter lowercase words: 26^3 = 17,576 < 400,000 unique terms, but each
// term is padded to 403 bytes, so the combined key size exceeds 4MB.
const prefix = "a".repeat(400);
const largeWords = [];
for (let ch1 = kCharA; ch1 < kCharAfterZ; ch1++) {
    for (let ch2 = kCharA; ch2 < kCharAfterZ; ch2++) {
        for (let ch3 = kCharA; ch3 < kCharAfterZ; ch3++) {
            largeWords.push(prefix + String.fromCharCode(ch1, ch2, ch3));
        }
    }
}
const commentsWithWordsOfLargeSize = largeWords.join(" ") + " ";

assert.commandWorked(db.runCommand(
    {insert: t.getName(), documents: [{_id: 2, comments: commentsWithWordsOfLargeSize}]}));

replTest.stopSet();
})();
|