summaryrefslogtreecommitdiff
path: root/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
blob: cabbc694ec4c16898c66a90ddd3260f55e901d41 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
require('~/extensions/array');
require('~/filtered_search/filtered_search_token_keys');
require('~/filtered_search/filtered_search_tokenizer');

(() => {
  describe('Filtered Search Tokenizer', () => {
    describe('processTokens', () => {
      // Shared assertion for one parsed token's { key, value, symbol } triplet.
      const checkToken = (token, key, value, symbol) => {
        expect(token.key).toBe(key);
        expect(token.value).toBe(value);
        expect(token.symbol).toBe(symbol);
      };

      it('returns for input containing only search value', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('searchTerm');

        expect(searchToken).toBe('searchTerm');
        expect(tokens.length).toBe(0);
        expect(lastToken).toBe(searchToken);
      });

      it('returns for input containing only tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');

        expect(searchToken).toBe('');
        expect(tokens.length).toBe(4);
        expect(tokens[3]).toBe(lastToken);

        checkToken(tokens[0], 'author', 'root', '@');
        checkToken(tokens[1], 'label', '"Very Important"', '~');
        checkToken(tokens[2], 'milestone', 'v1.0', '%');
        checkToken(tokens[3], 'assignee', 'none', '');
      });

      it('returns for input starting with search value and ending with tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('searchTerm anotherSearchTerm milestone:none');

        expect(searchToken).toBe('searchTerm anotherSearchTerm');
        expect(tokens.length).toBe(1);
        expect(tokens[0]).toBe(lastToken);
        checkToken(tokens[0], 'milestone', 'none', '');
      });

      it('returns for input starting with tokens and ending with search value', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('assignee:@user searchTerm');

        expect(searchToken).toBe('searchTerm');
        expect(tokens.length).toBe(1);
        checkToken(tokens[0], 'assignee', 'user', '@');
        expect(lastToken).toBe(searchToken);
      });

      it('returns for input containing search value wrapped between tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');

        expect(searchToken).toBe('searchTerm anotherSearchTerm');
        expect(tokens.length).toBe(3);
        expect(tokens[2]).toBe(lastToken);

        checkToken(tokens[0], 'author', 'root', '@');
        checkToken(tokens[1], 'label', '"Won\'t fix"', '~');
        checkToken(tokens[2], 'milestone', 'none', '');
      });

      it('returns for input containing search value in between tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');

        expect(searchToken).toBe('searchTerm anotherSearchTerm');
        expect(tokens.length).toBe(3);
        expect(tokens[2]).toBe(lastToken);

        checkToken(tokens[0], 'author', 'root', '@');
        checkToken(tokens[1], 'assignee', 'none', '');
        checkToken(tokens[2], 'label', 'Doing', '~');
      });

      it('returns search value for invalid tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('fake:token');

        // An unrecognized key is treated as plain search text, not a token.
        expect(lastToken).toBe('fake:token');
        expect(searchToken).toBe('fake:token');
        expect(tokens.length).toEqual(0);
      });

      it('returns search value and token for mix of valid and invalid tokens', () => {
        const { searchToken, tokens, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('label:real fake:token');

        expect(tokens.length).toEqual(1);
        checkToken(tokens[0], 'label', 'real', '');
        expect(lastToken).toBe('fake:token');
        expect(searchToken).toBe('fake:token');
      });

      it('returns search value for invalid symbols', () => {
        const { searchToken, lastToken } = gl.FilteredSearchTokenizer
          .processTokens('std::includes');

        expect(lastToken).toBe('std::includes');
        expect(searchToken).toBe('std::includes');
      });

      it('removes duplicated values', () => {
        const { tokens } = gl.FilteredSearchTokenizer
          .processTokens('label:~foo label:~foo');

        expect(tokens.length).toBe(1);
        checkToken(tokens[0], 'label', 'foo', '~');
      });
    });
  });
})();