//= require extensions/array
//= require filtered_search/filtered_search_token_keys
//= require filtered_search/filtered_search_tokenizer
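//
// The specs below exercise gl.FilteredSearchTokenizer.processTokens. Judging
// only from the assertions (the tokenizer implementation itself is not shown
// here), the result object appears to have this shape:
//
//   {
//     searchToken: 'free text not matched by any filter',   // '' when absent
//     tokens: [{ key: 'author', value: 'root', symbol: '@' }, ...],
//     lastToken: /* last entry of tokens, or searchToken when the input ends in free text */
//   }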

(() => {
  describe('Filtered Search Tokenizer', () => {
    describe('processTokens', () => {
      it('returns for input containing only a search value', () => {
        const results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
        expect(results.searchToken).toBe('searchTerm');
        expect(results.tokens.length).toBe(0);
        expect(results.lastToken).toBe(results.searchToken);
      });

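      // Token values keep any surrounding quotes (label:~"Very Important" yields
      // the value '"Very Important"'), while the leading symbol (@, ~, %) is
      // split out into `symbol`; unprefixed values such as `none` leave it empty.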
      it('returns for input containing only tokens', () => {
        const results = gl.FilteredSearchTokenizer
          .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');
        expect(results.searchToken).toBe('');
        expect(results.tokens.length).toBe(4);
        expect(results.tokens[3]).toBe(results.lastToken);

        expect(results.tokens[0].key).toBe('author');
        expect(results.tokens[0].value).toBe('root');
        expect(results.tokens[0].symbol).toBe('@');

        expect(results.tokens[1].key).toBe('label');
        expect(results.tokens[1].value).toBe('"Very Important"');
        expect(results.tokens[1].symbol).toBe('~');

        expect(results.tokens[2].key).toBe('milestone');
        expect(results.tokens[2].value).toBe('v1.0');
        expect(results.tokens[2].symbol).toBe('%');

        expect(results.tokens[3].key).toBe('assignee');
        expect(results.tokens[3].value).toBe('none');
        expect(results.tokens[3].symbol).toBe('');
      });

      it('returns for input starting with search value and ending with tokens', () => {
        const results = gl.FilteredSearchTokenizer
          .processTokens('searchTerm anotherSearchTerm milestone:none');
        expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
        expect(results.tokens.length).toBe(1);
        expect(results.tokens[0]).toBe(results.lastToken);
        expect(results.tokens[0].key).toBe('milestone');
        expect(results.tokens[0].value).toBe('none');
        expect(results.tokens[0].symbol).toBe('');
      });

      it('returns for input starting with tokens and ending with search value', () => {
        const results = gl.FilteredSearchTokenizer
          .processTokens('assignee:@user searchTerm');

        expect(results.searchToken).toBe('searchTerm');
        expect(results.tokens.length).toBe(1);
        expect(results.tokens[0].key).toBe('assignee');
        expect(results.tokens[0].value).toBe('user');
        expect(results.tokens[0].symbol).toBe('@');
        expect(results.lastToken).toBe(results.searchToken);
      });

      it('returns for input containing search value wrapped between tokens', () => {
        const results = gl.FilteredSearchTokenizer
          .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');

        expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
        expect(results.tokens.length).toBe(3);
        expect(results.tokens[2]).toBe(results.lastToken);

        expect(results.tokens[0].key).toBe('author');
        expect(results.tokens[0].value).toBe('root');
        expect(results.tokens[0].symbol).toBe('@');

        expect(results.tokens[1].key).toBe('label');
        expect(results.tokens[1].value).toBe('"Won\'t fix"');
        expect(results.tokens[1].symbol).toBe('~');

        expect(results.tokens[2].key).toBe('milestone');
        expect(results.tokens[2].value).toBe('none');
        expect(results.tokens[2].symbol).toBe('');
      });

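      // Free-text terms separated by filter tokens are collapsed into a single
      // space-joined searchToken, while the tokens keep their original order.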
      it('returns for input containing search terms interleaved with tokens', () => {
        const results = gl.FilteredSearchTokenizer
          .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');
        expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
        expect(results.tokens.length).toBe(3);
        expect(results.tokens[2]).toBe(results.lastToken);

        expect(results.tokens[0].key).toBe('author');
        expect(results.tokens[0].value).toBe('root');
        expect(results.tokens[0].symbol).toBe('@');

        expect(results.tokens[1].key).toBe('assignee');
        expect(results.tokens[1].value).toBe('none');
        expect(results.tokens[1].symbol).toBe('');

        expect(results.tokens[2].key).toBe('label');
        expect(results.tokens[2].value).toBe('Doing');
        expect(results.tokens[2].symbol).toBe('~');
      });
    });
  });
})();