Diffstat (limited to 'spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js')
-rw-r--r-- | spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js | 22
1 file changed, 11 insertions, 11 deletions
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
index bf8b66f1110..465f5f79931 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
@@ -1,19 +1,19 @@
 import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
-import '~/filtered_search/filtered_search_tokenizer';
+import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer';
 
 describe('Filtered Search Tokenizer', () => {
   const allowedKeys = FilteredSearchTokenKeys.getKeys();
 
   describe('processTokens', () => {
     it('returns for input containing only search value', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
+      const results = FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
       expect(results.searchToken).toBe('searchTerm');
       expect(results.tokens.length).toBe(0);
       expect(results.lastToken).toBe(results.searchToken);
     });
 
     it('returns for input containing only tokens', () => {
-      const results = gl.FilteredSearchTokenizer
+      const results = FilteredSearchTokenizer
         .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys);
       expect(results.searchToken).toBe('');
       expect(results.tokens.length).toBe(4);
@@ -37,7 +37,7 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns for input starting with search value and ending with tokens', () => {
-      const results = gl.FilteredSearchTokenizer
+      const results = FilteredSearchTokenizer
         .processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys);
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(1);
@@ -48,7 +48,7 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns for input starting with tokens and ending with search value', () => {
-      const results = gl.FilteredSearchTokenizer
+      const results = FilteredSearchTokenizer
         .processTokens('assignee:@user searchTerm', allowedKeys);
 
       expect(results.searchToken).toBe('searchTerm');
@@ -60,7 +60,7 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns for input containing search value wrapped between tokens', () => {
-      const results = gl.FilteredSearchTokenizer
+      const results = FilteredSearchTokenizer
         .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none', allowedKeys);
 
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
@@ -81,7 +81,7 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns for input containing search value in between tokens', () => {
-      const results = gl.FilteredSearchTokenizer
+      const results = FilteredSearchTokenizer
         .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys);
       expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
       expect(results.tokens.length).toBe(3);
@@ -101,14 +101,14 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns search value for invalid tokens', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
+      const results = FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
       expect(results.lastToken).toBe('fake:token');
       expect(results.searchToken).toBe('fake:token');
       expect(results.tokens.length).toEqual(0);
     });
 
     it('returns search value and token for mix of valid and invalid tokens', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
+      const results = FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
       expect(results.tokens.length).toEqual(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('real');
@@ -118,13 +118,13 @@ describe('Filtered Search Tokenizer', () => {
     });
 
     it('returns search value for invalid symbols', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
+      const results = FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
       expect(results.lastToken).toBe('std::includes');
       expect(results.searchToken).toBe('std::includes');
     });
 
     it('removes duplicated values', () => {
-      const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
+      const results = FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
       expect(results.tokens.length).toBe(1);
       expect(results.tokens[0].key).toBe('label');
       expect(results.tokens[0].value).toBe('foo');
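Note: the change above is mechanical. FilteredSearchTokenizer is now consumed as an ES module default export instead of being read off the global gl namespace, so the spec's side-effect import becomes a default import and every gl.FilteredSearchTokenizer call site drops the gl. prefix. The snippet below is a minimal usage sketch of the call shape this spec exercises; the result fields (searchToken, tokens, lastToken) are inferred from the spec's own assertions, the input string is illustrative, and this is not the actual implementation of ~/filtered_search/filtered_search_tokenizer.

    // Usage sketch, assuming the post-change module interface shown in the diff.
    import FilteredSearchTokenKeys from '~/filtered_search/filtered_search_token_keys';
    import FilteredSearchTokenizer from '~/filtered_search/filtered_search_tokenizer';

    const allowedKeys = FilteredSearchTokenKeys.getKeys();
    const results = FilteredSearchTokenizer.processTokens('label:~foo searchTerm', allowedKeys);

    // Per the expectations in the spec above:
    //   results.searchToken -> 'searchTerm' (the free-text part of the input)
    //   results.tokens      -> [{ key: 'label', value: 'foo', ... }] (valid tokens only;
    //                          unknown keys such as 'fake:token' fall into searchToken)
    //   results.lastToken   -> equals searchToken here, since the input ends in free text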