Diffstat (limited to 'spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js')
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js | 22 ++++++++++++----------
1 file changed, 12 insertions(+), 10 deletions(-)
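The change below threads an explicit allowedKeys argument, taken from gl.FilteredSearchTokenKeys.getKeys(), through every gl.FilteredSearchTokenizer.processTokens call in the spec, matching what appears to be an updated processTokens(input, allowedKeys) signature.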
diff --git a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
index 9561580c839..e4a15c83c23 100644
--- a/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_tokenizer_spec.js
@@ -3,9 +3,11 @@ import '~/filtered_search/filtered_search_token_keys';
import '~/filtered_search/filtered_search_tokenizer';
describe('Filtered Search Tokenizer', () => {
+ const allowedKeys = gl.FilteredSearchTokenKeys.getKeys();
+
describe('processTokens', () => {
it('returns for input containing only search value', () => {
- const results = gl.FilteredSearchTokenizer.processTokens('searchTerm');
+ const results = gl.FilteredSearchTokenizer.processTokens('searchTerm', allowedKeys);
expect(results.searchToken).toBe('searchTerm');
expect(results.tokens.length).toBe(0);
expect(results.lastToken).toBe(results.searchToken);
@@ -13,7 +15,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing only tokens', () => {
const results = gl.FilteredSearchTokenizer
- .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none');
+ .processTokens('author:@root label:~"Very Important" milestone:%v1.0 assignee:none', allowedKeys);
expect(results.searchToken).toBe('');
expect(results.tokens.length).toBe(4);
expect(results.tokens[3]).toBe(results.lastToken);
@@ -37,7 +39,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input starting with search value and ending with tokens', () => {
const results = gl.FilteredSearchTokenizer
- .processTokens('searchTerm anotherSearchTerm milestone:none');
+ .processTokens('searchTerm anotherSearchTerm milestone:none', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(1);
expect(results.tokens[0]).toBe(results.lastToken);
@@ -48,7 +50,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input starting with tokens and ending with search value', () => {
const results = gl.FilteredSearchTokenizer
- .processTokens('assignee:@user searchTerm');
+ .processTokens('assignee:@user searchTerm', allowedKeys);
expect(results.searchToken).toBe('searchTerm');
expect(results.tokens.length).toBe(1);
@@ -60,7 +62,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing search value wrapped between tokens', () => {
const results = gl.FilteredSearchTokenizer
- .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none');
+ .processTokens('author:@root label:~"Won\'t fix" searchTerm anotherSearchTerm milestone:none', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(3);
@@ -81,7 +83,7 @@ describe('Filtered Search Tokenizer', () => {
it('returns for input containing search value in between tokens', () => {
const results = gl.FilteredSearchTokenizer
- .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing');
+ .processTokens('author:@root searchTerm assignee:none anotherSearchTerm label:~Doing', allowedKeys);
expect(results.searchToken).toBe('searchTerm anotherSearchTerm');
expect(results.tokens.length).toBe(3);
expect(results.tokens[2]).toBe(results.lastToken);
@@ -100,14 +102,14 @@ describe('Filtered Search Tokenizer', () => {
});
it('returns search value for invalid tokens', () => {
- const results = gl.FilteredSearchTokenizer.processTokens('fake:token');
+ const results = gl.FilteredSearchTokenizer.processTokens('fake:token', allowedKeys);
expect(results.lastToken).toBe('fake:token');
expect(results.searchToken).toBe('fake:token');
expect(results.tokens.length).toEqual(0);
});
it('returns search value and token for mix of valid and invalid tokens', () => {
- const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token');
+ const results = gl.FilteredSearchTokenizer.processTokens('label:real fake:token', allowedKeys);
expect(results.tokens.length).toEqual(1);
expect(results.tokens[0].key).toBe('label');
expect(results.tokens[0].value).toBe('real');
@@ -117,13 +119,13 @@ describe('Filtered Search Tokenizer', () => {
});
it('returns search value for invalid symbols', () => {
- const results = gl.FilteredSearchTokenizer.processTokens('std::includes');
+ const results = gl.FilteredSearchTokenizer.processTokens('std::includes', allowedKeys);
expect(results.lastToken).toBe('std::includes');
expect(results.searchToken).toBe('std::includes');
});
it('removes duplicated values', () => {
- const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo');
+ const results = gl.FilteredSearchTokenizer.processTokens('label:~foo label:~foo', allowedKeys);
expect(results.tokens.length).toBe(1);
expect(results.tokens[0].key).toBe('label');
expect(results.tokens[0].value).toBe('foo');
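
For reference, the tokenizer behaviour these specs pin down can be sketched as below. This is a minimal illustration written against the assertions above, not the module imported as '~/filtered_search/filtered_search_tokenizer'; the symbol field on each token and the exact regular expression are assumptions, since the visible specs only assert key, value, searchToken, lastToken, and token counts.

function processTokens(input, allowedKeys) {
  const tokens = [];
  const searchTerms = [];
  const seen = new Set(); // tracks token identity so duplicates are dropped
  let lastToken = '';

  // Each piece is either key:<symbol?><value> (the value may be quoted and
  // then may contain spaces) or a bare search word. @, ~ and % are the
  // symbols used in the specs above; this set is an assumption.
  const piece = /(\w+):([@~%]?)("[^"]*"|'[^']*'|\S+)|\S+/g;
  let match;
  while ((match = piece.exec(input)) !== null) {
    const [text, key, symbol, rawValue] = match;
    if (key && allowedKeys.includes(key)) {
      const value = rawValue.replace(/^["']|["']$/g, ''); // strip quotes
      const id = `${key}:${symbol}${value}`;
      if (!seen.has(id)) {
        seen.add(id);
        const token = { key, symbol, value };
        tokens.push(token);
        lastToken = token; // same object, so tokens[i] toBe lastToken holds
      }
      continue;
    }
    // Unknown key ('fake:token'), invalid syntax ('std::includes'), or a
    // plain word: all of it counts as search text.
    searchTerms.push(text);
    lastToken = text;
  }

  const searchToken = searchTerms.join(' ');
  // When the input ends with search text, lastToken is reported as the full
  // search string, so results.lastToken equals results.searchToken.
  if (typeof lastToken === 'string') {
    lastToken = searchToken;
  }
  return { tokens, searchToken, lastToken };
}

Exercised against inputs from the specs, with a hypothetical stand-in for gl.FilteredSearchTokenKeys.getKeys():

const allowedKeys = ['author', 'assignee', 'milestone', 'label'];
const results = processTokens('label:~foo label:~foo searchTerm', allowedKeys);
// results.tokens.length === 1, results.tokens[0].value === 'foo',
// results.searchToken === 'searchTerm'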