author     Simon Knox <psimyn@gmail.com>  2018-10-29 10:09:53 +1100
committer  Simon Knox <psimyn@gmail.com>  2018-10-29 10:09:53 +1100
commit     8783eb33a01549403eb90769f6e45ef33caa374d (patch)
tree       c5b612a13cb42e496d5ebd8ee55ab32bb5be8053  /app/assets/javascripts/filtered_search/filtered_search_tokenizer.js
parent     67f28ccdd85fa6c80e52dd5799c10893e6d9e0a8 (diff)
parent     04184b32ec77c073757218f02f2796a24e88b79b (diff)
download   gitlab-ce-8783eb33a01549403eb90769f6e45ef33caa374d.tar.gz
Merge branch 'master' of gitlab.com:gitlab-org/gitlab-ce into backport-ee-7203-sticky-logs-topbar
Diffstat (limited to 'app/assets/javascripts/filtered_search/filtered_search_tokenizer.js')
-rw-r--r--  app/assets/javascripts/filtered_search/filtered_search_tokenizer.js  63
1 file changed, 35 insertions, 28 deletions
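
For context on the behavior the diff below reformats: a minimal standalone sketch of how the token regex in this file splits a query string. The allowedKeys values and the sample input are illustrative assumptions, not values taken from this commit.

// Sketch only -- `allowedKeys` and `input` below are assumed example values.
const allowedKeys = ['author', 'label', 'milestone'];

// Same pattern as in the diff: `(token):(symbol)(value)`, where a quoted value
// may contain spaces and an optional leading ~, %, or @ symbol is captured.
const tokenRegex = new RegExp(
  `(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`,
  'g',
);

const input = 'label:~"needs review" fix crash';
const searchToken = input
  .replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
    // Logs { key: 'label', symbol: '~', value: '"needs review"' }
    console.log({ key, symbol, value: v1 || v2 || v3 });
    return '';
  })
  .replace(/\s{2,}/g, ' ')
  .trim();

console.log(searchToken); // => 'fix crash' (the plain-text remainder of the query)
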
diff --git a/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js b/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js
index d75610f6d68..b5c4cb15aac 100644
--- a/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js
+++ b/app/assets/javascripts/filtered_search/filtered_search_tokenizer.js
@@ -4,41 +4,48 @@ export default class FilteredSearchTokenizer {
static processTokens(input, allowedKeys) {
// Regex extracts `(token):(symbol)(value)`
// Values that start with a double quote must end in a double quote (same for single)
- const tokenRegex = new RegExp(`(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`, 'g');
+ const tokenRegex = new RegExp(
+ `(${allowedKeys.join('|')}):([~%@]?)(?:('[^']*'{0,1})|("[^"]*"{0,1})|(\\S+))`,
+ 'g',
+ );
const tokens = [];
const tokenIndexes = []; // stores key+value for simple search
let lastToken = null;
- const searchToken = input.replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
- let tokenValue = v1 || v2 || v3;
- let tokenSymbol = symbol;
- let tokenIndex = '';
-
- if (tokenValue === '~' || tokenValue === '%' || tokenValue === '@') {
- tokenSymbol = tokenValue;
- tokenValue = '';
- }
-
- tokenIndex = `${key}:${tokenValue}`;
-
- // Prevent adding duplicates
- if (tokenIndexes.indexOf(tokenIndex) === -1) {
- tokenIndexes.push(tokenIndex);
-
- tokens.push({
- key,
- value: tokenValue || '',
- symbol: tokenSymbol || '',
- });
- }
-
- return '';
- }).replace(/\s{2,}/g, ' ').trim() || '';
+ const searchToken =
+ input
+ .replace(tokenRegex, (match, key, symbol, v1, v2, v3) => {
+ let tokenValue = v1 || v2 || v3;
+ let tokenSymbol = symbol;
+ let tokenIndex = '';
+
+ if (tokenValue === '~' || tokenValue === '%' || tokenValue === '@') {
+ tokenSymbol = tokenValue;
+ tokenValue = '';
+ }
+
+ tokenIndex = `${key}:${tokenValue}`;
+
+ // Prevent adding duplicates
+ if (tokenIndexes.indexOf(tokenIndex) === -1) {
+ tokenIndexes.push(tokenIndex);
+
+ tokens.push({
+ key,
+ value: tokenValue || '',
+ symbol: tokenSymbol || '',
+ });
+ }
+
+ return '';
+ })
+ .replace(/\s{2,}/g, ' ')
+ .trim() || '';
if (tokens.length > 0) {
const last = tokens[tokens.length - 1];
const lastString = `${last.key}:${last.symbol}${last.value}`;
- lastToken = input.lastIndexOf(lastString) ===
- input.length - lastString.length ? last : searchToken;
+ lastToken =
+ input.lastIndexOf(lastString) === input.length - lastString.length ? last : searchToken;
} else {
lastToken = searchToken;
}