author     Grzegorz Bizon <grzesiek.bizon@gmail.com>  2018-03-01 12:16:58 +0100
committer  Grzegorz Bizon <grzesiek.bizon@gmail.com>  2018-03-01 12:16:58 +0100
commit     26167c24f6f82e7586a03ecbfa9979a74825585d (patch)
tree       e9edd69ea6e35f3a316c3a19169518dc972a5285
parent     886988c9e1ab6020da9636b935522fac55a19c1e (diff)
download   gitlab-ce-26167c24f6f82e7586a03ecbfa9979a74825585d.tar.gz
Improve pipeline expressions lexer
-rw-r--r--  lib/gitlab/ci/pipeline/expression/lexer.rb             42
-rw-r--r--  spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb    4
2 files changed, 29 insertions, 17 deletions
diff --git a/lib/gitlab/ci/pipeline/expression/lexer.rb b/lib/gitlab/ci/pipeline/expression/lexer.rb
index 067ad5fd312..cabb91d0dd7 100644
--- a/lib/gitlab/ci/pipeline/expression/lexer.rb
+++ b/lib/gitlab/ci/pipeline/expression/lexer.rb
@@ -3,6 +3,8 @@ module Gitlab
     module Pipeline
       module Expression
         class Lexer
+          include ::Gitlab::Utils::StrongMemoize
+
           LEXEMES = [
             Expression::Lexeme::Variable,
             Expression::Lexeme::String,
@@ -10,34 +12,44 @@ module Gitlab
             Expression::Lexeme::Equals
           ].freeze
-          MAX_CYCLES = 5
           SyntaxError = Class.new(Statement::StatementError)
+          MAX_TOKENS = 100
+
           def initialize(statement)
             @scanner = StringScanner.new(statement)
-            @tokens = []
           end
-          def tokens
-            return @tokens if @tokens.any?
+          def tokens(max: MAX_TOKENS)
+            strong_memoize(:tokens) { tokenize(max) }
+          end
+
+          def lexemes
+            tokens.map(&:to_lexeme)
+          end
+
+          private
-            MAX_CYCLES.times do
-              LEXEMES.each do |lexeme|
-                @scanner.skip(/\s+/) # ignore whitespace
+          def tokenize(max_tokens)
+            tokens = []
-                lexeme.scan(@scanner).tap do |token|
-                  @tokens.push(token) if token.present?
+            max_tokens.times do
+              @scanner.skip(/\s+/) # ignore whitespace
+
+              return tokens if @scanner.eos?
+
+              lexeme = LEXEMES.find do |type|
+                type.scan(@scanner).tap do |token|
+                  tokens.push(token) if token.present?
                 end
+              end
-                return @tokens if @scanner.eos?
+              unless lexeme.present?
+                raise Lexer::SyntaxError, 'Unknown lexeme found!'
               end
             end
-            raise Lexer::SyntaxError unless @scanner.eos?
-          end
-
-          def lexemes
-            tokens.map(&:to_lexeme)
+            raise Lexer::SyntaxError, 'Too many tokens!'
           end
         end
       end
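
After this change the lexer's public interface is just tokens and lexemes: tokens scans at most MAX_TOKENS (100) tokens, or fewer when an explicit max: is given, and memoizes the result through StrongMemoize; lexemes maps each token through its to_lexeme. A minimal usage sketch, assuming the surrounding GitLab code (the Expression::Lexeme classes and Gitlab::Utils::StrongMemoize) is loaded:

lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE == "value"')

lexer.tokens           # scans the statement once (up to MAX_TOKENS) and memoizes the token list
lexer.tokens(max: 5)   # memoized, so a later call with a different max returns the same list
lexer.lexemes          # the same tokens mapped through #to_lexeme

# An unrecognised character raises Lexer::SyntaxError ('Unknown lexeme found!');
# running past the token budget raises Lexer::SyntaxError ('Too many tokens!').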
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
index 0d70ca6c906..65f90b0caf3 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
@@ -45,10 +45,10 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
       expect(tokens.third.value).to eq '"text"'
     end
-    it 'limits statement to 5 tokens' do
+    it 'limits statement to specified amount of tokens' do
       lexer = described_class.new("$V1 $V2 $V3 $V4 $V5 $V6")
-      expect { lexer.tokens }
+      expect { lexer.tokens(max: 5) }
         .to raise_error described_class::SyntaxError
     end
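
For readers outside the GitLab codebase, the shape of the new tokenize loop (skip whitespace, try each lexeme in turn, stop at end of input, fail on unknown input or on an oversized statement) can be reproduced with Ruby's StringScanner alone. The token table, SyntaxError class, and method below are illustrative stand-ins, not the classes from this commit:

require 'strscan'

SyntaxError = Class.new(StandardError)

# Illustrative token types; the real lexer uses Expression::Lexeme classes.
TOKEN_TYPES = {
  variable: /\$\w+/,
  string:   /"[^"]*"/,
  equals:   /==/
}.freeze

def tokenize(statement, max_tokens: 100)
  scanner = StringScanner.new(statement)
  tokens = []

  max_tokens.times do
    scanner.skip(/\s+/)              # ignore whitespace
    return tokens if scanner.eos?

    # First token type whose pattern matches at the current position.
    type, _pattern = TOKEN_TYPES.find { |_, pattern| scanner.scan(pattern) }
    raise SyntaxError, 'Unknown lexeme found!' unless type

    tokens << [type, scanner.matched]
  end

  raise SyntaxError, 'Too many tokens!'
end

tokenize('$VARIABLE == "value"')
# => [[:variable, "$VARIABLE"], [:equals, "=="], [:string, "\"value\""]]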