From cee3be29ec4a2b31c7e67cd66a36a5c95e8f3b05 Mon Sep 17 00:00:00 2001
From: Grzegorz Bizon
Date: Wed, 21 Feb 2018 12:51:56 +0100
Subject: Add a lexeme for a string pipeline expression

---
 lib/gitlab/ci/pipeline/expression/lexer.rb      | 30 ++++++++++++----------
 lib/gitlab/ci/pipeline/expression/string.rb     |  2 +-
 lib/gitlab/ci/pipeline/expression/token.rb      |  2 ++
 .../gitlab/ci/pipeline/expression/lexer_spec.rb | 16 ++++++++++++
 4 files changed, 35 insertions(+), 15 deletions(-)

diff --git a/lib/gitlab/ci/pipeline/expression/lexer.rb b/lib/gitlab/ci/pipeline/expression/lexer.rb
index 26895174881..8432b36b066 100644
--- a/lib/gitlab/ci/pipeline/expression/lexer.rb
+++ b/lib/gitlab/ci/pipeline/expression/lexer.rb
@@ -2,32 +2,34 @@ module Gitlab
   module Ci
     module Pipeline
       module Expression
-        LEXEMES = [
-          Expression::Variable
-        ]
+        class Lexer
+          LEXEMES = [
+            Expression::Variable,
+            Expression::String
+          ]
 
-        MAX_CYCLES = 5
+          MAX_CYCLES = 5
+          SyntaxError = Class.new(StandardError)
 
-        class Lexer
           def initialize(statement)
             @scanner = StringScanner.new(statement)
             @tokens = []
           end
 
           def tokenize
-            @tokens.tap do
-              MAX_CYCLES.times do
-                LEXEMES.each do |lexeme|
-                  @scanner.scan(/\s+/) # ignore whitespace
-
-                  lexeme.scan(@scanner).tap do |token|
-                    @tokens.push(token) if token.present?
-                  end
+            MAX_CYCLES.times do
+              LEXEMES.each do |lexeme|
+                @scanner.scan(/\s+/) # ignore whitespace
 
-                  return @tokens if @scanner.eos?
+                lexeme.scan(@scanner).tap do |token|
+                  @tokens.push(token) if token.present?
                 end
+
+                return @tokens if @scanner.eos?
               end
             end
+
+            raise Lexer::SyntaxError unless @scanner.eos?
           end
         end
       end
diff --git a/lib/gitlab/ci/pipeline/expression/string.rb b/lib/gitlab/ci/pipeline/expression/string.rb
index a603ef6cf4c..e6cc50c56d8 100644
--- a/lib/gitlab/ci/pipeline/expression/string.rb
+++ b/lib/gitlab/ci/pipeline/expression/string.rb
@@ -3,7 +3,7 @@ module Gitlab
     module Pipeline
       module Expression
         class String < Expression::Lexeme
-          PATTERN = /("|')(?<string>.+)('|")/.freeze
+          PATTERN = /"(?<string>.+?)"/.freeze
 
           def initialize(value)
             @value = value
diff --git a/lib/gitlab/ci/pipeline/expression/token.rb b/lib/gitlab/ci/pipeline/expression/token.rb
index c800d1f0c08..03a47a17d40 100644
--- a/lib/gitlab/ci/pipeline/expression/token.rb
+++ b/lib/gitlab/ci/pipeline/expression/token.rb
@@ -3,6 +3,8 @@ module Gitlab
     module Pipeline
       module Expression
         class Token
+          attr_reader :value, :type
+
           def initialize(value, type)
             @value = value
             @type = type
diff --git a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
index 84d54c5acf2..d0a616d5c2f 100644
--- a/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/expression/lexer_spec.rb
@@ -26,5 +26,21 @@ describe Gitlab::Ci::Pipeline::Expression::Lexer do
       expect(tokens.size).to eq 2
       expect(tokens).to all(be_an_instance_of(token_class))
     end
+
+    it 'tokenizes multiple values with different tokens' do
+      tokens = described_class.new('$VARIABLE "text" "value"').tokenize
+
+      expect(tokens.size).to eq 3
+      expect(tokens.first.value).to eq '$VARIABLE'
+      expect(tokens.second.value).to eq '"text"'
+      expect(tokens.third.value).to eq '"value"'
+    end
+
+    it 'limits statement to 5 tokens' do
+      lexer = described_class.new("$V1 $V2 $V3 $V4 $V5 $V6")
+
+      expect { lexer.tokenize }
+        .to raise_error described_class::SyntaxError
+    end
   end
 end
-- 
cgit v1.2.1
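
A rough usage sketch of the lexer after this patch, assuming the classes above are loaded (for example in a Rails console); the statement values are illustrative and mirror the new specs:

    # A variable followed by a double-quoted string yields two tokens.
    lexer = Gitlab::Ci::Pipeline::Expression::Lexer.new('$VARIABLE "some text"')

    tokens = lexer.tokenize   # => two Gitlab::Ci::Pipeline::Expression::Token objects
    tokens.map(&:value)       # => ['$VARIABLE', '"some text"']

    # A statement the known lexemes cannot fully consume within MAX_CYCLES
    # now raises the new error instead of returning a partial token list:
    Gitlab::Ci::Pipeline::Expression::Lexer.new('$V1 $V2 $V3 $V4 $V5 $V6').tokenize
    # => raises Gitlab::Ci::Pipeline::Expression::Lexer::SyntaxError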