Diffstat (limited to 'lib/rubygems/request_set/lockfile/tokenizer.rb')
-rw-r--r--  lib/rubygems/request_set/lockfile/tokenizer.rb | 38 ++++++++++++++++++++++++--------------
1 file changed, 24 insertions(+), 14 deletions(-)
diff --git a/lib/rubygems/request_set/lockfile/tokenizer.rb b/lib/rubygems/request_set/lockfile/tokenizer.rb
index 6918e8e1a5..65cef3baa0 100644
--- a/lib/rubygems/request_set/lockfile/tokenizer.rb
+++ b/lib/rubygems/request_set/lockfile/tokenizer.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
-require 'rubygems/request_set/lockfile/parser'
+
+# frozen_string_literal: true
+require_relative "parser"
class Gem::RequestSet::Lockfile::Tokenizer
Token = Struct.new :type, :value, :column, :line
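
The first hunk switches the parser load from a $LOAD_PATH-based require to require_relative. A minimal sketch of the difference (paths shown for illustration only):

# Old form: searches every $LOAD_PATH entry for
# rubygems/request_set/lockfile/parser.rb.
require "rubygems/request_set/lockfile/parser"

# New form: resolves "parser" against this file's own directory (__dir__),
# independent of the load path. It only resolves correctly when run from
# inside lib/rubygems/request_set/lockfile/, hence commented out here.
# require_relative "parser"
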
@@ -26,7 +28,7 @@ class Gem::RequestSet::Lockfile::Tokenizer
end
def skip(type)
- @tokens.shift while not @tokens.empty? and peek.type == type
+ @tokens.shift while !@tokens.empty? && peek.type == type
end
##
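
The skip change above trades the low-precedence not/and keywords for !/&&. A small standalone example of the precedence difference that motivates this style (variable names are illustrative):

a = true and false   # parsed as (a = true) and false, so a == true
b = true && false    # parsed as b = (true && false), so b == false
puts a  # => true
puts b  # => false
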
@@ -48,7 +50,7 @@ class Gem::RequestSet::Lockfile::Tokenizer
def next_token
@tokens.shift
end
- alias :shift :next_token
+ alias_method :shift, :next_token
def peek
@tokens.first || EOF
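
For the alias change above: alias is a keyword that takes bare names or symbols with no comma, while alias_method is an ordinary method on Module, so it follows normal call syntax and can take dynamically computed names. A hedged sketch with a toy class (not the tokenizer itself):

class ToyQueue
  def next_token
    :tok
  end

  alias_method :shift, :next_token  # regular method call with symbol arguments
  # alias shift next_token          # keyword form: no comma, names fixed at parse time
end

puts ToyQueue.new.shift  # => tok
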
@@ -57,7 +59,7 @@ class Gem::RequestSet::Lockfile::Tokenizer
private
def tokenize(input)
- require 'strscan'
+ require "strscan"
s = StringScanner.new input
until s.eos? do
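
The tokenize method drives a StringScanner over the raw lockfile text, trying one regexp per token kind at the current position and advancing on the first match. A simplified, self-contained sketch of that pattern (token kinds trimmed down; not the tokenizer's full rule set):

require "strscan"

s = StringScanner.new("GEM\n  remote: https://rubygems.org/\n")
until s.eos?
  s.skip(/[ \t]+/)              # simplification: drop leading whitespace instead of tracking it
  if s.scan(/\r?\n/)            then puts "newline"
  elsif s.scan(/[A-Z]+/)        then puts "section: #{s.matched}"
  elsif s.scan(/([a-z]+):\s/)   then puts "entry: #{s[1]}"
  elsif s.scan(/[^\s),!]+/)     then puts "text: #{s.matched}"
  else s.getch                  # defensive: always make progress
  end
end
# prints: section: GEM / newline / entry: remote / text: https://rubygems.org/ / newline
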
@@ -73,13 +75,14 @@ class Gem::RequestSet::Lockfile::Tokenizer
end
@tokens <<
- case
- when s.scan(/\r?\n/) then
+ if s.scan(/\r?\n/)
+
token = Token.new(:newline, nil, *token_pos(pos))
@line_pos = s.pos
@line += 1
token
- when s.scan(/[A-Z]+/) then
+ elsif s.scan(/[A-Z]+/)
+
if leading_whitespace
text = s.matched
text += s.scan(/[^\s)]*/).to_s # in case of no match
@@ -87,20 +90,27 @@ class Gem::RequestSet::Lockfile::Tokenizer
else
Token.new(:section, s.matched, *token_pos(pos))
end
- when s.scan(/([a-z]+):\s/) then
+ elsif s.scan(/([a-z]+):\s/)
+
s.pos -= 1 # rewind for possible newline
Token.new(:entry, s[1], *token_pos(pos))
- when s.scan(/\(/) then
+ elsif s.scan(/\(/)
+
Token.new(:l_paren, nil, *token_pos(pos))
- when s.scan(/\)/) then
+ elsif s.scan(/\)/)
+
Token.new(:r_paren, nil, *token_pos(pos))
- when s.scan(/<=|>=|=|~>|<|>|!=/) then
+ elsif s.scan(/<=|>=|=|~>|<|>|!=/)
+
Token.new(:requirement, s.matched, *token_pos(pos))
- when s.scan(/,/) then
+ elsif s.scan(/,/)
+
Token.new(:comma, nil, *token_pos(pos))
- when s.scan(/!/) then
+ elsif s.scan(/!/)
+
Token.new(:bang, nil, *token_pos(pos))
- when s.scan(/[^\s),!]*/) then
+ elsif s.scan(/[^\s),!]*/)
+
Token.new(:text, s.matched, *token_pos(pos))
else
raise "BUG: can't create token for: #{s.string[s.pos..-1].inspect}"