author     aamine <aamine@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>   2004-09-20 08:42:00 +0000
committer  aamine <aamine@b2dd03c8-39d4-4d8f-98ff-823fe69b080e>   2004-09-20 08:42:00 +0000
commit     2c750e368d28af3a14a4e2ab1f7cb20244a41838 (patch)
tree       2ec5881e01d632d76147ceffee390c362349a5e6 /ext/ripper/lib
parent     b2a700f13eb406145983ff2f2308ce0f6b1ebbe5 (diff)
* test/ripper/test_scanner_events.rb: tokens must be reordered.
* ext/ripper/lib/ripper/tokenizer.rb: ditto.

git-svn-id: svn+ssh://ci.ruby-lang.org/ruby/trunk@6937 b2dd03c8-39d4-4d8f-98ff-823fe69b080e
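Why reordering is needed: Ripper can report scanner events out of source order, so the new TokenSorter collects [[lineno, column], event, tok] triples and sorts them by position before Tokenizer consumes them. A minimal, Ripper-independent sketch of that sort (the sample triples are invented for illustration):

    # Triples in the shape TokenSorter collects: [[lineno, column], event, token].
    data = [
      [[2, 0], :on_ident, "bar"],
      [[1, 0], :on_ident, "foo"],
      [[1, 3], :on_sp,    " "],
    ]
    # Array#<=> compares position pairs lexicographically (line first, then column),
    # so sort_by restores source order; mapping out the last element yields the tokens.
    p data.sort_by {|pos, event, tok| pos }.map {|pos, event, tok| tok }
    # => ["foo", " ", "bar"]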
Diffstat (limited to 'ext/ripper/lib')
-rw-r--r--   ext/ripper/lib/ripper/tokenizer.rb | 61
1 file changed, 54 insertions, 7 deletions
diff --git a/ext/ripper/lib/ripper/tokenizer.rb b/ext/ripper/lib/ripper/tokenizer.rb
index 134de9d1e0..e69e5ba519 100644
--- a/ext/ripper/lib/ripper/tokenizer.rb
+++ b/ext/ripper/lib/ripper/tokenizer.rb
@@ -16,22 +16,69 @@ class Ripper
     Tokenizer.tokenize(str)
   end
 
+
   class Tokenizer < ::Ripper
-    def Tokenizer.tokenize(str)
-      new(str).tokenize
+
+    def Tokenizer.tokenize(str, filename = '-', lineno = 1)
+      new(str, filename, lineno).tokenize
+    end
+
+    def initialize(src, filename = '-', lineno = 1)
+      @src = src
+      @__filename = filename
+      @__linestart = lineno
+      @__line = nil
+      @__col = nil
+    end
+
+    def filename
+      @__filename
+    end
+
+    def lineno
+      @__line
+    end
+
+    def column
+      @__col
     end
 
     def tokenize
-      @tokens = []
-      parse
-      @tokens.sort_by {|tok, pos| pos }.map {|tok,| tok }
+      _exec_tokenizer().map {|pos, event, tok| tok }
+    end
+
+    def parse
+      _exec_tokenizer().each do |pos, event, tok|
+        @__line, @__col = *pos
+        on__scan(event, tok)
+        __send__(event, tok)
+      end
+      data
     end
 
     private
 
-    def on__scan(type, tok)
-      @tokens.push [tok, [lineno(),column()]]
+    def _exec_tokenizer
+      TokenSorter.new(@src, @__filename, @__linestart).parse
     end
+
+  end
+
+
+  class TokenSorter < ::Ripper #:nodoc: internal use only
+
+    def parse
+      @data = []
+      super
+      @data.sort_by {|pos, event, tok| pos }
+    end
+
+    private
+
+    def on__scan(event, tok)
+      @data.push [[lineno(),column()], event, tok]
+    end
+
   end
 end
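A minimal usage sketch of the API this diff introduces, assuming this revision of ext/ripper/lib/ripper/tokenizer.rb is available as ripper/tokenizer on the load path (the require path and the example output are illustrative, not verified against this build):

    require 'ripper'
    require 'ripper/tokenizer'

    src = "1 + 2\n"
    # Class-level helper: filename and lineno now have defaults ('-' and 1),
    # and the returned token strings come back sorted into source order.
    p Ripper::Tokenizer.tokenize(src, 'example.rb', 1)
    # e.g. ["1", " ", "+", " ", "2", "\n"]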