summaryrefslogtreecommitdiff
path: root/ext/ripper/lib/ripper/tokenizer.rb
blob: de10d4666463791496904f80b0272ee903e5467d (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
#
# ripper/tokenizer.rb
#
# Copyright (C) 2004 Minero Aoki
#
# This program is free software.
# You can distribute and/or modify this program under the Ruby License.
# For details of Ruby License, see ruby/COPYING.
#

require 'ripper'

class Ripper

  # Tokenizes the Ruby program +str+ and returns an array of token
  # strings, in scan order.
  #
  # +filename+ and +lineno+ are forwarded to Tokenizer.tokenize for
  # error reporting / position bookkeeping; they default to the same
  # values Tokenizer.tokenize uses, so existing one-argument callers
  # are unaffected.
  def Ripper.tokenize(str, filename = '-', lineno = 1)
    Tokenizer.tokenize(str, filename, lineno)
  end


  class Tokenizer < ::Ripper

    # One-shot helper: scans +str+ and returns its token strings.
    def self.tokenize(str, filename = '-', lineno = 1)
      new(str, filename, lineno).tokenize
    end

    # The double-underscore prefix keeps these instance variables from
    # colliding with any state held by the Ripper superclass.
    def initialize(src, filename = '-', lineno = 1)
      @src = src
      @__filename = filename
      @__linestart = lineno
      @__line = nil
      @__col = nil
    end

    # Name of the file being tokenized, as passed to #initialize.
    def filename
      @__filename
    end

    # Line of the most recently dispatched token; nil until #parse
    # has processed at least one token.
    def lineno
      @__line
    end

    # Column of the most recently dispatched token; nil until #parse
    # has processed at least one token.
    def column
      @__col
    end

    # Returns the source as an array of token strings, in scan order.
    def tokenize
      _exec_tokenizer.map { |_pos, _event, token| token }
    end

    # Replays every scanner event in source order: updates the
    # #lineno/#column bookkeeping, fires the on__scan catch-all, then
    # the per-event handler named by +event+.
    def parse
      _exec_tokenizer.each do |position, event, token|
        @__line, @__col = position
        on__scan(event, token)
        __send__(event, token)
      end
    end

    private

    # Runs a TokenSorter over the source and returns its
    # position-sorted [[line, col], event, token] triples.
    def _exec_tokenizer
      TokenSorter.new(@src, @__filename, @__linestart).parse
    end

  end


  class TokenSorter < ::Ripper   #:nodoc: internal use only

    # Scans the whole source, recording every token through the
    # on__scan catch-all, and returns the collected triples ordered
    # by their [line, column] position.
    def parse
      @data = []
      super
      @data.sort_by(&:first)
    end

    private

    # Catch-all scanner-event hook: records one
    # [[line, column], event, token] triple per scanned token.
    def on__scan(event, tok)
      @data << [[lineno, column], event, tok]
    end

  end

end