path: root/lib/rdoc/parser/ruby_tools.rb
##
# Collection of methods for writing parsers against RDoc::RubyLex and
# RDoc::RubyToken
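#
# The including parser is expected to provide an RDoc::RubyLex instance in
# @scanner and to call #reset before fetching tokens.  A minimal sketch
# (MyParser is hypothetical; RDoc::RubyLex.new is assumed to take the
# source text and an options object):
#
#   class MyParser
#     include RDoc::Parser::RubyTools
#
#     def initialize text, options
#       @scanner = RDoc::RubyLex.new text, options
#       reset
#     end
#   end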

module RDoc::Parser::RubyTools

  include RDoc::RubyToken

  ##
  # Adds a token listener +obj+, but you should probably use #token_listener,
  # which removes the listener when its block finishes

  def add_token_listener(obj)
    @token_listeners ||= []
    @token_listeners << obj
  end

  ##
  # Fetches the next token from the scanner.  Returns nil once the end of the
  # script is reached.
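  #
  # A sketch of a typical consumer loop (the +when+ branches and parse_class
  # are hypothetical):
  #
  #   while tk = get_tk do
  #     case tk
  #     when TkNL    then next
  #     when TkCLASS then parse_class
  #     end
  #   end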

  def get_tk
    tk = nil

    if @tokens.empty? then
      tk = @scanner.token
      @read.push @scanner.get_readed
      puts "get_tk1 => #{tk.inspect}" if $TOKEN_DEBUG
    else
      @read.push @unget_read.shift
      tk = @tokens.shift
      puts "get_tk2 => #{tk.inspect}" if $TOKEN_DEBUG
    end

    tk = nil if TkEND_OF_SCRIPT === tk

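    # a symbol-begin token followed by an identifier-like token is coalesced
    # into a single TkSYMBOL token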
    if TkSYMBEG === tk then
      set_token_position tk.line_no, tk.char_no

      case tk1 = get_tk
      when TkId, TkOp, TkSTRING, TkDSTRING, TkSTAR, TkAMPER then
        if tk1.respond_to?(:name) then
          tk = Token(TkSYMBOL).set_text(":" + tk1.name)
        else
          tk = Token(TkSYMBOL).set_text(":" + tk1.text)
        end

        # remove the identifier we just read (we're about to replace it with a
        # symbol)
        @token_listeners.each do |obj|
          obj.pop_token
        end if @token_listeners
      else
        tk = tk1
      end
    end

    # inform any listeners of our shiny new token
    @token_listeners.each do |obj|
      obj.add_token(tk)
    end if @token_listeners

    tk
  end

  ##
  # Reads and returns all tokens up to one of +tokens+.  Leaves the matched
  # token in the token list.
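  #
  # For example, to collect everything up to the end of the current line
  # (a sketch; the TkNL token itself is left in the stream):
  #
  #   line_tokens = get_tk_until TkNL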

  def get_tk_until(*tokens)
    read = []

    loop do
      tk = get_tk
      case tk
      when *tokens then
        unget_tk tk
        break
      end
      read << tk
    end

    read
  end

  ##
  # Retrieves the source text of the tokens read so far and clears the read
  # buffer
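  #
  # A sketch of the usual pattern, capturing the source text of whatever is
  # read next:
  #
  #   get_tkread            # discard anything already buffered
  #   skip_tkspace
  #   get_tk
  #   text = get_tkread     # source text of the skipped space and the token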

  def get_tkread
    read = @read.join("")
    @read = []
    read
  end

  ##
  # Peek equivalent of #get_tkread: returns the read buffer without clearing it

  def peek_read
    @read.join('')
  end

  ##
  # Peek at the next token, but don't remove it from the stream

  def peek_tk
    unget_tk(tk = get_tk)
    tk
  end

  ##
  # Removes the token listener +obj+

  def remove_token_listener(obj)
    @token_listeners.delete(obj)
  end

  ##
  # Resets the token reading state

  def reset
    @read       = []
    @tokens     = []
    @unget_read = []
    @nest = 0
  end

  ##
  # Skips whitespace tokens, including newlines when +skip_nl+ is true
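  #
  # For example, to skip spaces but stop at the end of the current line
  # (a sketch):
  #
  #   skip_tkspace false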

  def skip_tkspace(skip_nl = true) # HACK dup
    tokens = []

    while TkSPACE === (tk = get_tk) or (skip_nl and TkNL === tk) do
      tokens.push tk
    end

    unget_tk tk
    tokens
  end

  ##
  # Has +obj+ listen to tokens while the given block executes, then removes it
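  #
  # +obj+ must respond to +add_token+ and +pop_token+.  A sketch
  # (RecordingListener is hypothetical):
  #
  #   listener = RecordingListener.new
  #
  #   token_listener listener do
  #     skip_tkspace
  #     get_tk
  #   end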

  def token_listener(obj)
    add_token_listener obj
    yield
  ensure
    remove_token_listener obj
  end

  ##
  # Pushes +tk+ back so it will be returned by the next #get_tk

  def unget_tk(tk)
    @tokens.unshift tk
    @unget_read.unshift @read.pop

    # Remove this token from any listeners
    @token_listeners.each do |obj|
      obj.pop_token
    end if @token_listeners
  end

end