path: root/tool/lrama/lib/lrama/lexer.rb
require "strscan"
require "lrama/report"

module Lrama
  # Lexer for parse.y
  class Lexer
    include Lrama::Report::Duration

    # s_value is the semantic value of the token
    Token = Struct.new(:type, :s_value, keyword_init: true) do
      Type = Struct.new(:id, :name, keyword_init: true)

      attr_accessor :line, :column, :referred
      # For User_code
      attr_accessor :references

      def to_s
        "#{super} line: #{line}, column: #{column}"
      end

      @i = 0
      @types = []

      def self.define_type(name)
        type = Type.new(id: @i, name: name.to_s)
        const_set(name, type)
        @types << type
        @i += 1
      end

      # Token types
      define_type(:P_expect)         # %expect
      define_type(:P_define)         # %define
      define_type(:P_printer)        # %printer
      define_type(:P_lex_param)      # %lex-param
      define_type(:P_parse_param)    # %parse-param
      define_type(:P_initial_action) # %initial-action
      define_type(:P_union)          # %union
      define_type(:P_token)          # %token
      define_type(:P_type)           # %type
      define_type(:P_nonassoc)       # %nonassoc
      define_type(:P_left)           # %left
      define_type(:P_right)          # %right
      define_type(:P_prec)           # %prec
      define_type(:User_code)        # { ... }
      define_type(:Tag)              # <int>
      define_type(:Number)           # 0
      define_type(:Ident_Colon)      # k_if:, k_if  : (spaces may appear before the colon)
      define_type(:Ident)            # api.pure, tNUMBER
      define_type(:Semicolon)        # ;
      define_type(:Bar)              # |
      define_type(:String)           # "str"
      define_type(:Char)             # '+'
    end
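
    # Illustrative sketch (not part of the original file): each define_type
    # call above installs a Type as a constant on Token, so token kinds
    # compare by identity and carry a readable name:
    #
    #   t = Token.new(type: Token::P_token, s_value: "%token")
    #   t.type == Token::P_token #=> true
    #   t.type.name              #=> "P_token"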

    # States
    #
    # See: https://www.gnu.org/software/bison/manual/html_node/Grammar-Outline.html
    Initial = 0
    Prologue = 1
    BisonDeclarations = 2
    GrammarRules = 3
    Epilogue = 4
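
    # For orientation (illustrative sketch; the state annotations are ours,
    # and each marker must appear alone on its line):
    #
    #   %{                            <- Initial -> Prologue
    #   static int i;
    #   %}                            <- Prologue -> BisonDeclarations
    #   %token tNUMBER
    #   %%                            <- BisonDeclarations -> GrammarRules
    #   program: tNUMBER ;
    #   %%                            <- GrammarRules -> Epilogue
    #   int main(void) { return 0; }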

    attr_reader :prologue, :bison_declarations, :grammar_rules, :epilogue,
                :bison_declarations_tokens, :grammar_rules_tokens

    def initialize(text)
      @text = text
      @state = Initial
      # Arrays of [line_text, lineno] pairs for each section
      @prologue = []
      @bison_declarations = []
      @grammar_rules = []
      @epilogue = []

      # Arrays of tokens for each tokenized section
      @bison_declarations_tokens = []
      @grammar_rules_tokens = []

      @debug = false

      report_duration(:lex) do
        lex_text
        lex_bison_declarations_tokens
        lex_grammar_rules_tokens
      end
    end

    private

    def create_token(type, s_value, line, column)
      t = Token.new(type: type, s_value: s_value)
      t.line = line
      t.column = column

      return t
    end

    # TODO: Remove this
    def lex_text
      @text.each_line.with_index(1) do |string, lineno|
        case @state
        when Initial
          # Skip until "%{"
          if string == "%{\n"
            @state = Prologue
            @prologue << ["", lineno]
            next
          end
        when Prologue
          # Between "%{" and "%}"
          if string == "%}\n"
            @state = BisonDeclarations
            @prologue << ["", lineno]
            next
          end

          @prologue << [string, lineno]
        when BisonDeclarations
          if string == "%%\n"
            @state = GrammarRules
            next
          end

          @bison_declarations << [string, lineno]
        when GrammarRules
          # Between "%%" and "%%"
          if string == "%%\n"
            @state = Epilogue
            next
          end

          @grammar_rules << [string, lineno]
        when Epilogue
          @epilogue << [string, lineno]
        else
          raise "Unknown state: #{@state}"
        end
      end
    end
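
    # Worked example (illustrative): the splitter above yields arrays of
    # [line_text, lineno] pairs, with the "%{" / "%}" marker lines recorded
    # as empty strings so the prologue's line positions are preserved:
    #
    #   lexer = Lrama::Lexer.new("%{\nint i;\n%}\n%token t\n%%\nr: ;\n%%\n")
    #   lexer.prologue      #=> [["", 1], ["int i;\n", 2], ["", 3]]
    #   lexer.grammar_rules #=> [["r: ;\n", 6]]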

    # See:
    #   * https://www.gnu.org/software/bison/manual/html_node/Decl-Summary.html
    #   * https://www.gnu.org/software/bison/manual/html_node/Symbol-Decls.html
    #   * https://www.gnu.org/software/bison/manual/html_node/Empty-Rules.html
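    #
    # For instance (illustrative), lexing the single declaration line
    # "%token tNUMBER" appends a P_token token followed by an Ident token:
    #
    #   tokens = []
    #   lex_common([["%token tNUMBER\n", 1]], tokens)
    #   tokens.map { |t| t.type.name } #=> ["P_token", "Ident"]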
    def lex_common(lines, tokens)
      line = lines.first[1]
      column = 0
      ss = StringScanner.new(lines.map(&:first).join)

      while !ss.eos? do
        case
        when ss.scan(/\n/)
          line += 1
          # Remember the position just after the newline so that
          # (ss.pos - column) yields a 0-origin column for later tokens
          column = ss.pos
        when ss.scan(/\s+/)
          # skip
        when ss.scan(/;/)
          tokens << create_token(Token::Semicolon, ss[0], line, ss.pos - column)
        when ss.scan(/\|/)
          tokens << create_token(Token::Bar, ss[0], line, ss.pos - column)
        when ss.scan(/(\d+)/)
          tokens << create_token(Token::Number, Integer(ss[0]), line, ss.pos - column)
        when ss.scan(/(<[a-zA-Z0-9_]+>)/)
          tokens << create_token(Token::Tag, ss[0], line, ss.pos - column)
        when ss.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)\s*:/)
          tokens << create_token(Token::Ident_Colon, ss[1], line, ss.pos - column)
        when ss.scan(/([a-zA-Z_.][-a-zA-Z0-9_.]*)/)
          tokens << create_token(Token::Ident, ss[0], line, ss.pos - column)
        when ss.scan(/%expect/)
          tokens << create_token(Token::P_expect, ss[0], line, ss.pos - column)
        when ss.scan(/%define/)
          tokens << create_token(Token::P_define, ss[0], line, ss.pos - column)
        when ss.scan(/%printer/)
          tokens << create_token(Token::P_printer, ss[0], line, ss.pos - column)
        when ss.scan(/%lex-param/)
          tokens << create_token(Token::P_lex_param, ss[0], line, ss.pos - column)
        when ss.scan(/%parse-param/)
          tokens << create_token(Token::P_parse_param, ss[0], line, ss.pos - column)
        when ss.scan(/%initial-action/)
          tokens << create_token(Token::P_initial_action, ss[0], line, ss.pos - column)
        when ss.scan(/%union/)
          tokens << create_token(Token::P_union, ss[0], line, ss.pos - column)
        when ss.scan(/%token/)
          tokens << create_token(Token::P_token, ss[0], line, ss.pos - column)
        when ss.scan(/%type/)
          tokens << create_token(Token::P_type, ss[0], line, ss.pos - column)
        when ss.scan(/%nonassoc/)
          tokens << create_token(Token::P_nonassoc, ss[0], line, ss.pos - column)
        when ss.scan(/%left/)
          tokens << create_token(Token::P_left, ss[0], line, ss.pos - column)
        when ss.scan(/%right/)
          tokens << create_token(Token::P_right, ss[0], line, ss.pos - column)
        when ss.scan(/%prec/)
          tokens << create_token(Token::P_prec, ss[0], line, ss.pos - column)
        when ss.scan(/{/)
          token, line = lex_user_code(ss, line, ss.pos - column, lines)
          tokens << token
        when ss.scan(/"/)
          string, line = lex_string(ss, "\"", line, lines)
          token = create_token(Token::String, string, line, ss.pos - column)
          tokens << token
        when ss.scan(/\/\*/)
          # TODO: Need to keep comment?
          line = lex_comment(ss, line, lines, "")
        when ss.scan(/'(.)'/)
          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
        when ss.scan(/'\\(.)'/) # '\\', '\t'
          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
        when ss.scan(/'\\(\d+)'/) # '\13'
          tokens << create_token(Token::Char, ss[0], line, ss.pos - column)
        when ss.scan(/%empty/)
          # skip
        else
          l = line - lines.first[1]
          split = ss.string.split("\n")
          col = ss.pos - split[0...l].join("\n").length
          raise "Parse error (unknown token): #{split[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{col})"
        end
      end
    end

    def lex_bison_declarations_tokens
      lex_common(@bison_declarations, @bison_declarations_tokens)
    end

    def lex_user_code(ss, line, column, lines)
      first_line = line
      first_column = column
      debug("Enter lex_user_code: #{line}")
      brace_count = 1
      str = "{"
      # Array of [type, $n, tag, first column, last column]
      # TODO: Is it better to keep string, like "$$", and use gsub?
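      #
      # For example (illustrative), lexing the action "{ $$ = $1; }" records:
      #   [:dollar, "$", nil, 2, 3]  # "$$" at str offsets 2..3
      #   [:dollar, 1,   nil, 7, 8]  # "$1" at str offsets 7..8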
      references = []

      while !ss.eos? do
        case
        when ss.scan(/\n/)
          line += 1
        when ss.scan(/"/)
          string, line = lex_string(ss, "\"", line, lines)
          str << string
          next
        when ss.scan(/'/)
          string, line = lex_string(ss, "'", line, lines)
          str << string
          next
        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?\$/) # $$, $<long>$
          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
          references << [:dollar, "$", tag, str.length, str.length + ss[0].length - 1]
        when ss.scan(/\$(<[a-zA-Z0-9_]+>)?(\d+)/) # $1, $2, $<long>1
          tag = ss[1] ? create_token(Token::Tag, ss[1], line, str.length) : nil
          references << [:dollar, Integer(ss[2]), tag, str.length, str.length + ss[0].length - 1]
        when ss.scan(/@\$/) # @$
          references << [:at, "$", nil, str.length, str.length + ss[0].length - 1]
        when ss.scan(/@(\d+)/) # @1, @10
          references << [:at, Integer(ss[1]), nil, str.length, str.length + ss[0].length - 1]
        when ss.scan(/{/)
          brace_count += 1
        when ss.scan(/}/)
          brace_count -= 1

          debug("Return lex_user_code: #{line}")
          if brace_count == 0
            str << ss[0]
            user_code = Token.new(type: Token::User_code, s_value: str.freeze)
            user_code.line = first_line
            user_code.column = first_column
            user_code.references = references
            return [user_code, line]
          end
        when ss.scan(/\/\*/)
          str << ss[0]
          line = lex_comment(ss, line, lines, str)
        else
          # noop, just consume char
          str << ss.getch
          next
        end

        # Branches that did not `next` fall through to append their match
        str << ss[0]
      end

      # Reached end of input but braces are unbalanced
      l = line - lines.first[1]
      raise "Parse error (brace mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
    end

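    # Reads a quoted run whose opening delimiter has already been consumed
    # and returns [string_including_quotes, updated_line]. Illustrative
    # sketch (not from the original file); `lines` is only consulted for
    # error reporting:
    #
    #   ss = StringScanner.new('"foo" rest')
    #   ss.scan(/"/)
    #   lex_string(ss, '"', 1, lines) #=> ["\"foo\"", 1]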
    def lex_string(ss, terminator, line, lines)
      debug("Enter lex_string: #{line}")

      str = terminator.dup

      while (c = ss.getch) do
        str << c

        case c
        when "\n"
          line += 1
        when terminator
          debug("Return lex_string: #{line}")
          return [str, line]
        else
          # noop
        end
      end

      # Reached end of input but the string is unterminated
      l = line - lines.first[1]
      raise "Parse error (quote mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
    end

    # TODO: Need to handle "//" style comments
    #
    # Lexes a "/* ... */" style comment
    def lex_comment(ss, line, lines, str)
      while !ss.eos? do
        case
        when ss.scan(/\n/)
          line += 1
        when ss.scan(/\*\//)
          return line
        else
          str << ss.getch
          next
        end

        str << ss[0]
      end

      # Reached end of input but the comment is unterminated
      l = line - lines.first[1]
      raise "Parse error (comment mismatch): #{ss.string.split("\n")[l]} \"#{ss.string[ss.pos]}\" (#{line}: #{ss.pos})"
    end

    def lex_grammar_rules_tokens
      lex_common(@grammar_rules, @grammar_rules_tokens)
    end

    def debug(msg)
      return unless @debug
      puts "#{msg}\n"
    end
  end
end
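
# Illustrative usage (hypothetical input path, not part of this file):
#
#   lexer = Lrama::Lexer.new(File.read("parse.tmp.y"))
#   lexer.bison_declarations_tokens.each do |t|
#     puts "#{t.type.name} #{t.s_value.inspect} (#{t.line}:#{t.column})"
#   end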