summaryrefslogtreecommitdiff
path: root/test/psych/test_scalar_scanner.rb
blob: e489b20a0f03ce9bcdf4760ae1cd1cd3b6b489d4 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
# frozen_string_literal: true
require_relative 'helper'
require 'date'

module Psych
  # Unit tests for Psych::ScalarScanner, the component that converts YAML
  # scalar strings into native Ruby objects (Time, Date, Integer, Float,
  # Symbol, true/false, nil) — or returns the string unchanged when no
  # conversion applies.
  class TestScalarScanner < TestCase
    attr_reader :ss

    def setup
      super
      # Fresh scanner per test. ClassLoader governs which classes the
      # scanner is permitted to construct while tokenizing.
      @ss = Psych::ScalarScanner.new ClassLoader.new
    end

    # Valid timestamps (several YAML timestamp layouts and zone offsets)
    # must all resolve to the expected UTC Time.
    def test_scan_time
      { '2001-12-15T02:59:43.1Z' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-14t21:59:43.10-05:00' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-14 21:59:43.10 -5' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2001-12-15 2:59:43.10' => Time.utc(2001, 12, 15, 02, 59, 43, 100000),
        '2011-02-24 11:17:06 -0800' => Time.utc(2011, 02, 24, 19, 17, 06)
      }.each do |time_str, time|
        assert_equal time, @ss.tokenize(time_str)
      end
    end

    # Timestamp-shaped strings with out-of-range fields (month 92,
    # hour 90, second 73, ...) must stay plain strings.
    def test_scan_bad_time
      [ '2001-12-15T02:59:73.1Z',
        '2001-12-14t90:59:43.10-05:00',
        '2001-92-14 21:59:43.10 -5',
        '2001-12-15 92:59:43.10',
        '2011-02-24 81:17:06 -0800',
      ].each do |time_str|
        assert_equal time_str, @ss.tokenize(time_str)
      end
    end

    # Date-shaped strings with impossible month/day values stay strings.
    def test_scan_bad_dates
      x = '2000-15-01'
      assert_equal x, @ss.tokenize(x)

      x = '2000-10-51'
      assert_equal x, @ss.tokenize(x)

      x = '2000-10-32'
      assert_equal x, @ss.tokenize(x)
    end

    # Single-digit month with a 31st day is still a valid date.
    def test_scan_good_edge_date
      x = '2000-1-31'
      assert_equal Date.strptime(x, '%Y-%m-%d'), @ss.tokenize(x)
    end

    # November 31st does not exist, so the scalar stays a string.
    def test_scan_bad_edge_date
      x = '2000-11-31'
      assert_equal x, @ss.tokenize(x)
    end

    def test_scan_date
      date = '1980-12-16'
      token = @ss.tokenize date
      assert_equal 1980, token.year
      assert_equal 12, token.month
      assert_equal 16, token.day
    end

    def test_scan_inf
      assert_equal(1 / 0.0, ss.tokenize('.inf'))
    end

    def test_scan_plus_inf
      assert_equal(1 / 0.0, ss.tokenize('+.inf'))
    end

    def test_scan_minus_inf
      assert_equal(-1 / 0.0, ss.tokenize('-.inf'))
    end

    def test_scan_nan
      assert ss.tokenize('.nan').nan?
    end

    # An exponent with an empty fraction part is still a valid float.
    def test_scan_float_with_exponent_but_no_fraction
      assert_equal(0.0, ss.tokenize('0.E+0'))
    end

    # 'null', '~', and the empty scalar all map to nil.
    def test_scan_null
      assert_nil ss.tokenize('null')
      assert_nil ss.tokenize('~')
      assert_nil ss.tokenize('')
    end

    def test_scan_symbol
      assert_equal :foo, ss.tokenize(':foo')
    end

    # More than three colon-separated groups (or non-digit groups) must
    # not be treated as base-60 numbers.
    def test_scan_not_sexagesimal
      assert_equal '00:00:00:00:0f', ss.tokenize('00:00:00:00:0f')
      assert_equal '00:00:00:00:00', ss.tokenize('00:00:00:00:00')
      assert_equal '00:00:00:00:00.0', ss.tokenize('00:00:00:00:00.0')
    end

    # 190:20:30.15 == 190*3600 + 20*60 + 30.15
    def test_scan_sexagesimal_float
      assert_equal 685230.15, ss.tokenize('190:20:30.15')
    end

    def test_scan_sexagesimal_int
      assert_equal 685230, ss.tokenize('190:20:30')
    end

    def test_scan_float
      assert_equal 1.2, ss.tokenize('1.2')
    end

    def test_scan_true
      assert_equal true, ss.tokenize('true')
    end

    # A leading underscore prevents numeric conversion.
    def test_scan_strings_starting_with_underscores
      assert_equal "_100", ss.tokenize('_100')
    end

    # A trailing underscore prevents numeric conversion.
    def test_scan_strings_ending_with_underscores
      assert_equal "100_", ss.tokenize('100_')
    end

    def test_scan_int_commas_and_underscores
      # NB: This test is to ensure backward compatibility with prior Psych versions,
      # not to test against any actual YAML specification.
      assert_equal 123_456_789, ss.tokenize('123_456_789')
      assert_equal 123_456_789, ss.tokenize('123,456,789')
      assert_equal 123_456_789, ss.tokenize('1_2,3,4_5,6_789')

      assert_equal 1, ss.tokenize('1')
      # FIX: the commas between expected and actual were missing here,
      # which made this file a syntax error and unloadable.
      assert_equal 1, ss.tokenize('+1')
      assert_equal(-1, ss.tokenize('-1'))

      assert_equal 0b010101010, ss.tokenize('0b010101010')
      assert_equal 0b010101010, ss.tokenize('0b0,1_0,1_,0,1_01,0')

      assert_equal 01234567, ss.tokenize('01234567')
      assert_equal 01234567, ss.tokenize('0_,,,1_2,_34567')

      assert_equal 0x123456789abcdef, ss.tokenize('0x123456789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x12_,34,_56,_789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x_12_,34,_56,_789abcdef')
      assert_equal 0x123456789abcdef, ss.tokenize('0x12_,34,_56,_789abcdef__')
    end

    # A bare dot is not a number.
    def test_scan_dot
      assert_equal '.', ss.tokenize('.')
    end

    def test_scan_plus_dot
      assert_equal '+.', ss.tokenize('+.')
    end
  end
end