Mirror of https://github.com/github/ruby.git
[ruby/psych] fix parsing integer values with '_' at the end
https://github.com/ruby/psych/commit/e0bb853014
This commit is contained in:
Parent
31ba0921f8
Commit
48b50cb4fe
|
@ -15,7 +15,7 @@ module Psych
|
||||||
# Taken from http://yaml.org/type/int.html
|
# Taken from http://yaml.org/type/int.html
|
||||||
INTEGER = /^(?:[-+]?0b[0-1_,]+ (?# base 2)
|
INTEGER = /^(?:[-+]?0b[0-1_,]+ (?# base 2)
|
||||||
|[-+]?0[0-7_,]+ (?# base 8)
|
|[-+]?0[0-7_,]+ (?# base 8)
|
||||||
|[-+]?(?:0|[1-9][0-9_,]*) (?# base 10)
|
|[-+]?(?:\d|[1-9][0-9_,]*[^_]) (?# base 10)
|
||||||
|[-+]?0x[0-9a-fA-F_,]+ (?# base 16))$/x
|
|[-+]?0x[0-9a-fA-F_,]+ (?# base 16))$/x
|
||||||
|
|
||||||
attr_reader :class_loader
|
attr_reader :class_loader
|
||||||
|
|
|
@ -118,13 +118,20 @@ module Psych
|
||||||
assert_equal "_100", ss.tokenize('_100')
|
assert_equal "_100", ss.tokenize('_100')
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def test_scan_strings_ending_with_underscores
|
||||||
|
assert_equal "100_", ss.tokenize('100_')
|
||||||
|
end
|
||||||
|
|
||||||
def test_scan_int_commas_and_underscores
|
def test_scan_int_commas_and_underscores
|
||||||
# NB: This test is to ensure backward compatibility with prior Psych versions,
|
# NB: This test is to ensure backward compatibility with prior Psych versions,
|
||||||
# not to test against any actual YAML specification.
|
# not to test against any actual YAML specification.
|
||||||
assert_equal 123_456_789, ss.tokenize('123_456_789')
|
assert_equal 123_456_789, ss.tokenize('123_456_789')
|
||||||
assert_equal 123_456_789, ss.tokenize('123,456,789')
|
assert_equal 123_456_789, ss.tokenize('123,456,789')
|
||||||
assert_equal 123_456_789, ss.tokenize('1_2,3,4_5,6_789')
|
assert_equal 123_456_789, ss.tokenize('1_2,3,4_5,6_789')
|
||||||
assert_equal 123_456_789, ss.tokenize('1_2,3,4_5,6_789_')
|
|
||||||
|
assert_equal 1, ss.tokenize('1')
|
||||||
|
assert_equal 1, ss.tokenize('+1')
|
||||||
|
assert_equal(-1, ss.tokenize('-1'))
|
||||||
|
|
||||||
assert_equal 0b010101010, ss.tokenize('0b010101010')
|
assert_equal 0b010101010, ss.tokenize('0b010101010')
|
||||||
assert_equal 0b010101010, ss.tokenize('0b0,1_0,1_,0,1_01,0')
|
assert_equal 0b010101010, ss.tokenize('0b0,1_0,1_,0,1_01,0')
|
||||||
|
|
Loading…
Reference in new issue