Commit ec10595b65
Changed files (2):
- lib/std/zig/parser_test.zig
- lib/std/zig/tokenizer.zig
lib/std/zig/parser_test.zig
@@ -4153,13 +4153,13 @@ test "zig fmt: decimal float literals with underscore separators" {
try testTransform(
\\pub fn main() void {
\\ const a:f64=(10.0e-0+(10.e+0))+10_00.00_00e-2+00_00.00_10e+4;
- \\ const b:f64=010.0--0_10.+0_1_0.0_0+1e2;
+ \\ const b:f64=010.0--0_10.0+0_1_0.0_0+1e2;
\\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b });
\\}
,
\\pub fn main() void {
\\ const a: f64 = (10.0e-0 + (10.e+0)) + 10_00.00_00e-2 + 00_00.00_10e+4;
- \\ const b: f64 = 010.0 - -0_10. + 0_1_0.0_0 + 1e2;
+ \\ const b: f64 = 010.0 - -0_10.0 + 0_1_0.0_0 + 1e2;
\\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b });
\\}
\\
@@ -4170,13 +4170,13 @@ test "zig fmt: hexadeciaml float literals with underscore separators" {
try testTransform(
\\pub fn main() void {
\\ const a: f64 = (0x10.0p-0+(0x10.p+0))+0x10_00.00_00p-8+0x00_00.00_10p+16;
- \\ const b: f64 = 0x0010.0--0x00_10.+0x10.00+0x1p4;
+ \\ const b: f64 = 0x0010.0--0x00_10.0+0x10.00+0x1p4;
\\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b });
\\}
,
\\pub fn main() void {
\\ const a: f64 = (0x10.0p-0 + (0x10.p+0)) + 0x10_00.00_00p-8 + 0x00_00.00_10p+16;
- \\ const b: f64 = 0x0010.0 - -0x00_10. + 0x10.00 + 0x1p4;
+ \\ const b: f64 = 0x0010.0 - -0x00_10.0 + 0x10.00 + 0x1p4;
\\ std.debug.warn("a: {}, b: {} -> a+b: {}\n", .{ a, b, a + b });
\\}
\\
lib/std/zig/tokenizer.zig
@@ -1160,7 +1160,7 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_dec;
- result.tag = .float_literal;
+ result.tag = .invalid;
},
'e', 'E' => {
state = .float_exponent_unsigned;
@@ -1189,7 +1189,7 @@ pub const Tokenizer = struct {
},
'.' => {
state = .num_dot_hex;
- result.tag = .float_literal;
+ result.tag = .invalid;
},
'p', 'P' => {
state = .float_exponent_unsigned;
@@ -1211,9 +1211,11 @@ pub const Tokenizer = struct {
break;
},
'e', 'E' => {
+ result.tag = .float_literal;
state = .float_exponent_unsigned;
},
'0'...'9' => {
+ result.tag = .float_literal;
state = .float_fraction_dec;
},
else => {
@@ -1231,6 +1233,7 @@ pub const Tokenizer = struct {
break;
},
'p', 'P' => {
+ result.tag = .float_literal;
state = .float_exponent_unsigned;
},
'0'...'9', 'a'...'f', 'A'...'F' => {
@@ -1852,7 +1855,6 @@ test "tokenizer - number literals decimal" {
try testTokenize("0_0_f_00", &.{ .invalid, .identifier });
try testTokenize("1_,", &.{ .invalid, .comma });
- try testTokenize("1.", &.{.float_literal});
try testTokenize("0.0", &.{.float_literal});
try testTokenize("1.0", &.{.float_literal});
try testTokenize("10.0", &.{.float_literal});
@@ -1864,8 +1866,8 @@ test "tokenizer - number literals decimal" {
try testTokenize("1.0e+100", &.{.float_literal});
try testTokenize("1.0e-100", &.{.float_literal});
try testTokenize("1_0_0_0.0_0_0_0_0_1e1_0_0_0", &.{.float_literal});
- try testTokenize("1.+", &.{ .float_literal, .plus });
+ try testTokenize("1.", &.{.invalid});
try testTokenize("1e", &.{.invalid});
try testTokenize("1.0e1f0", &.{ .invalid, .identifier });
try testTokenize("1.0p100", &.{ .invalid, .identifier });
@@ -1877,6 +1879,7 @@ test "tokenizer - number literals decimal" {
try testTokenize("1.a", &.{ .invalid, .identifier });
try testTokenize("1.z", &.{ .invalid, .identifier });
try testTokenize("1._0", &.{ .invalid, .identifier });
+ try testTokenize("1.+", &.{ .invalid, .plus });
try testTokenize("1._+", &.{ .invalid, .identifier, .plus });
try testTokenize("1._e", &.{ .invalid, .identifier });
try testTokenize("1.0e", &.{.invalid});
@@ -2004,16 +2007,18 @@ test "tokenizer - number literals hexadecimal" {
try testTokenize("0x0_1_", &.{.invalid});
try testTokenize("0x_,", &.{ .invalid, .identifier, .comma });
- try testTokenize("0x1.", &.{.float_literal});
try testTokenize("0x1.0", &.{.float_literal});
- try testTokenize("0xF.", &.{.float_literal});
try testTokenize("0xF.0", &.{.float_literal});
try testTokenize("0xF.F", &.{.float_literal});
try testTokenize("0xF.Fp0", &.{.float_literal});
try testTokenize("0xF.FP0", &.{.float_literal});
try testTokenize("0x1p0", &.{.float_literal});
try testTokenize("0xfp0", &.{.float_literal});
- try testTokenize("0x1.+0xF.", &.{ .float_literal, .plus, .float_literal });
+ try testTokenize("0x1.0+0xF.0", &.{ .float_literal, .plus, .float_literal });
+
+ try testTokenize("0x1.", &.{.invalid});
+ try testTokenize("0xF.", &.{.invalid});
+ try testTokenize("0x1.+0xF.", &.{ .invalid, .plus, .invalid });
try testTokenize("0x0123456.789ABCDEF", &.{.float_literal});
try testTokenize("0x0_123_456.789_ABC_DEF", &.{.float_literal});