Commit c1e8fdf812
Changed files (8)
doc/docgen.zig
lib/std/zig/parse.zig
lib/std/zig/render.zig
lib/std/zig/tokenizer.zig
src/parser.cpp
src/tokenizer.cpp
src/tokenizer.hpp
test/stage1/behavior/bugs/3468.zig
doc/docgen.zig
@@ -917,6 +917,7 @@ fn tokenizeAndPrintRaw(docgen_tokenizer: *Tokenizer, out: var, source_token: Tok
.LBracket,
.RBracket,
.Period,
+ .PeriodAsterisk,
.Ellipsis2,
.Ellipsis3,
.Caret,
lib/std/zig/parse.zig
@@ -2363,6 +2363,10 @@ fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
};
}
+ if (eatToken(it, .PeriodAsterisk)) |period_asterisk| {
+ break :blk OpAndToken{ .op = Op{ .Deref = {} }, .token = period_asterisk };
+ }
+
if (eatToken(it, .Period)) |period| {
if (try parseIdentifier(arena, it, tree)) |identifier| {
// TODO: It's a bit weird to return an InfixOp from the SuffixOp parser.
@@ -2378,9 +2382,6 @@ fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
};
return &node.base;
}
- if (eatToken(it, .Asterisk)) |asterisk| {
- break :blk OpAndToken{ .op = Op{ .Deref = {} }, .token = asterisk };
- }
if (eatToken(it, .QuestionMark)) |question_mark| {
break :blk OpAndToken{ .op = Op{ .UnwrapOptional = {} }, .token = question_mark };
}
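
With dereference carried by the single .PeriodAsterisk token, the suffix-op parser handles .* like any other postfix operator, so it composes with field access and further suffixes. A minimal illustration of the expressions this path now accepts (user-level Zig written for this note, not part of the commit):

    const std = @import("std");

    test "deref parses as a suffix operator" {
        var x: i32 = 1;
        var p = &x;
        var pp = &p;
        p.* = 2; // Deref node directly followed by assignment
        pp.*.* += 1; // deref chains like any other suffix
        std.debug.assert(x == 3);
    }
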
lib/std/zig/render.zig
@@ -617,10 +617,15 @@ fn renderExpression(
return renderToken(tree, stream, rbracket, indent, start_col, space); // ]
},
- ast.Node.SuffixOp.Op.Deref, ast.Node.SuffixOp.Op.UnwrapOptional => {
+ ast.Node.SuffixOp.Op.Deref => {
+ try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
+ return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // .*
+ },
+
+ ast.Node.SuffixOp.Op.UnwrapOptional => {
try renderExpression(allocator, stream, tree, indent, start_col, suffix_op.lhs, Space.None);
try renderToken(tree, stream, tree.prevToken(suffix_op.rtoken), indent, start_col, Space.None); // .
- return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // * or ?
+ return renderToken(tree, stream, suffix_op.rtoken, indent, start_col, space); // ?
},
@TagType(ast.Node.SuffixOp.Op).Slice => |range| {
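
Deref and UnwrapOptional previously shared one branch because both ended in a period token followed by a one-character token (* or ?). Since .* is now a single token, the Deref case renders only the lhs and the .* rtoken, while UnwrapOptional keeps the two-token form. Illustrative zig fmt behavior implied by these render paths (assumed output, not taken from the commit):

    // input
    b.*=3;
    // zig fmt output
    b.* = 3;
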
lib/std/zig/tokenizer.zig
@@ -103,6 +103,7 @@ pub const Token = struct {
LBracket,
RBracket,
Period,
+ PeriodAsterisk,
Ellipsis2,
Ellipsis3,
Caret,
@@ -231,6 +232,7 @@ pub const Token = struct {
.LBracket => "[",
.RBracket => "]",
.Period => ".",
+ .PeriodAsterisk => ".*",
.Ellipsis2 => "..",
.Ellipsis3 => "...",
.Caret => "^",
@@ -1033,6 +1035,11 @@ pub const Tokenizer = struct {
'.' => {
state = State.Period2;
},
+ '*' => {
+ result.id = Token.Id.PeriodAsterisk;
+ self.index += 1;
+ break;
+ },
else => {
result.id = Token.Id.Period;
break;
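
This tokenizer change is the heart of the fix: previously . and * were separate tokens, so in b.*=3 maximal munch combined *= into a single AsteriskEqual token and the parser never saw the * it expected after the period. Consuming .* as one PeriodAsterisk token leaves the following = to tokenize on its own. A minimal sketch of the resulting token stream, assuming the std.zig.Tokenizer init/next interface and the Token.Id names of this era:

    const std = @import("std");
    const Token = std.zig.Token;
    const assert = std.debug.assert;

    test "b.*=3 tokenizes with a single PeriodAsterisk token" {
        // Before this change the same input produced Period followed by
        // AsteriskEqual, which the parser could not interpret as a dereference.
        var tokenizer = std.zig.Tokenizer.init("b.*=3;");
        assert(tokenizer.next().id == Token.Id.Identifier);
        assert(tokenizer.next().id == Token.Id.PeriodAsterisk);
        assert(tokenizer.next().id == Token.Id.Equal);
    }
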
src/parser.cpp
@@ -2693,10 +2693,12 @@ static AstNode *ast_parse_suffix_op(ParseContext *pc) {
return res;
}
+ Token *dot_asterisk = eat_token_if(pc, TokenIdDotStar);
+ if (dot_asterisk != nullptr)
+ return ast_create_node(pc, NodeTypePtrDeref, dot_asterisk);
+
Token *dot = eat_token_if(pc, TokenIdDot);
if (dot != nullptr) {
- if (eat_token_if(pc, TokenIdStar) != nullptr)
- return ast_create_node(pc, NodeTypePtrDeref, dot);
if (eat_token_if(pc, TokenIdQuestion) != nullptr)
return ast_create_node(pc, NodeTypeUnwrapOptional, dot);
src/tokenizer.cpp
@@ -583,6 +583,11 @@ void tokenize(Buf *buf, Tokenization *out) {
t.state = TokenizeStateSawDotDot;
set_token_id(&t, t.cur_tok, TokenIdEllipsis2);
break;
+ case '*':
+ t.state = TokenizeStateStart;
+ set_token_id(&t, t.cur_tok, TokenIdDotStar);
+ end_token(&t);
+ break;
default:
t.pos -= 1;
end_token(&t);
@@ -1603,6 +1608,7 @@ const char * token_name(TokenId id) {
case TokenIdDivEq: return "/=";
case TokenIdDocComment: return "DocComment";
case TokenIdDot: return ".";
+ case TokenIdDotStar: return ".*";
case TokenIdEllipsis2: return "..";
case TokenIdEllipsis3: return "...";
case TokenIdEof: return "EOF";
src/tokenizer.hpp
@@ -44,6 +44,7 @@ enum TokenId {
TokenIdDivEq,
TokenIdDocComment,
TokenIdDot,
+ TokenIdDotStar,
TokenIdEllipsis2,
TokenIdEllipsis3,
TokenIdEof,
test/stage1/behavior/bugs/3468.zig
@@ -0,0 +1,6 @@
+// zig fmt: off
+test "pointer deref next to assignment" {
+ var a:i32=2;
+ var b=&a;
+ b.*=3;
+}
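
The regression test keeps zig fmt disabled so that .* stays directly adjacent to =, which is exactly the sequence that used to mis-tokenize; the filename suggests it tracks issue 3468. For comparison, with formatting on, zig fmt would be expected to render the body as:

    test "pointer deref next to assignment" {
        var a: i32 = 2;
        var b = &a;
        b.* = 3;
    }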