Commit 88f5315ddf
Changed files (16)
lib
lib/std/zig/Ast.zig
@@ -241,6 +241,11 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
});
},
+ .expected_expr_or_var_decl => {
+ return stream.print("expected expression or var decl, found '{s}'", .{
+ token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ });
+ },
.expected_fn => {
return stream.print("expected function, found '{s}'", .{
token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
@@ -584,6 +589,13 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.error_union,
=> n = datas[n].lhs,
+ .assign_destructure => {
+ const extra_idx = datas[n].lhs;
+ const lhs_len = tree.extra_data[extra_idx];
+ assert(lhs_len > 0);
+ n = tree.extra_data[extra_idx + 1];
+ },
+
.fn_decl,
.fn_proto_simple,
.fn_proto_multi,
@@ -816,6 +828,7 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.assign_add_sat,
.assign_sub_sat,
.assign,
+ .assign_destructure,
.merge_error_sets,
.mul,
.div,
@@ -2846,6 +2859,7 @@ pub const Error = struct {
expected_container_members,
expected_expr,
expected_expr_or_assignment,
+ expected_expr_or_var_decl,
expected_fn,
expected_inlinable,
expected_labelable,
@@ -3006,6 +3020,20 @@ pub const Node = struct {
assign_sub_sat,
/// `lhs = rhs`. main_token is op.
assign,
+ /// `a, b, ... = rhs`. main_token is op. lhs is index into `extra_data`
+ /// of an lhs elem count followed by an array of that many `Node.Index`,
+ /// with each node having one of the following types:
+ /// * `global_var_decl`
+ /// * `local_var_decl`
+ /// * `simple_var_decl`
+ /// * `aligned_var_decl`
+ /// * Any expression node
+ /// The first 4 types correspond to a `var` or `const` lhs node (note
+ /// that their `rhs` is always 0). An expression node corresponds to a
+ /// standard assignment LHS (which must be evaluated as an lvalue).
+ /// There may be a preceding `comptime` token, which does not create a
+ /// corresponding `comptime` node so must be manually detected.
+ assign_destructure,
/// `lhs || rhs`. main_token is the `||`.
merge_error_sets,
/// `lhs * rhs`. main_token is the `*`.
lib/std/zig/Parse.zig
@@ -658,9 +658,8 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
}
const thread_local_token = p.eatToken(.keyword_threadlocal);
- const var_decl = try p.parseVarDecl();
+ const var_decl = try p.parseGlobalVarDecl();
if (var_decl != 0) {
- try p.expectSemicolon(.expected_semi_after_decl, false);
return var_decl;
}
if (thread_local_token != null) {
@@ -792,8 +791,9 @@ fn parseFnProto(p: *Parse) !Node.Index {
}
}
-/// VarDecl <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection? (EQUAL Expr)? SEMICOLON
-fn parseVarDecl(p: *Parse) !Node.Index {
+/// VarDeclProto <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection?
+/// Returns a `*_var_decl` node with its rhs (init expression) initialized to 0.
+fn parseVarDeclProto(p: *Parse) !Node.Index {
const mut_token = p.eatToken(.keyword_const) orelse
p.eatToken(.keyword_var) orelse
return null_node;
@@ -803,18 +803,7 @@ fn parseVarDecl(p: *Parse) !Node.Index {
const align_node = try p.parseByteAlign();
const addrspace_node = try p.parseAddrSpace();
const section_node = try p.parseLinkSection();
- const init_node: Node.Index = switch (p.token_tags[p.tok_i]) {
- .equal_equal => blk: {
- try p.warn(.wrong_equal_var_decl);
- p.tok_i += 1;
- break :blk try p.expectExpr();
- },
- .equal => blk: {
- p.tok_i += 1;
- break :blk try p.expectExpr();
- },
- else => 0,
- };
+
if (section_node == 0 and addrspace_node == 0) {
if (align_node == 0) {
return p.addNode(.{
@@ -822,31 +811,33 @@ fn parseVarDecl(p: *Parse) !Node.Index {
.main_token = mut_token,
.data = .{
.lhs = type_node,
- .rhs = init_node,
+ .rhs = 0,
},
});
- } else if (type_node == 0) {
+ }
+
+ if (type_node == 0) {
return p.addNode(.{
.tag = .aligned_var_decl,
.main_token = mut_token,
.data = .{
.lhs = align_node,
- .rhs = init_node,
- },
- });
- } else {
- return p.addNode(.{
- .tag = .local_var_decl,
- .main_token = mut_token,
- .data = .{
- .lhs = try p.addExtra(Node.LocalVarDecl{
- .type_node = type_node,
- .align_node = align_node,
- }),
- .rhs = init_node,
+ .rhs = 0,
},
});
}
+
+ return p.addNode(.{
+ .tag = .local_var_decl,
+ .main_token = mut_token,
+ .data = .{
+ .lhs = try p.addExtra(Node.LocalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ }),
+ .rhs = 0,
+ },
+ });
} else {
return p.addNode(.{
.tag = .global_var_decl,
@@ -858,12 +849,38 @@ fn parseVarDecl(p: *Parse) !Node.Index {
.addrspace_node = addrspace_node,
.section_node = section_node,
}),
- .rhs = init_node,
+ .rhs = 0,
},
});
}
}
+/// GlobalVarDecl <- VarDeclProto (EQUAL Expr)? SEMICOLON
+fn parseGlobalVarDecl(p: *Parse) !Node.Index {
+ const var_decl = try p.parseVarDeclProto();
+ if (var_decl == 0) {
+ return null_node;
+ }
+
+ const init_node: Node.Index = switch (p.token_tags[p.tok_i]) {
+ .equal_equal => blk: {
+ try p.warn(.wrong_equal_var_decl);
+ p.tok_i += 1;
+ break :blk try p.expectExpr();
+ },
+ .equal => blk: {
+ p.tok_i += 1;
+ break :blk try p.expectExpr();
+ },
+ else => 0,
+ };
+
+ p.nodes.items(.data)[var_decl].rhs = init_node;
+
+ try p.expectSemicolon(.expected_semi_after_decl, false);
+ return var_decl;
+}
+
/// ContainerField
/// <- doc_comment? KEYWORD_comptime? IDENTIFIER (COLON TypeExpr)? ByteAlign? (EQUAL Expr)?
/// / doc_comment? KEYWORD_comptime? (IDENTIFIER COLON)? !KEYWORD_fn TypeExpr ByteAlign? (EQUAL Expr)?
@@ -918,8 +935,7 @@ fn expectContainerField(p: *Parse) !Node.Index {
}
/// Statement
-/// <- KEYWORD_comptime? VarDecl
-/// / KEYWORD_comptime BlockExprStatement
+/// <- KEYWORD_comptime ComptimeStatement
/// / KEYWORD_nosuspend BlockExprStatement
/// / KEYWORD_suspend BlockExprStatement
/// / KEYWORD_defer BlockExprStatement
@@ -927,27 +943,28 @@ fn expectContainerField(p: *Parse) !Node.Index {
/// / IfStatement
/// / LabeledStatement
/// / SwitchExpr
-/// / AssignExpr SEMICOLON
-fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
- const comptime_token = p.eatToken(.keyword_comptime);
-
- if (allow_defer_var) {
- const var_decl = try p.parseVarDecl();
- if (var_decl != 0) {
- try p.expectSemicolon(.expected_semi_after_decl, true);
- return var_decl;
+/// / VarDeclExprStatement
+fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
+ if (p.eatToken(.keyword_comptime)) |comptime_token| {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) {
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = comptime_token,
+ .data = .{
+ .lhs = block_expr,
+ .rhs = undefined,
+ },
+ });
}
- }
- if (comptime_token) |token| {
- return p.addNode(.{
- .tag = .@"comptime",
- .main_token = token,
- .data = .{
- .lhs = try p.expectBlockExprStatement(),
- .rhs = undefined,
- },
- });
+ if (allow_defer_var) {
+ return p.expectVarDeclExprStatement(comptime_token);
+ } else {
+ const assign = try p.expectAssignExpr();
+ try p.expectSemicolon(.expected_semi_after_stmt, true);
+ return assign;
+ }
}
switch (p.token_tags[p.tok_i]) {
@@ -1011,21 +1028,145 @@ fn parseStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
const labeled_statement = try p.parseLabeledStatement();
if (labeled_statement != 0) return labeled_statement;
- const assign_expr = try p.parseAssignExpr();
- if (assign_expr != 0) {
+ if (allow_defer_var) {
+ return p.expectVarDeclExprStatement(null);
+ } else {
+ const assign = try p.expectAssignExpr();
try p.expectSemicolon(.expected_semi_after_stmt, true);
- return assign_expr;
+ return assign;
}
+}
- return null_node;
+/// ComptimeStatement
+/// <- BlockExpr
+/// / VarDeclExprStatement
+fn expectComptimeStatement(p: *Parse, comptime_token: TokenIndex) !Node.Index {
+ const block_expr = try p.parseBlockExpr();
+ if (block_expr != 0) {
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = comptime_token,
+ .data = .{ .lhs = block_expr, .rhs = undefined },
+ });
+ }
+ return p.expectVarDeclExprStatement(comptime_token);
}
-fn expectStatement(p: *Parse, allow_defer_var: bool) !Node.Index {
- const statement = try p.parseStatement(allow_defer_var);
- if (statement == 0) {
- return p.fail(.expected_statement);
+/// VarDeclExprStatement
+/// <- VarDeclProto (COMMA (VarDeclProto / Expr))* EQUAL Expr SEMICOLON
+/// / Expr (AssignOp Expr / (COMMA (VarDeclProto / Expr))+ EQUAL Expr)? SEMICOLON
+fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Index {
+ const scratch_top = p.scratch.items.len;
+ defer p.scratch.shrinkRetainingCapacity(scratch_top);
+
+ while (true) {
+ const var_decl_proto = try p.parseVarDeclProto();
+ if (var_decl_proto != 0) {
+ try p.scratch.append(p.gpa, var_decl_proto);
+ } else {
+ const expr = try p.parseExpr();
+ if (expr == 0) {
+ if (p.scratch.items.len == scratch_top) {
+ // We parsed nothing
+ return p.fail(.expected_statement);
+ } else {
+ // We've had at least one LHS, but had a bad comma
+ return p.fail(.expected_expr_or_var_decl);
+ }
+ }
+ try p.scratch.append(p.gpa, expr);
+ }
+ _ = p.eatToken(.comma) orelse break;
+ }
+
+ const lhs_count = p.scratch.items.len - scratch_top;
+ assert(lhs_count > 0);
+
+ const equal_token = p.eatToken(.equal) orelse eql: {
+ if (lhs_count > 1) {
+ // Definitely a destructure, so allow recovering from ==
+ if (p.eatToken(.equal_equal)) |tok| {
+ try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok });
+ break :eql tok;
+ }
+ return p.failExpected(.equal);
+ }
+ const lhs = p.scratch.items[scratch_top];
+ switch (p.nodes.items(.tag)[lhs]) {
+ .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
+ // Definitely a var decl, so allow recovering from ==
+ if (p.eatToken(.equal_equal)) |tok| {
+ try p.warnMsg(.{ .tag = .wrong_equal_var_decl, .token = tok });
+ break :eql tok;
+ }
+ return p.failExpected(.equal);
+ },
+ else => {},
+ }
+
+ const expr = try p.finishAssignExpr(lhs);
+ try p.expectSemicolon(.expected_semi_after_stmt, true);
+ if (comptime_token) |t| {
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = t,
+ .data = .{
+ .lhs = expr,
+ .rhs = undefined,
+ },
+ });
+ } else {
+ return expr;
+ }
+ };
+
+ const rhs = try p.expectExpr();
+ try p.expectSemicolon(.expected_semi_after_stmt, true);
+
+ if (lhs_count == 1) {
+ const lhs = p.scratch.items[scratch_top];
+ switch (p.nodes.items(.tag)[lhs]) {
+ .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
+ p.nodes.items(.data)[lhs].rhs = rhs;
+ // Don't need to wrap in comptime
+ return lhs;
+ },
+ else => {},
+ }
+ const expr = try p.addNode(.{
+ .tag = .assign,
+ .main_token = equal_token,
+ .data = .{ .lhs = lhs, .rhs = rhs },
+ });
+ if (comptime_token) |t| {
+ return p.addNode(.{
+ .tag = .@"comptime",
+ .main_token = t,
+ .data = .{
+ .lhs = expr,
+ .rhs = undefined,
+ },
+ });
+ } else {
+ return expr;
+ }
}
- return statement;
+
+ // An actual destructure! No need for any `comptime` wrapper here.
+
+ const extra_start = p.extra_data.items.len;
+ try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
+ p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
+ p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
+
+ return p.addNode(.{
+ .tag = .assign_destructure,
+ .main_token = equal_token,
+ .data = .{
+ .lhs = @intCast(extra_start),
+ .rhs = rhs,
+ },
+ });
}
/// If a parse error occurs, reports an error, but then finds the next statement
@@ -1345,7 +1486,7 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index {
}
}
-/// AssignExpr <- Expr (AssignOp Expr)?
+/// AssignExpr <- Expr (AssignOp Expr / (COMMA Expr)+ EQUAL Expr)?
///
/// AssignOp
/// <- ASTERISKEQUAL
@@ -1369,8 +1510,40 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index {
fn parseAssignExpr(p: *Parse) !Node.Index {
const expr = try p.parseExpr();
if (expr == 0) return null_node;
+ return p.finishAssignExpr(expr);
+}
- const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+/// SingleAssignExpr <- Expr (AssignOp Expr)?
+fn parseSingleAssignExpr(p: *Parse) !Node.Index {
+ const lhs = try p.parseExpr();
+ if (lhs == 0) return null_node;
+ const tag = assignOpNode(p.token_tags[p.tok_i]) orelse return lhs;
+ return p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.expectExpr(),
+ },
+ });
+}
+
+fn finishAssignExpr(p: *Parse, lhs: Node.Index) !Node.Index {
+ const tok = p.token_tags[p.tok_i];
+ if (tok == .comma) return p.finishAssignDestructureExpr(lhs);
+ const tag = assignOpNode(tok) orelse return lhs;
+ return p.addNode(.{
+ .tag = tag,
+ .main_token = p.nextToken(),
+ .data = .{
+ .lhs = lhs,
+ .rhs = try p.expectExpr(),
+ },
+ });
+}
+
+fn assignOpNode(tok: Token.Tag) ?Node.Tag {
+ return switch (tok) {
.asterisk_equal => .assign_mul,
.slash_equal => .assign_div,
.percent_equal => .assign_mod,
@@ -1389,18 +1562,51 @@ fn parseAssignExpr(p: *Parse) !Node.Index {
.plus_pipe_equal => .assign_add_sat,
.minus_pipe_equal => .assign_sub_sat,
.equal => .assign,
- else => return expr,
+ else => null,
};
+}
+
+fn finishAssignDestructureExpr(p: *Parse, first_lhs: Node.Index) !Node.Index {
+ const scratch_top = p.scratch.items.len;
+ defer p.scratch.shrinkRetainingCapacity(scratch_top);
+
+ try p.scratch.append(p.gpa, first_lhs);
+
+ while (p.eatToken(.comma)) |_| {
+ const expr = try p.expectExpr();
+ try p.scratch.append(p.gpa, expr);
+ }
+
+ const equal_token = try p.expectToken(.equal);
+
+ const rhs = try p.expectExpr();
+
+ const lhs_count = p.scratch.items.len - scratch_top;
+ assert(lhs_count > 1); // we already had first_lhs, and must have at least one more lvalue
+
+ const extra_start = p.extra_data.items.len;
+ try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
+ p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
+ p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
+
return p.addNode(.{
- .tag = tag,
- .main_token = p.nextToken(),
+ .tag = .assign_destructure,
+ .main_token = equal_token,
.data = .{
- .lhs = expr,
- .rhs = try p.expectExpr(),
+ .lhs = @intCast(extra_start),
+ .rhs = rhs,
},
});
}
+fn expectSingleAssignExpr(p: *Parse) !Node.Index {
+ const expr = try p.parseSingleAssignExpr();
+ if (expr == 0) {
+ return p.fail(.expected_expr_or_assignment);
+ }
+ return expr;
+}
+
fn expectAssignExpr(p: *Parse) !Node.Index {
const expr = try p.parseAssignExpr();
if (expr == 0) {
@@ -3260,7 +3466,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = 0,
- .rhs = try p.expectAssignExpr(),
+ .rhs = try p.expectSingleAssignExpr(),
},
}),
1 => return p.addNode(.{
@@ -3268,7 +3474,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = items[0],
- .rhs = try p.expectAssignExpr(),
+ .rhs = try p.expectSingleAssignExpr(),
},
}),
else => return p.addNode(.{
@@ -3276,7 +3482,7 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
.main_token = arrow_token,
.data = .{
.lhs = try p.addExtra(try p.listToSpan(items)),
- .rhs = try p.expectAssignExpr(),
+ .rhs = try p.expectSingleAssignExpr(),
},
}),
}
lib/std/zig/parser_test.zig
@@ -4348,12 +4348,12 @@ test "zig fmt: invalid else branch statement" {
\\ for ("") |_| {} else defer {}
\\}
, &[_]Error{
- .expected_statement,
- .expected_statement,
- .expected_statement,
- .expected_statement,
- .expected_statement,
- .expected_statement,
+ .expected_expr_or_assignment,
+ .expected_expr_or_assignment,
+ .expected_expr_or_assignment,
+ .expected_expr_or_assignment,
+ .expected_expr_or_assignment,
+ .expected_expr_or_assignment,
});
}
@@ -6078,7 +6078,7 @@ test "recovery: missing for payload" {
try testError(
\\comptime {
\\ const a = for(a) {};
- \\ const a: for(a) blk: {};
+ \\ const a: for(a) blk: {} = {};
\\ for(a) {}
\\}
, &[_]Error{
lib/std/zig/render.zig
@@ -164,7 +164,7 @@ fn renderMember(
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?),
+ => return renderVarDecl(gpa, ais, tree, tree.fullVarDecl(decl).?, false, .semicolon),
.test_decl => {
const test_token = main_tokens[decl];
@@ -427,6 +427,42 @@ fn renderExpression(gpa: Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index,
return renderExpression(gpa, ais, tree, infix.rhs, space);
},
+ .assign_destructure => {
+ const lhs_count = tree.extra_data[datas[node].lhs];
+ assert(lhs_count > 1);
+ const lhs_exprs = tree.extra_data[datas[node].lhs + 1 ..][0..lhs_count];
+ const rhs = datas[node].rhs;
+
+ const maybe_comptime_token = tree.firstToken(node) - 1;
+ if (token_tags[maybe_comptime_token] == .keyword_comptime) {
+ try renderToken(ais, tree, maybe_comptime_token, .space);
+ }
+
+ for (lhs_exprs, 0..) |lhs_node, i| {
+ const lhs_space: Space = if (i == lhs_exprs.len - 1) .space else .comma_space;
+ switch (node_tags[lhs_node]) {
+ .global_var_decl,
+ .local_var_decl,
+ .simple_var_decl,
+ .aligned_var_decl,
+ => {
+ try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(lhs_node).?, true, lhs_space);
+ },
+ else => try renderExpression(gpa, ais, tree, lhs_node, lhs_space),
+ }
+ }
+ const equal_token = main_tokens[node];
+ if (tree.tokensOnSameLine(equal_token, equal_token + 1)) {
+ try renderToken(ais, tree, equal_token, .space);
+ } else {
+ ais.pushIndent();
+ try renderToken(ais, tree, equal_token, .newline);
+ ais.popIndent();
+ }
+ ais.pushIndentOneShot();
+ return renderExpression(gpa, ais, tree, rhs, space);
+ },
+
.bit_not,
.bool_not,
.negation,
@@ -943,7 +979,16 @@ fn renderAsmInput(
return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen
}
-fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDecl) Error!void {
+fn renderVarDecl(
+ gpa: Allocator,
+ ais: *Ais,
+ tree: Ast,
+ var_decl: Ast.full.VarDecl,
+ /// Destructures intentionally ignore leading `comptime` tokens.
+ ignore_comptime_token: bool,
+ /// `comma_space` and `space` are used for destructure LHS decls.
+ space: Space,
+) Error!void {
if (var_decl.visib_token) |visib_token| {
try renderToken(ais, tree, visib_token, Space.space); // pub
}
@@ -960,21 +1005,31 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
try renderToken(ais, tree, thread_local_token, Space.space); // threadlocal
}
- if (var_decl.comptime_token) |comptime_token| {
- try renderToken(ais, tree, comptime_token, Space.space); // comptime
+ if (!ignore_comptime_token) {
+ if (var_decl.comptime_token) |comptime_token| {
+ try renderToken(ais, tree, comptime_token, Space.space); // comptime
+ }
}
try renderToken(ais, tree, var_decl.ast.mut_token, .space); // var
- const name_space = if (var_decl.ast.type_node == 0 and
- (var_decl.ast.align_node != 0 or
- var_decl.ast.addrspace_node != 0 or
- var_decl.ast.section_node != 0 or
- var_decl.ast.init_node != 0))
- Space.space
- else
- Space.none;
- try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name
+ if (var_decl.ast.type_node != 0 or var_decl.ast.align_node != 0 or
+ var_decl.ast.addrspace_node != 0 or var_decl.ast.section_node != 0 or
+ var_decl.ast.init_node != 0)
+ {
+ const name_space = if (var_decl.ast.type_node == 0 and
+ (var_decl.ast.align_node != 0 or
+ var_decl.ast.addrspace_node != 0 or
+ var_decl.ast.section_node != 0 or
+ var_decl.ast.init_node != 0))
+ Space.space
+ else
+ Space.none;
+
+ try renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, name_space, .preserve_when_shadowing); // name
+ } else {
+ return renderIdentifier(ais, tree, var_decl.ast.mut_token + 1, space, .preserve_when_shadowing); // name
+ }
if (var_decl.ast.type_node != 0) {
try renderToken(ais, tree, var_decl.ast.mut_token + 2, Space.space); // :
@@ -983,9 +1038,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
{
try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .space);
} else {
- try renderExpression(gpa, ais, tree, var_decl.ast.type_node, .none);
- const semicolon = tree.lastToken(var_decl.ast.type_node) + 1;
- return renderToken(ais, tree, semicolon, Space.newline); // ;
+ return renderExpression(gpa, ais, tree, var_decl.ast.type_node, space);
}
}
@@ -1001,8 +1054,7 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
{
try renderToken(ais, tree, rparen, .space); // )
} else {
- try renderToken(ais, tree, rparen, .none); // )
- return renderToken(ais, tree, rparen + 1, Space.newline); // ;
+ return renderToken(ais, tree, rparen, space); // )
}
}
@@ -1031,23 +1083,21 @@ fn renderVarDecl(gpa: Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDec
if (var_decl.ast.init_node != 0) {
try renderToken(ais, tree, rparen, .space); // )
} else {
- try renderToken(ais, tree, rparen, .none); // )
- return renderToken(ais, tree, rparen + 1, Space.newline); // ;
+ return renderToken(ais, tree, rparen, space); // )
}
}
- if (var_decl.ast.init_node != 0) {
- const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
- const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
- {
- ais.pushIndent();
- try renderToken(ais, tree, eq_token, eq_space); // =
- ais.popIndent();
- }
- ais.pushIndentOneShot();
- return renderExpression(gpa, ais, tree, var_decl.ast.init_node, .semicolon); // ;
+ assert(var_decl.ast.init_node != 0);
+
+ const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
+ const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
+ {
+ ais.pushIndent();
+ try renderToken(ais, tree, eq_token, eq_space); // =
+ ais.popIndent();
}
- return renderToken(ais, tree, var_decl.ast.mut_token + 2, .newline); // ;
+ ais.pushIndentOneShot();
+ return renderExpression(gpa, ais, tree, var_decl.ast.init_node, space); // ;
}
fn renderIf(gpa: Allocator, ais: *Ais, tree: Ast, if_node: Ast.full.If, space: Space) Error!void {
@@ -1825,7 +1875,7 @@ fn renderBlock(
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?),
+ => try renderVarDecl(gpa, ais, tree, tree.fullVarDecl(stmt).?, false, .semicolon),
else => try renderExpression(gpa, ais, tree, stmt, .semicolon),
}
}
src/AstGen.zig
@@ -280,6 +280,20 @@ const ResultInfo = struct {
/// The result instruction from the expression must be ignored.
/// Always an instruction with tag `alloc_inferred`.
inferred_ptr: Zir.Inst.Ref,
+ /// The expression has a sequence of pointers to store its results into due to a destructure
+ /// operation. Each of these pointers may or may not have an inferred type.
+ destructure: struct {
+ /// The AST node of the destructure operation itself.
+ src_node: Ast.Node.Index,
+ /// The pointers to store results into.
+ components: []const DestructureComponent,
+ },
+
+ const DestructureComponent = union(enum) {
+ typed_ptr: PtrResultLoc,
+ inferred_ptr: Zir.Inst.Ref,
+ discard,
+ };
const PtrResultLoc = struct {
inst: Zir.Inst.Ref,
@@ -298,6 +312,12 @@ const ResultInfo = struct {
const ptr_ty = try gz.addUnNode(.typeof, ptr.inst, node);
return gz.addUnNode(.elem_type, ptr_ty, node);
},
+ .destructure => |destructure| {
+ return astgen.failNodeNotes(node, "{s} must have a known result type", .{builtin_name}, &.{
+ try astgen.errNoteNode(destructure.src_node, "destructure expressions do not provide a single result type", .{}),
+ try astgen.errNoteNode(node, "use @as to provide explicit result type", .{}),
+ });
+ },
}
return astgen.failNodeNotes(node, "{s} must have a known result type", .{builtin_name}, &.{
@@ -399,6 +419,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Ins
.asm_input => unreachable,
.assign,
+ .assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -621,6 +642,13 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return rvalue(gz, ri, .void_value, node);
},
+ .assign_destructure => {
+ // Note that this variant does not declare any new var/const: that
+ // variant is handled by `blockExprStmts`.
+ try assignDestructure(gz, scope, node);
+ return rvalue(gz, ri, .void_value, node);
+ },
+
.assign_shl => {
try assignShift(gz, scope, node, .shl);
return rvalue(gz, ri, .void_value, node);
@@ -1478,6 +1506,33 @@ fn arrayInitExpr(
return arrayInitExprRlPtr(gz, scope, node, ptr_inst, array_init.ast.elements, types.array);
}
},
+ .destructure => |destructure| {
+ if (types.array != .none) {
+ // We have a specific type, so there may be things like default
+ // field values messing with us. Do this as a standard typed
+ // init followed by an rvalue destructure.
+ const result = try arrayInitExprInner(gz, scope, node, array_init.ast.elements, types.array, types.elem, .array_init);
+ return rvalue(gz, ri, result, node);
+ }
+ // Untyped init - destructure directly into result pointers
+ if (array_init.ast.elements.len != destructure.components.len) {
+ return astgen.failNodeNotes(node, "expected {} elements for destructure, found {}", .{
+ destructure.components.len,
+ array_init.ast.elements.len,
+ }, &.{
+ try astgen.errNoteNode(destructure.src_node, "result destructured here", .{}),
+ });
+ }
+ for (array_init.ast.elements, destructure.components) |elem_init, ds_comp| {
+ const elem_ri: ResultInfo = .{ .rl = switch (ds_comp) {
+ .typed_ptr => |ptr_rl| .{ .ptr = ptr_rl },
+ .inferred_ptr => |ptr_inst| .{ .inferred_ptr = ptr_inst },
+ .discard => .discard,
+ } };
+ _ = try expr(gz, scope, elem_ri, elem_init);
+ }
+ return .void_value;
+ },
}
}
@@ -1707,6 +1762,23 @@ fn structInitExpr(
return structInitExprRlPtr(gz, scope, node, struct_init, ptr_inst);
}
},
+ .destructure => |destructure| {
+ if (struct_init.ast.type_expr == 0) {
+ // This is an untyped init, so is an actual struct, which does
+ // not support destructuring.
+ return astgen.failNodeNotes(node, "struct value cannot be destructured", .{}, &.{
+ try astgen.errNoteNode(destructure.src_node, "result destructured here", .{}),
+ });
+ }
+ // You can init tuples using struct init syntax and numeric field
+ // names, but as with array inits, we could be bitten by default
+ // fields. Therefore, we do a normal typed init then an rvalue
+ // destructure.
+ const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
+ _ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node);
+ const result = try structInitExprRlTy(gz, scope, node, struct_init, ty_inst, .struct_init);
+ return rvalue(gz, ri, result, node);
+ },
}
}
@@ -1968,6 +2040,7 @@ fn restoreErrRetIndex(
// TODO: Update this to do a proper load from the rl_ptr, once Sema can support it.
break :blk .none;
},
+ .destructure => return, // value must be a tuple or array, so never restore/pop
else => result,
},
else => .none, // always restore/pop
@@ -2340,6 +2413,8 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
.simple_var_decl,
.aligned_var_decl, => scope = try varDecl(gz, scope, statement, block_arena_allocator, tree.fullVarDecl(statement).?),
+ .assign_destructure => scope = try assignDestructureMaybeDecls(gz, scope, statement, block_arena_allocator),
+
.@"defer" => scope = try deferStmt(gz, scope, statement, block_arena_allocator, .defer_normal),
.@"errdefer" => scope = try deferStmt(gz, scope, statement, block_arena_allocator, .defer_error),
@@ -2481,6 +2556,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
+ .elem_val_imm,
.field_ptr,
.field_ptr_init,
.field_val,
@@ -2686,6 +2762,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.validate_array_init_ty,
.validate_struct_init_ty,
.validate_deref,
+ .validate_destructure,
.save_err_ret_index,
.restore_err_ret_index,
=> break :b true,
@@ -3227,6 +3304,301 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!voi
} } }, rhs);
}
+/// Handles destructure assignments where no LHS is a `const` or `var` decl.
+fn assignDestructure(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!void {
+ try emitDbgNode(gz, node);
+ const astgen = gz.astgen;
+ const tree = astgen.tree;
+ const token_tags = tree.tokens.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_tags = tree.nodes.items(.tag);
+
+ // The LHS list lives in `extra_data`: an element count followed by that
+ // many node indices (see `Ast.Node.Tag.assign_destructure`).
+ const extra_index = node_datas[node].lhs;
+ const lhs_count = tree.extra_data[extra_index];
+ const lhs_nodes: []const Ast.Node.Index = @ptrCast(tree.extra_data[extra_index + 1 ..][0..lhs_count]);
+ const rhs = node_datas[node].rhs;
+
+ // An optional `comptime` keyword is the token immediately preceding this
+ // node; it is not part of the node itself.
+ const maybe_comptime_token = tree.firstToken(node) - 1;
+ const declared_comptime = token_tags[maybe_comptime_token] == .keyword_comptime;
+
+ if (declared_comptime and gz.is_comptime) {
+ return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
+ }
+
+ // If this expression is marked comptime, we must wrap the whole thing in a comptime block.
+ var gz_buf: GenZir = undefined;
+ const inner_gz = if (declared_comptime) bs: {
+ gz_buf = gz.makeSubBlock(scope);
+ gz_buf.is_comptime = true;
+ break :bs &gz_buf;
+ } else gz;
+ defer if (declared_comptime) inner_gz.unstack();
+
+ // Build one result-location component per LHS: `_` becomes a discard,
+ // anything else is evaluated as an lvalue yielding a typed pointer.
+ const rl_components = try astgen.arena.alloc(ResultInfo.Loc.DestructureComponent, lhs_nodes.len);
+ for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
+ if (node_tags[lhs_node] == .identifier) {
+ // This intentionally does not support `@"_"` syntax.
+ const ident_name = tree.tokenSlice(main_tokens[lhs_node]);
+ if (mem.eql(u8, ident_name, "_")) {
+ lhs_rl.* = .discard;
+ continue;
+ }
+ }
+ lhs_rl.* = .{ .typed_ptr = .{
+ .inst = try lvalExpr(inner_gz, scope, lhs_node),
+ .src_node = lhs_node,
+ } };
+ }
+
+ const ri: ResultInfo = .{ .rl = .{ .destructure = .{
+ .src_node = node,
+ .components = rl_components,
+ } } };
+
+ // Evaluating the RHS with a `.destructure` result location makes
+ // `rvalue` validate the value and store each element to its component.
+ _ = try expr(inner_gz, scope, ri, rhs);
+
+ if (declared_comptime) {
+ // Close the comptime block and splice it into the parent block.
+ const comptime_block_inst = try gz.makeBlockInst(.block_comptime, node);
+ _ = try inner_gz.addBreak(.@"break", comptime_block_inst, .void_value);
+ try inner_gz.setBlockBody(comptime_block_inst);
+ try gz.instructions.append(gz.astgen.gpa, comptime_block_inst);
+ }
+}
+
+/// Handles destructure assignments where the LHS may contain `const` or `var` decls.
+fn assignDestructureMaybeDecls(
+ gz: *GenZir,
+ scope: *Scope,
+ node: Ast.Node.Index,
+ block_arena: Allocator,
+) InnerError!*Scope {
+ try emitDbgNode(gz, node);
+ const astgen = gz.astgen;
+ const tree = astgen.tree;
+ const token_tags = tree.tokens.items(.tag);
+ const node_datas = tree.nodes.items(.data);
+ const main_tokens = tree.nodes.items(.main_token);
+ const node_tags = tree.nodes.items(.tag);
+
+ // The LHS list lives in `extra_data`: an element count followed by that
+ // many node indices (see `Ast.Node.Tag.assign_destructure`).
+ const extra_index = node_datas[node].lhs;
+ const lhs_count = tree.extra_data[extra_index];
+ const lhs_nodes: []const Ast.Node.Index = @ptrCast(tree.extra_data[extra_index + 1 ..][0..lhs_count]);
+ const rhs = node_datas[node].rhs;
+
+ // An optional `comptime` keyword is the token immediately preceding this node.
+ const maybe_comptime_token = tree.firstToken(node) - 1;
+ const declared_comptime = token_tags[maybe_comptime_token] == .keyword_comptime;
+ if (declared_comptime and gz.is_comptime) {
+ return astgen.failNode(node, "redundant comptime keyword in already comptime scope", .{});
+ }
+
+ const is_comptime = declared_comptime or gz.is_comptime;
+ const rhs_is_comptime = tree.nodes.items(.tag)[rhs] == .@"comptime";
+
+ // When declaring consts via a destructure, we always use a result pointer.
+ // This avoids the need to create tuple types, and is also likely easier to
+ // optimize, since it's a bit tricky for the optimizer to "split up" the
+ // value into individual pointer writes down the line.
+
+ // We know this rl information won't live past the evaluation of this
+ // expression, so it may as well go in the block arena.
+ const rl_components = try block_arena.alloc(ResultInfo.Loc.DestructureComponent, lhs_nodes.len);
+ var any_non_const_lhs = false;
+ var any_lvalue_expr = false;
+ // First pass: classify each LHS, emitting allocs for declared variables.
+ for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
+ switch (node_tags[lhs_node]) {
+ .identifier => {
+ // This intentionally does not support `@"_"` syntax.
+ const ident_name = tree.tokenSlice(main_tokens[lhs_node]);
+ if (mem.eql(u8, ident_name, "_")) {
+ any_non_const_lhs = true;
+ lhs_rl.* = .discard;
+ continue;
+ }
+ },
+ .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
+ const full = tree.fullVarDecl(lhs_node).?;
+
+ const name_token = full.ast.mut_token + 1;
+ const ident_name_raw = tree.tokenSlice(name_token);
+ if (mem.eql(u8, ident_name_raw, "_")) {
+ return astgen.failTok(name_token, "'_' used as an identifier without @\"_\" syntax", .{});
+ }
+
+ // We detect shadowing in the second pass over these, while we're creating scopes.
+
+ if (full.ast.addrspace_node != 0) {
+ return astgen.failTok(main_tokens[full.ast.addrspace_node], "cannot set address space of local variable '{s}'", .{ident_name_raw});
+ }
+ if (full.ast.section_node != 0) {
+ return astgen.failTok(main_tokens[full.ast.section_node], "cannot set section of local variable '{s}'", .{ident_name_raw});
+ }
+
+ const is_const = switch (token_tags[full.ast.mut_token]) {
+ .keyword_var => false,
+ .keyword_const => true,
+ else => unreachable,
+ };
+ if (!is_const) any_non_const_lhs = true;
+
+ // We also mark `const`s as comptime if the RHS is definitely comptime-known.
+ const this_lhs_comptime = is_comptime or (is_const and rhs_is_comptime);
+
+ const align_inst: Zir.Inst.Ref = if (full.ast.align_node != 0)
+ try expr(gz, scope, align_ri, full.ast.align_node)
+ else
+ .none;
+
+ if (full.ast.type_node != 0) {
+ // Typed alloc
+ const type_inst = try typeExpr(gz, scope, full.ast.type_node);
+ const ptr = if (align_inst == .none) ptr: {
+ const tag: Zir.Inst.Tag = if (is_const)
+ .alloc
+ else if (this_lhs_comptime)
+ .alloc_comptime_mut
+ else
+ .alloc_mut;
+ break :ptr try gz.addUnNode(tag, type_inst, node);
+ } else try gz.addAllocExtended(.{
+ .node = node,
+ .type_inst = type_inst,
+ .align_inst = align_inst,
+ .is_const = is_const,
+ .is_comptime = this_lhs_comptime,
+ });
+ lhs_rl.* = .{ .typed_ptr = .{ .inst = ptr } };
+ } else {
+ // Inferred alloc
+ const ptr = if (align_inst == .none) ptr: {
+ const tag: Zir.Inst.Tag = if (is_const) tag: {
+ break :tag if (this_lhs_comptime) .alloc_inferred_comptime else .alloc_inferred;
+ } else tag: {
+ break :tag if (this_lhs_comptime) .alloc_inferred_comptime_mut else .alloc_inferred_mut;
+ };
+ break :ptr try gz.addNode(tag, node);
+ } else try gz.addAllocExtended(.{
+ .node = node,
+ .type_inst = .none,
+ .align_inst = align_inst,
+ .is_const = is_const,
+ .is_comptime = this_lhs_comptime,
+ });
+ lhs_rl.* = .{ .inferred_ptr = ptr };
+ }
+
+ continue;
+ },
+ else => {},
+ }
+ // This LHS is just an lvalue expression.
+ // We will fill in its result pointer later, inside a comptime block.
+ any_non_const_lhs = true;
+ any_lvalue_expr = true;
+ lhs_rl.* = .{ .typed_ptr = .{
+ .inst = undefined,
+ .src_node = lhs_node,
+ } };
+ }
+
+ if (declared_comptime and !any_non_const_lhs) {
+ try astgen.appendErrorTok(maybe_comptime_token, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
+ }
+
+ // If this expression is marked comptime, we must wrap it in a comptime block.
+ var gz_buf: GenZir = undefined;
+ const inner_gz = if (declared_comptime) bs: {
+ gz_buf = gz.makeSubBlock(scope);
+ gz_buf.is_comptime = true;
+ break :bs &gz_buf;
+ } else gz;
+ defer if (declared_comptime) inner_gz.unstack();
+
+ if (any_lvalue_expr) {
+ // At least one LHS was an lvalue expr. Iterate again in order to
+ // evaluate the lvalues from within the possible block_comptime.
+ for (rl_components, lhs_nodes) |*lhs_rl, lhs_node| {
+ if (lhs_rl.* != .typed_ptr) continue;
+ switch (node_tags[lhs_node]) {
+ .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => continue,
+ else => {},
+ }
+ lhs_rl.typed_ptr.inst = try lvalExpr(inner_gz, scope, lhs_node);
+ }
+ }
+
+ // We can't give a reasonable anon name strategy for destructured inits, so
+ // leave it at its default of `.anon`.
+ _ = try reachableExpr(inner_gz, scope, .{ .rl = .{ .destructure = .{
+ .src_node = node,
+ .components = rl_components,
+ } } }, rhs, node);
+
+ if (declared_comptime) {
+ // Finish the block_comptime. Inferred alloc resolution etc will occur
+ // in the parent block.
+ const comptime_block_inst = try gz.makeBlockInst(.block_comptime, node);
+ _ = try inner_gz.addBreak(.@"break", comptime_block_inst, .void_value);
+ try inner_gz.setBlockBody(comptime_block_inst);
+ try gz.instructions.append(gz.astgen.gpa, comptime_block_inst);
+ }
+
+ // Now, iterate over the LHS exprs to construct any new scopes.
+ // If there were any inferred allocations, resolve them.
+ // If there were any `const` decls, make the pointer constant.
+ var cur_scope = scope;
+ for (rl_components, lhs_nodes) |lhs_rl, lhs_node| {
+ // NOTE(review): `.global_var_decl` is absent from the list below, unlike
+ // the earlier passes. It appears unreachable here because addrspace/
+ // section decls were rejected in the first pass — TODO confirm.
+ switch (node_tags[lhs_node]) {
+ .local_var_decl, .simple_var_decl, .aligned_var_decl => {},
+ else => continue, // We were mutating an existing lvalue - nothing to do
+ }
+ const full = tree.fullVarDecl(lhs_node).?;
+ const raw_ptr = switch (lhs_rl) {
+ .discard => unreachable,
+ .typed_ptr => |typed_ptr| typed_ptr.inst,
+ .inferred_ptr => |ptr_inst| ptr_inst,
+ };
+ // If the alloc was inferred, resolve it.
+ if (full.ast.type_node == 0) {
+ _ = try gz.addUnNode(.resolve_inferred_alloc, raw_ptr, lhs_node);
+ }
+ const is_const = switch (token_tags[full.ast.mut_token]) {
+ .keyword_var => false,
+ .keyword_const => true,
+ else => unreachable,
+ };
+ // If the alloc was const, make it const.
+ const var_ptr = if (is_const) make_const: {
+ break :make_const try gz.addUnNode(.make_ptr_const, raw_ptr, node);
+ } else raw_ptr;
+ const name_token = full.ast.mut_token + 1;
+ const ident_name_raw = tree.tokenSlice(name_token);
+ const ident_name = try astgen.identAsString(name_token);
+ try astgen.detectLocalShadowing(
+ cur_scope,
+ ident_name,
+ name_token,
+ ident_name_raw,
+ if (is_const) .@"local constant" else .@"local variable",
+ );
+ try gz.addDbgVar(.dbg_var_ptr, ident_name, var_ptr);
+ // Finally, create the scope.
+ const sub_scope = try block_arena.create(Scope.LocalPtr);
+ sub_scope.* = .{
+ .parent = cur_scope,
+ .gen_zir = gz,
+ .name = ident_name,
+ .ptr = var_ptr,
+ .token_src = name_token,
+ .maybe_comptime = is_const or is_comptime,
+ .id_cat = if (is_const) .@"local constant" else .@"local variable",
+ };
+ cur_scope = &sub_scope.base;
+ }
+
+ return cur_scope;
+}
+
fn assignOp(
gz: *GenZir,
scope: *Scope,
@@ -9059,6 +9431,7 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
.array_cat,
.array_mult,
.assign,
+ .assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9237,6 +9610,7 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
.array_cat,
.array_mult,
.assign,
+ .assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9483,6 +9857,7 @@ fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
.array_cat,
.array_mult,
.assign,
+ .assign_destructure,
.assign_bit_and,
.assign_bit_or,
.assign_shl,
@@ -9830,6 +10205,37 @@ fn rvalue(
_ = try gz.addBin(.store_to_inferred_ptr, alloc, result);
return .void_value;
},
+ .destructure => |destructure| {
+ const components = destructure.components;
+ _ = try gz.addPlNode(.validate_destructure, src_node, Zir.Inst.ValidateDestructure{
+ .operand = result,
+ .destructure_node = gz.nodeIndexToRelative(destructure.src_node),
+ .expect_len = @intCast(components.len),
+ });
+ for (components, 0..) |component, i| {
+ if (component == .discard) continue;
+ const elem_val = try gz.add(.{
+ .tag = .elem_val_imm,
+ .data = .{ .elem_val_imm = .{
+ .operand = result,
+ .idx = @intCast(i),
+ } },
+ });
+ switch (component) {
+ .typed_ptr => |ptr_res| {
+ _ = try gz.addPlNode(.store_node, ptr_res.src_node orelse src_node, Zir.Inst.Bin{
+ .lhs = ptr_res.inst,
+ .rhs = elem_val,
+ });
+ },
+ .inferred_ptr => |ptr_inst| {
+ _ = try gz.addBin(.store_to_inferred_ptr, ptr_inst, elem_val);
+ },
+ .discard => unreachable,
+ }
+ }
+ return .void_value;
+ },
}
}
src/AstRlAnnotate.zig
@@ -203,6 +203,16 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
else => unreachable,
}
},
+ .assign_destructure => {
+ const lhs_count = tree.extra_data[node_datas[node].lhs];
+ const all_lhs = tree.extra_data[node_datas[node].lhs + 1 ..][0..lhs_count];
+ for (all_lhs) |lhs| {
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ }
+ // We don't need to gather any meaningful data here, because destructures always use RLS
+ _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ return false;
+ },
.assign => {
_ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
_ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.typed_ptr);
src/print_zir.zig
@@ -242,6 +242,7 @@ const Writer = struct {
.bool_br_or,
=> try self.writeBoolBr(stream, inst),
+ .validate_destructure => try self.writeValidateDestructure(stream, inst),
.validate_array_init_ty => try self.writeValidateArrayInitTy(stream, inst),
.array_type_sentinel => try self.writeArrayTypeSentinel(stream, inst),
.ptr_type => try self.writePtrType(stream, inst),
@@ -357,6 +358,8 @@ const Writer = struct {
.for_len => try self.writePlNodeMultiOp(stream, inst),
+ .elem_val_imm => try self.writeElemValImm(stream, inst),
+
.elem_ptr_imm => try self.writeElemPtrImm(stream, inst),
.@"export" => try self.writePlNodeExport(stream, inst),
@@ -585,6 +588,20 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
+ /// Pretty-prints a `validate_destructure` instruction: its operand, the
+ /// expected element count, and the source of the destructure node itself.
+ fn writeValidateDestructure(
+ self: *Writer,
+ stream: anytype,
+ inst: Zir.Inst.Index,
+ ) (@TypeOf(stream).Error || error{OutOfMemory})!void {
+ const inst_data = self.code.instructions.items(.data)[inst].pl_node;
+ const extra = self.code.extraData(Zir.Inst.ValidateDestructure, inst_data.payload_index).data;
+ try self.writeInstRef(stream, extra.operand);
+ try stream.print(", {d}) (destructure=", .{extra.expect_len});
+ try self.writeSrc(stream, LazySrcLoc.nodeOffset(extra.destructure_node));
+ try stream.writeAll(") ");
+ try self.writeSrc(stream, inst_data.src());
+ }
+
fn writeValidateArrayInitTy(
self: *Writer,
stream: anytype,
@@ -892,6 +909,12 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
+ /// Pretty-prints an `elem_val_imm` instruction: operand plus immediate index.
+ fn writeElemValImm(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
+ const inst_data = self.code.instructions.items(.data)[inst].elem_val_imm;
+ try self.writeInstRef(stream, inst_data.operand);
+ try stream.print(", {d})", .{inst_data.idx});
+ }
+
fn writeElemPtrImm(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].pl_node;
const extra = self.code.extraData(Zir.Inst.ElemPtrImm, inst_data.payload_index).data;
src/Sema.zig
@@ -1018,6 +1018,7 @@ fn analyzeBodyInner(
.elem_ptr_imm => try sema.zirElemPtrImm(block, inst),
.elem_val => try sema.zirElemVal(block, inst),
.elem_val_node => try sema.zirElemValNode(block, inst),
+ .elem_val_imm => try sema.zirElemValImm(block, inst),
.elem_type_index => try sema.zirElemTypeIndex(block, inst),
.elem_type => try sema.zirElemType(block, inst),
.indexable_ptr_elem_type => try sema.zirIndexablePtrElemType(block, inst),
@@ -1379,6 +1380,11 @@ fn analyzeBodyInner(
i += 1;
continue;
},
+ .validate_destructure => {
+ try sema.zirValidateDestructure(block, inst);
+ i += 1;
+ continue;
+ },
.@"export" => {
try sema.zirExport(block, inst);
i += 1;
@@ -5178,6 +5184,43 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
}
}
+/// Sema for ZIR `validate_destructure`: emit a compile error unless the
+/// operand is an array or tuple with exactly `expect_len` elements.
+fn zirValidateDestructure(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void {
+ const mod = sema.mod;
+ const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
+ const extra = sema.code.extraData(Zir.Inst.ValidateDestructure, inst_data.payload_index).data;
+ const src = inst_data.src();
+ const destructure_src = LazySrcLoc.nodeOffset(extra.destructure_node);
+ const operand = try sema.resolveInst(extra.operand);
+ const operand_ty = sema.typeOf(operand);
+
+ // Only arrays and tuples may be destructured.
+ const can_destructure = switch (operand_ty.zigTypeTag(mod)) {
+ .Array => true,
+ .Struct => operand_ty.isTuple(mod),
+ else => false,
+ };
+
+ if (!can_destructure) {
+ return sema.failWithOwnedErrorMsg(block, msg: {
+ const msg = try sema.errMsg(block, src, "type '{}' cannot be destructured", .{operand_ty.fmt(mod)});
+ errdefer msg.destroy(sema.gpa);
+ try sema.errNote(block, destructure_src, msg, "result destructured here", .{});
+ break :msg msg;
+ });
+ }
+
+ // Element-count check. For tuples, `arrayLen` presumably yields the field
+ // count — TODO confirm against Type.arrayLen.
+ if (operand_ty.arrayLen(mod) != extra.expect_len) {
+ return sema.failWithOwnedErrorMsg(block, msg: {
+ const msg = try sema.errMsg(block, src, "expected {} elements for destructure, found {}", .{
+ extra.expect_len,
+ operand_ty.arrayLen(mod),
+ });
+ errdefer msg.destroy(sema.gpa);
+ try sema.errNote(block, destructure_src, msg, "result destructured here", .{});
+ break :msg msg;
+ });
+ }
+}
+
fn failWithBadMemberAccess(
sema: *Sema,
block: *Block,
@@ -10304,6 +10347,17 @@ fn zirElemValNode(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
return sema.elemVal(block, src, array, elem_index, elem_index_src, true);
}
+/// Sema for ZIR `elem_val_imm`: index the operand with a u32 immediate.
+fn zirElemValImm(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const mod = sema.mod;
+ const inst_data = sema.code.instructions.items(.data)[inst].elem_val_imm;
+ const array = try sema.resolveInst(inst_data.operand);
+ const elem_index = try mod.intRef(Type.usize, inst_data.idx);
+ // `.unneeded` sources and no OOB safety check (final `false`): per the ZIR
+ // spec of `elem_val_imm`, a prior instruction (here `validate_destructure`)
+ // must already have validated this access.
+ return sema.elemVal(block, .unneeded, array, elem_index, .unneeded, false);
+}
+
fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const tracy = trace(@src());
defer tracy.end();
src/Zir.zig
@@ -434,6 +434,10 @@ pub const Inst = struct {
/// Payload is `Bin`.
/// No OOB safety check is emitted.
elem_val,
+ /// Same as `elem_val` but takes the index as an immediate value.
+ /// No OOB safety check is emitted. A prior instruction must validate this operation.
+ /// Uses the `elem_val_imm` union field.
+ elem_val_imm,
/// Emits a compile error if the operand is not `void`.
/// Uses the `un_node` field.
ensure_result_used,
@@ -725,6 +729,9 @@ pub const Inst = struct {
/// Check that operand type supports the dereference operand (.*).
/// Uses the `un_node` field.
validate_deref,
+ /// Check that the operand's type is an array or tuple with the given number of elements.
+ /// Uses the `pl_node` field. Payload is `ValidateDestructure`.
+ validate_destructure,
/// A struct literal with a specified type, with no fields.
/// Uses the `un_node` field.
struct_init_empty,
@@ -1069,6 +1076,7 @@ pub const Inst = struct {
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
+ .elem_val_imm,
.ensure_result_used,
.ensure_result_non_error,
.ensure_err_union_payload_void,
@@ -1145,6 +1153,7 @@ pub const Inst = struct {
.validate_struct_init,
.validate_array_init,
.validate_deref,
+ .validate_destructure,
.struct_init_empty,
.struct_init,
.struct_init_ref,
@@ -1295,6 +1304,7 @@ pub const Inst = struct {
.validate_struct_init,
.validate_array_init,
.validate_deref,
+ .validate_destructure,
.@"export",
.export_value,
.set_runtime_safety,
@@ -1369,6 +1379,7 @@ pub const Inst = struct {
.elem_ptr_node,
.elem_ptr_imm,
.elem_val_node,
+ .elem_val_imm,
.field_ptr,
.field_ptr_init,
.field_val,
@@ -1615,6 +1626,7 @@ pub const Inst = struct {
.elem_ptr_imm = .pl_node,
.elem_val = .pl_node,
.elem_val_node = .pl_node,
+ .elem_val_imm = .elem_val_imm,
.ensure_result_used = .un_node,
.ensure_result_non_error = .un_node,
.ensure_err_union_payload_void = .un_node,
@@ -1689,6 +1701,7 @@ pub const Inst = struct {
.validate_struct_init = .pl_node,
.validate_array_init = .pl_node,
.validate_deref = .un_node,
+ .validate_destructure = .pl_node,
.struct_init_empty = .un_node,
.field_type = .pl_node,
.field_type_ref = .pl_node,
@@ -2295,6 +2308,12 @@ pub const Inst = struct {
block: Ref, // If restored, the index is from this block's entrypoint
operand: Ref, // If non-error (or .none), then restore the index
},
+ elem_val_imm: struct {
+ /// The indexable value being accessed.
+ operand: Ref,
+ /// The index being accessed.
+ idx: u32,
+ },
// Make sure we don't accidentally add a field to make this union
// bigger than expected. Note that in Debug builds, Zig is allowed
@@ -2334,6 +2353,7 @@ pub const Inst = struct {
defer_err_code,
save_err_ret_index,
restore_err_ret_index,
+ elem_val_imm,
};
};
@@ -3233,6 +3253,15 @@ pub const Inst = struct {
index: u32,
len: u32,
};
+
+ /// Trailing payload of the `validate_destructure` instruction.
+ pub const ValidateDestructure = struct {
+ /// The value being destructured.
+ operand: Ref,
+ /// The `assign_destructure` node, as a relative node offset.
+ destructure_node: i32,
+ /// The expected field count.
+ expect_len: u32,
+ };
};
pub const SpecialProng = enum { none, @"else", under };
test/behavior/destructure.zig
@@ -0,0 +1,100 @@
+const std = @import("std");
+const assert = std.debug.assert;
+const expect = std.testing.expect;
+
+// Destructure into a mix of an existing lvalue (`x`), a new inferred `const`,
+// and a new explicitly-typed `var`.
+test "simple destructure" {
+ const S = struct {
+ fn doTheTest() !void {
+ var x: u32 = undefined;
+ x, const y, var z: u64 = .{ 1, @as(u16, 2), 3 };
+
+ comptime assert(@TypeOf(y) == u16);
+
+ try expect(x == 1);
+ try expect(y == 2);
+ try expect(z == 3);
+ }
+ };
+
+ try S.doTheTest();
+ try comptime S.doTheTest();
+}
+
+// A `comptime` prefix on the destructure makes every declared LHS comptime.
+test "destructure with comptime syntax" {
+ const S = struct {
+ fn doTheTest() void {
+ comptime var x: f32 = undefined;
+ comptime x, const y, var z = .{ 0.5, 123, 456 }; // z is a comptime var
+
+ comptime assert(@TypeOf(y) == comptime_int);
+ comptime assert(@TypeOf(z) == comptime_int);
+ comptime assert(x == 0.5);
+ comptime assert(y == 123);
+ comptime assert(z == 456);
+ }
+ };
+
+ S.doTheTest();
+ comptime S.doTheTest();
+}
+
+// Destructure the result of a labeled block that has multiple break sites.
+test "destructure from labeled block" {
+ const S = struct {
+ fn doTheTest(rt_true: bool) !void {
+ const x: u32, const y: u8, const z: i64 = blk: {
+ if (rt_true) break :blk .{ 1, 2, 3 };
+ break :blk .{ 4, 5, 6 };
+ };
+
+ try expect(x == 1);
+ try expect(y == 2);
+ try expect(z == 3);
+ }
+ };
+
+ try S.doTheTest(true);
+ try comptime S.doTheTest(true);
+}
+
+// Destructure a heterogeneous tuple value; each LHS gets its field's type.
+test "destructure tuple value" {
+ const tup: struct { f32, u32, i64 } = .{ 10.0, 20, 30 };
+ const x, const y, const z = tup;
+
+ comptime assert(@TypeOf(x) == f32);
+ comptime assert(@TypeOf(y) == u32);
+ comptime assert(@TypeOf(z) == i64);
+
+ try expect(x == 10.0);
+ try expect(y == 20);
+ try expect(z == 30);
+}
+
+// Destructure a fixed-length array; every element shares the child type.
+test "destructure array value" {
+ const arr: [3]u32 = .{ 10, 20, 30 };
+ const x, const y, const z = arr;
+
+ comptime assert(@TypeOf(x) == u32);
+ comptime assert(@TypeOf(y) == u32);
+ comptime assert(@TypeOf(z) == u32);
+
+ try expect(x == 10);
+ try expect(y == 20);
+ try expect(z == 30);
+}
+
+// Tuple fields can be initialized by numeric name (`.@"0"`) and then destructured.
+test "destructure from struct init with named tuple fields" {
+ const Tuple = struct { u8, u16, u32 };
+ const x, const y, const z = Tuple{
+ .@"0" = 100,
+ .@"1" = 200,
+ .@"2" = 300,
+ };
+
+ comptime assert(@TypeOf(x) == u8);
+ comptime assert(@TypeOf(y) == u16);
+ comptime assert(@TypeOf(z) == u32);
+
+ try expect(x == 100);
+ try expect(y == 200);
+ try expect(z == 300);
+}
test/cases/compile_errors/cast_without_result_type.zig
@@ -13,6 +13,10 @@ export fn d() void {
var x: f32 = 0;
_ = x + @floatFromInt(123);
}
+export fn e() void {
+ const x: u32, const y: u64 = @intCast(123);
+ _ = x + y;
+}
// error
// backend=stage2
@@ -26,3 +30,6 @@ export fn d() void {
// :9:10: note: use @as to provide explicit result type
// :14:13: error: @floatFromInt must have a known result type
// :14:13: note: use @as to provide explicit result type
+// :17:34: error: @intCast must have a known result type
+// :17:32: note: destructure expressions do not provide a single result type
+// :17:34: note: use @as to provide explicit result type
test/cases/compile_errors/extra_comma_in_destructure.zig
@@ -0,0 +1,10 @@
+export fn foo() void { // a trailing comma in the LHS list is a parse error (expectations below)
+ const x, const y, = .{ 1, 2 };
+ _ = .{ x, y };
+}
+
+// error
+// backend=stage2
+// target=native
+//
+// :2:23: error: expected expression or var decl, found '='
test/cases/compile_errors/invalid_destructure_astgen.zig
@@ -0,0 +1,22 @@
+export fn foo() void { // arity mismatch: 2 LHS components, 3 RHS elements
+ const x, const y = .{ 1, 2, 3 };
+ _ = .{ x, y };
+}
+
+export fn bar() void { // a non-tuple struct value cannot be destructured
+ var x: u32 = undefined;
+ x, const y: u64 = blk: {
+ if (true) break :blk .{ 1, 2 };
+ break :blk .{ .x = 123, .y = 456 };
+ };
+ _ = y;
+}
+
+// error
+// backend=stage2
+// target=native
+//
+// :2:25: error: expected 2 elements for destructure, found 3
+// :2:22: note: result destructured here
+// :10:21: error: struct value cannot be destructured
+// :8:21: note: result destructured here
test/cases/compile_errors/invalid_destructure_sema.zig
@@ -0,0 +1,23 @@
+export fn foo() void { // a scalar cannot be destructured (caught in Sema)
+ const x, const y = 123;
+ _ = .{ x, y };
+}
+
+export fn bar() void { // arity mismatch only detectable in Sema
+ var x: u32 = undefined;
+ x, const y: u64 = blk: {
+ if (false) break :blk .{ 1, 2 };
+ const val = .{ 3, 4, 5 };
+ break :blk val;
+ };
+ _ = y;
+}
+
+// error
+// backend=stage2
+// target=native
+//
+// :2:24: error: type 'comptime_int' cannot be destructured
+// :2:22: note: result destructured here
+// :11:20: error: expected 2 elements for destructure, found 3
+// :8:21: note: result destructured here
test/cases/unused_vars.zig
@@ -1,7 +1,10 @@
pub fn main() void {
const x = 1;
+ const y, var z = .{ 2, 3 };
}
// error
//
+// :3:18: error: unused local variable
+// :3:11: error: unused local constant
// :2:11: error: unused local constant
test/behavior.zig
@@ -157,6 +157,7 @@ test {
_ = @import("behavior/decltest.zig");
_ = @import("behavior/duplicated_test_names.zig");
_ = @import("behavior/defer.zig");
+ _ = @import("behavior/destructure.zig");
_ = @import("behavior/empty_tuple_fields.zig");
_ = @import("behavior/empty_union.zig");
_ = @import("behavior/enum.zig");