Commit ca6fb30e99
Changed files (25)
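Across both files, the changes adapt to the reworked `std.zig.Ast`: the untyped `lhs`/`rhs` slots of `Node.Data` become tagged payloads (`.node`, `.opt_node`, `.node_and_node`, `.opt_token_and_opt_node`, `.extra_range`, ...), the `0` sentinel for "absent" becomes an explicit `.none`, and raw `u32` indices become distinct enum types. A minimal sketch of that shape change, using simplified stand-in types rather than the real `std.zig.Ast`:

```zig
const std = @import("std");

// Stand-ins for std.zig.Ast.Node.Index and .OptionalIndex (assumption:
// u32-backed non-exhaustive enums, with maxInt(u32) reserved for "none").
const NodeIndex = enum(u32) { root = 0, _ };
const OptionalNodeIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(o: OptionalNodeIndex) ?NodeIndex {
        if (o == .none) return null;
        const i: NodeIndex = @enumFromInt(@intFromEnum(o));
        return i;
    }
};

// Old style: two bare u32 slots; 0 or `undefined` encoded "absent".
const OldData = struct { lhs: u32, rhs: u32 };

// New style: the active variant documents what each operand is.
const NewData = union(enum) {
    node: NodeIndex,
    opt_node: OptionalNodeIndex,
    node_and_node: struct { NodeIndex, NodeIndex },
    opt_node_and_opt_node: struct { OptionalNodeIndex, OptionalNodeIndex },
};

test "absent operands are spelled .none instead of 0" {
    const d: NewData = .{ .opt_node_and_opt_node = .{ .none, .none } };
    try std.testing.expect(d.opt_node_and_opt_node[0].unwrap() == null);
}
```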
lib/compiler/aro_translate_c/ast.zig
@@ -775,10 +775,7 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
ctx.nodes.appendAssumeCapacity(.{
.tag = .root,
.main_token = 0,
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
});
const root_members = blk: {
@@ -793,10 +790,7 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
break :blk try ctx.listToSpan(result.items);
};
- ctx.nodes.items(.data)[0] = .{
- .lhs = root_members.start,
- .rhs = root_members.end,
- };
+ ctx.nodes.items(.data)[0] = .{ .extra_range = root_members };
try ctx.tokens.append(gpa, .{
.tag = .eof,
@@ -814,15 +808,18 @@ pub fn render(gpa: Allocator, nodes: []const Node) !std.zig.Ast {
}
const NodeIndex = std.zig.Ast.Node.Index;
+const NodeOptionalIndex = std.zig.Ast.Node.OptionalIndex;
const NodeSubRange = std.zig.Ast.Node.SubRange;
const TokenIndex = std.zig.Ast.TokenIndex;
+const TokenOptionalIndex = std.zig.Ast.OptionalTokenIndex;
const TokenTag = std.zig.Token.Tag;
+const ExtraIndex = std.zig.Ast.ExtraIndex;
const Context = struct {
gpa: Allocator,
buf: std.ArrayList(u8),
nodes: std.zig.Ast.NodeList = .{},
- extra_data: std.ArrayListUnmanaged(std.zig.Ast.Node.Index) = .empty,
+ extra_data: std.ArrayListUnmanaged(u32) = .empty,
tokens: std.zig.Ast.TokenList = .{},
fn addTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex {
@@ -834,7 +831,7 @@ const Context = struct {
.start = @as(u32, @intCast(start_index)),
});
- return @as(u32, @intCast(c.tokens.len - 1));
+ return @intCast(c.tokens.len - 1);
}
fn addToken(c: *Context, tag: TokenTag, bytes: []const u8) Allocator.Error!TokenIndex {
@@ -848,26 +845,33 @@ const Context = struct {
}
fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange {
- try c.extra_data.appendSlice(c.gpa, list);
+ try c.extra_data.appendSlice(c.gpa, @ptrCast(list));
return NodeSubRange{
- .start = @as(NodeIndex, @intCast(c.extra_data.items.len - list.len)),
- .end = @as(NodeIndex, @intCast(c.extra_data.items.len)),
+ .start = @enumFromInt(c.extra_data.items.len - list.len),
+ .end = @enumFromInt(c.extra_data.items.len),
};
}
fn addNode(c: *Context, elem: std.zig.Ast.Node) Allocator.Error!NodeIndex {
- const result = @as(NodeIndex, @intCast(c.nodes.len));
+ const result: NodeIndex = @enumFromInt(c.nodes.len);
try c.nodes.append(c.gpa, elem);
return result;
}
- fn addExtra(c: *Context, extra: anytype) Allocator.Error!NodeIndex {
+ fn addExtra(c: *Context, extra: anytype) Allocator.Error!std.zig.Ast.ExtraIndex {
const fields = std.meta.fields(@TypeOf(extra));
try c.extra_data.ensureUnusedCapacity(c.gpa, fields.len);
- const result = @as(u32, @intCast(c.extra_data.items.len));
+ const result: ExtraIndex = @enumFromInt(c.extra_data.items.len);
inline for (fields) |field| {
- comptime std.debug.assert(field.type == NodeIndex);
- c.extra_data.appendAssumeCapacity(@field(extra, field.name));
+ switch (field.type) {
+ NodeIndex,
+ NodeOptionalIndex,
+ TokenIndex,
+ TokenOptionalIndex,
+ ExtraIndex,
+ => c.extra_data.appendAssumeCapacity(@intFromEnum(@field(extra, field.name))),
+ else => @compileError("unexpected field type"),
+ }
}
return result;
}
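The new `addExtra` above accepts any mix of the index enums and flattens each field to its integer value in the `u32` extra array; readers recover typed indices with `@enumFromInt`. A self-contained sketch of that round trip (simplified types; the real version also `@compileError`s on any other field type, as shown above):

```zig
const std = @import("std");

const NodeIndex = enum(u32) { _ };
const ExtraIndex = enum(u32) { _ };

// Same pattern as addExtra above: every field of `extra` is a
// u32-backed index enum, stored as its raw integer value.
fn addExtra(
    gpa: std.mem.Allocator,
    extra_data: *std.ArrayListUnmanaged(u32),
    extra: anytype,
) !ExtraIndex {
    const fields = std.meta.fields(@TypeOf(extra));
    try extra_data.ensureUnusedCapacity(gpa, fields.len);
    const result: ExtraIndex = @enumFromInt(extra_data.items.len);
    inline for (fields) |field| {
        extra_data.appendAssumeCapacity(@intFromEnum(@field(extra, field.name)));
    }
    return result;
}

test "typed indices round-trip through the u32 array" {
    var extra: std.ArrayListUnmanaged(u32) = .empty;
    defer extra.deinit(std.testing.allocator);

    const Slice = struct { start: NodeIndex, end: NodeIndex };
    const idx = try addExtra(std.testing.allocator, &extra, Slice{
        .start = @enumFromInt(1),
        .end = @enumFromInt(3),
    });
    const start: NodeIndex = @enumFromInt(extra.items[@intFromEnum(idx)]);
    try std.testing.expectEqual(@as(u32, 1), @intFromEnum(start));
}
```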
@@ -894,7 +898,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
try c.buf.append('\n');
try c.buf.appendSlice(payload);
try c.buf.append('\n');
- return @as(NodeIndex, 0); // error: integer value 0 cannot be coerced to type 'std.mem.Allocator.Error!u32'
+ return @enumFromInt(0);
},
.helpers_cast => {
const payload = node.castTag(.helpers_cast).?.data;
@@ -991,26 +995,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
.@"continue" => return c.addNode(.{
.tag = .@"continue",
.main_token = try c.addToken(.keyword_continue, "continue"),
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_token_and_opt_node = .{ .none, .none } },
}),
.return_void => return c.addNode(.{
.tag = .@"return",
.main_token = try c.addToken(.keyword_return, "return"),
- .data = .{
- .lhs = 0,
- .rhs = undefined,
- },
+ .data = .{ .opt_node = .none },
}),
.@"break" => return c.addNode(.{
.tag = .@"break",
.main_token = try c.addToken(.keyword_break, "break"),
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_token_and_opt_node = .{ .none, .none } },
}),
.break_val => {
const payload = node.castTag(.break_val).?.data;
@@ -1018,14 +1013,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const break_label = if (payload.label) |some| blk: {
_ = try c.addToken(.colon, ":");
break :blk try c.addIdentifier(some);
- } else 0;
+ } else null;
return c.addNode(.{
.tag = .@"break",
.main_token = tok,
- .data = .{
- .lhs = break_label,
- .rhs = try renderNode(c, payload.val),
- },
+ .data = .{ .opt_token_and_opt_node = .{
+ .fromOptional(break_label),
+ (try renderNode(c, payload.val)).toOptional(),
+ } },
});
},
.@"return" => {
@@ -1033,10 +1028,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .@"return",
.main_token = try c.addToken(.keyword_return, "return"),
- .data = .{
- .lhs = try renderNode(c, payload),
- .rhs = undefined,
- },
+ .data = .{ .opt_node = (try renderNode(c, payload)).toOptional() },
});
},
.@"comptime" => {
@@ -1044,10 +1036,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .@"comptime",
.main_token = try c.addToken(.keyword_comptime, "comptime"),
- .data = .{
- .lhs = try renderNode(c, payload),
- .rhs = undefined,
- },
+ .data = .{ .node = try renderNode(c, payload) },
});
},
.@"defer" => {
@@ -1055,10 +1044,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .@"defer",
.main_token = try c.addToken(.keyword_defer, "defer"),
- .data = .{
- .lhs = undefined,
- .rhs = try renderNode(c, payload),
- },
+ .data = .{ .node = try renderNode(c, payload) },
});
},
.asm_simple => {
@@ -1068,10 +1054,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .asm_simple,
.main_token = asm_token,
- .data = .{
- .lhs = try renderNode(c, payload),
- .rhs = try c.addToken(.r_paren, ")"),
- },
+ .data = .{ .node_and_token = .{
+ try renderNode(c, payload),
+ try c.addToken(.r_paren, ")"),
+ } },
});
},
.type => {
@@ -1104,10 +1090,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .address_of,
.main_token = tok,
- .data = .{
- .lhs = arg,
- .rhs = undefined,
- },
+ .data = .{ .node = arg },
});
},
.float_literal => {
@@ -1191,13 +1174,13 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .slice,
.main_token = l_bracket,
- .data = .{
- .lhs = string,
- .rhs = try c.addExtra(std.zig.Ast.Node.Slice{
+ .data = .{ .node_and_extra = .{
+ string,
+ try c.addExtra(std.zig.Ast.Node.Slice{
.start = start,
.end = end,
}),
- },
+ } },
});
},
.fail_decl => {
@@ -1220,20 +1203,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const compile_error = try c.addNode(.{
.tag = .builtin_call_two,
.main_token = compile_error_tok,
- .data = .{
- .lhs = err_msg,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{ err_msg.toOptional(), .none } },
});
_ = try c.addToken(.semicolon, ";");
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = const_tok,
- .data = .{
- .lhs = 0,
- .rhs = compile_error,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ compile_error.toOptional(),
+ } },
});
},
.pub_var_simple, .var_simple => {
@@ -1249,10 +1229,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = const_tok,
- .data = .{
- .lhs = 0,
- .rhs = init,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ init.toOptional(),
+ } },
});
},
.static_local_var => {
@@ -1268,10 +1248,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const container_def = try c.addNode(.{
.tag = .container_decl_two_trailing,
.main_token = kind_tok,
- .data = .{
- .lhs = try renderNode(c, payload.init),
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ (try renderNode(c, payload.init)).toOptional(),
+ .none,
+ } },
});
_ = try c.addToken(.r_brace, "}");
_ = try c.addToken(.semicolon, ";");
@@ -1279,10 +1259,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = const_tok,
- .data = .{
- .lhs = 0,
- .rhs = container_def,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ container_def.toOptional(),
+ } },
});
},
.extern_local_var => {
@@ -1298,10 +1278,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const container_def = try c.addNode(.{
.tag = .container_decl_two_trailing,
.main_token = kind_tok,
- .data = .{
- .lhs = try renderNode(c, payload.init),
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ (try renderNode(c, payload.init)).toOptional(),
+ .none,
+ } },
});
_ = try c.addToken(.r_brace, "}");
_ = try c.addToken(.semicolon, ";");
@@ -1309,10 +1289,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = const_tok,
- .data = .{
- .lhs = 0,
- .rhs = container_def,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ container_def.toOptional(),
+ } },
});
},
.mut_str => {
@@ -1324,10 +1304,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const deref = try c.addNode(.{
.tag = .deref,
- .data = .{
- .lhs = try renderNodeGrouped(c, payload.init),
- .rhs = undefined,
- },
+ .data = .{ .node = try renderNodeGrouped(c, payload.init) },
.main_token = try c.addToken(.period_asterisk, ".*"),
});
_ = try c.addToken(.semicolon, ";");
@@ -1335,7 +1312,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = var_tok,
- .data = .{ .lhs = 0, .rhs = deref },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ deref.toOptional(),
+ } },
});
},
.var_decl => return renderVar(c, node),
@@ -1359,10 +1339,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = mut_tok,
- .data = .{
- .lhs = 0,
- .rhs = init,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ init.toOptional(),
+ } },
});
},
.int_cast => {
@@ -1505,10 +1485,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .address_of,
.main_token = ampersand,
- .data = .{
- .lhs = base,
- .rhs = undefined,
- },
+ .data = .{ .node = base },
});
},
.deref => {
@@ -1518,10 +1495,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .deref,
.main_token = deref_tok,
- .data = .{
- .lhs = operand,
- .rhs = undefined,
- },
+ .data = .{ .node = operand },
});
},
.unwrap => {
@@ -1532,10 +1506,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .unwrap_optional,
.main_token = period,
- .data = .{
- .lhs = operand,
- .rhs = question_mark,
- },
+ .data = .{ .node_and_token = .{
+ operand,
+ question_mark,
+ } },
});
},
.c_pointer, .single_pointer => {
@@ -1557,10 +1531,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .ptr_type_aligned,
.main_token = main_token,
- .data = .{
- .lhs = 0,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ .none,
+ elem_type,
+ } },
});
},
.add => return renderBinOpGrouped(c, node, .add, .plus, "+"),
@@ -1606,10 +1580,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .block_two,
.main_token = l_brace,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ .none,
+ } },
});
},
.block_single => {
@@ -1623,10 +1597,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .block_two_semicolon,
.main_token = l_brace,
- .data = .{
- .lhs = stmt,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ stmt.toOptional(),
+ .none,
+ } },
});
},
.block => {
@@ -1641,7 +1615,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
defer stmts.deinit();
for (payload.stmts) |stmt| {
const res = try renderNode(c, stmt);
- if (res == 0) continue;
+ if (@intFromEnum(res) == 0) continue;
try addSemicolonIfNeeded(c, stmt);
try stmts.append(res);
}
@@ -1652,17 +1626,14 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = if (semicolon) .block_semicolon else .block,
.main_token = l_brace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
},
.func => return renderFunc(c, node),
.pub_inline_fn => return renderMacroFunc(c, node),
.discard => {
const payload = node.castTag(.discard).?.data;
- if (payload.should_skip) return @as(NodeIndex, 0);
+ if (payload.should_skip) return @enumFromInt(0);
const lhs = try c.addNode(.{
.tag = .identifier,
@@ -1680,19 +1651,19 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .assign,
.main_token = main_token,
- .data = .{
- .lhs = lhs,
- .rhs = try renderNode(c, addr_of),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try renderNode(c, addr_of),
+ } },
});
} else {
return c.addNode(.{
.tag = .assign,
.main_token = main_token,
- .data = .{
- .lhs = lhs,
- .rhs = try renderNode(c, payload.value),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try renderNode(c, payload.value),
+ } },
});
}
},
@@ -1709,29 +1680,29 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const res = try renderNode(c, some);
_ = try c.addToken(.r_paren, ")");
break :blk res;
- } else 0;
+ } else null;
const body = try renderNode(c, payload.body);
- if (cont_expr == 0) {
+ if (cont_expr == null) {
return c.addNode(.{
.tag = .while_simple,
.main_token = while_tok,
- .data = .{
- .lhs = cond,
- .rhs = body,
- },
+ .data = .{ .node_and_node = .{
+ cond,
+ body,
+ } },
});
} else {
return c.addNode(.{
.tag = .while_cont,
.main_token = while_tok,
- .data = .{
- .lhs = cond,
- .rhs = try c.addExtra(std.zig.Ast.Node.WhileCont{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ cond,
+ try c.addExtra(std.zig.Ast.Node.WhileCont{
+ .cont_expr = cont_expr.?,
.then_expr = body,
}),
- },
+ } },
});
}
},
@@ -1750,10 +1721,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .while_simple,
.main_token = while_tok,
- .data = .{
- .lhs = cond,
- .rhs = body,
- },
+ .data = .{ .node_and_node = .{
+ cond,
+ body,
+ } },
});
},
.@"if" => {
@@ -1767,10 +1738,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const else_node = payload.@"else" orelse return c.addNode(.{
.tag = .if_simple,
.main_token = if_tok,
- .data = .{
- .lhs = cond,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ cond,
+ then_expr,
+ } },
});
_ = try c.addToken(.keyword_else, "else");
const else_expr = try renderNode(c, else_node);
@@ -1778,13 +1749,13 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .@"if",
.main_token = if_tok,
- .data = .{
- .lhs = cond,
- .rhs = try c.addExtra(std.zig.Ast.Node.If{
+ .data = .{ .node_and_extra = .{
+ cond,
+ try c.addExtra(std.zig.Ast.Node.If{
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
},
.if_not_break => {
@@ -1794,28 +1765,25 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const cond = try c.addNode(.{
.tag = .bool_not,
.main_token = try c.addToken(.bang, "!"),
- .data = .{
- .lhs = try renderNodeGrouped(c, payload),
- .rhs = undefined,
- },
+ .data = .{ .node = try renderNodeGrouped(c, payload) },
});
_ = try c.addToken(.r_paren, ")");
const then_expr = try c.addNode(.{
.tag = .@"break",
.main_token = try c.addToken(.keyword_break, "break"),
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_token_and_opt_node = .{
+ .none,
+ .none,
+ } },
});
return c.addNode(.{
.tag = .if_simple,
.main_token = if_tok,
- .data = .{
- .lhs = cond,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ cond,
+ then_expr,
+ } },
});
},
.@"switch" => {
@@ -1837,13 +1805,12 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .switch_comma,
.main_token = switch_tok,
- .data = .{
- .lhs = cond,
- .rhs = try c.addExtra(NodeSubRange{
+ .data = .{ .node_and_extra = .{
+ cond, try c.addExtra(NodeSubRange{
.start = span.start,
.end = span.end,
}),
- },
+ } },
});
},
.switch_else => {
@@ -1852,43 +1819,42 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .switch_case_one,
.main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
- .data = .{
- .lhs = 0,
- .rhs = try renderNode(c, payload),
- },
+ .data = .{ .opt_node_and_node = .{
+ .none,
+ try renderNode(c, payload),
+ } },
});
},
.switch_prong => {
const payload = node.castTag(.switch_prong).?.data;
- var items = try c.gpa.alloc(NodeIndex, @max(payload.cases.len, 1));
+ var items = try c.gpa.alloc(NodeIndex, payload.cases.len);
defer c.gpa.free(items);
- items[0] = 0;
- for (payload.cases, 0..) |item, i| {
+ for (payload.cases, items, 0..) |case, *item, i| {
if (i != 0) _ = try c.addToken(.comma, ",");
- items[i] = try renderNode(c, item);
+ item.* = try renderNode(c, case);
}
_ = try c.addToken(.r_brace, "}");
if (items.len < 2) {
return c.addNode(.{
.tag = .switch_case_one,
.main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
- .data = .{
- .lhs = items[0],
- .rhs = try renderNode(c, payload.cond),
- },
+ .data = .{ .opt_node_and_node = .{
+ if (items.len == 0) .none else items[0].toOptional(),
+ try renderNode(c, payload.cond),
+ } },
});
} else {
const span = try c.listToSpan(items);
return c.addNode(.{
.tag = .switch_case,
.main_token = try c.addToken(.equal_angle_bracket_right, "=>"),
- .data = .{
- .lhs = try c.addExtra(NodeSubRange{
+ .data = .{ .extra_and_node = .{
+ try c.addExtra(NodeSubRange{
.start = span.start,
.end = span.end,
}),
- .rhs = try renderNode(c, payload.cond),
- },
+ try renderNode(c, payload.cond),
+ } },
});
}
},
@@ -1900,10 +1866,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .container_decl_two,
.main_token = opaque_tok,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ .none,
+ } },
});
},
.array_access => {
@@ -1915,10 +1881,10 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .array_access,
.main_token = l_bracket,
- .data = .{
- .lhs = lhs,
- .rhs = index_expr,
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ index_expr,
+ } },
});
},
.array_type => {
@@ -1940,22 +1906,22 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const init = try c.addNode(.{
.tag = .array_init_one,
.main_token = l_brace,
- .data = .{
- .lhs = type_expr,
- .rhs = val,
- },
+ .data = .{ .node_and_node = .{
+ type_expr,
+ val,
+ } },
});
return c.addNode(.{
.tag = .array_cat,
.main_token = try c.addToken(.asterisk_asterisk, "**"),
- .data = .{
- .lhs = init,
- .rhs = try c.addNode(.{
+ .data = .{ .node_and_node = .{
+ init,
+ try c.addNode(.{
.tag = .number_literal,
.main_token = try c.addTokenFmt(.number_literal, "{d}", .{payload.count}),
.data = undefined,
}),
- },
+ } },
});
},
.empty_array => {
@@ -1989,7 +1955,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const type_node = if (payload.type) |enum_const_type| blk: {
_ = try c.addToken(.colon, ":");
break :blk try renderNode(c, enum_const_type);
- } else 0;
+ } else null;
_ = try c.addToken(.equal, "=");
@@ -1999,20 +1965,18 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = const_tok,
- .data = .{
- .lhs = type_node,
- .rhs = init_node,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .fromOptional(type_node),
+ init_node.toOptional(),
+ } },
});
},
.tuple => {
const payload = node.castTag(.tuple).?.data;
_ = try c.addToken(.period, ".");
const l_brace = try c.addToken(.l_brace, "{");
- var inits = try c.gpa.alloc(NodeIndex, @max(payload.len, 2));
+ var inits = try c.gpa.alloc(NodeIndex, payload.len);
defer c.gpa.free(inits);
- inits[0] = 0;
- inits[1] = 0;
for (payload, 0..) |init, i| {
if (i != 0) _ = try c.addToken(.comma, ",");
inits[i] = try renderNode(c, init);
@@ -2022,20 +1986,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .array_init_dot_two,
.main_token = l_brace,
- .data = .{
- .lhs = inits[0],
- .rhs = inits[1],
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (inits.len < 1) .none else inits[0].toOptional(),
+ if (inits.len < 2) .none else inits[1].toOptional(),
+ } },
});
} else {
const span = try c.listToSpan(inits);
return c.addNode(.{
.tag = .array_init_dot,
.main_token = l_brace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
}
},
@@ -2043,10 +2004,8 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const payload = node.castTag(.container_init_dot).?.data;
_ = try c.addToken(.period, ".");
const l_brace = try c.addToken(.l_brace, "{");
- var inits = try c.gpa.alloc(NodeIndex, @max(payload.len, 2));
+ var inits = try c.gpa.alloc(NodeIndex, payload.len);
defer c.gpa.free(inits);
- inits[0] = 0;
- inits[1] = 0;
for (payload, 0..) |init, i| {
_ = try c.addToken(.period, ".");
_ = try c.addIdentifier(init.name);
@@ -2060,20 +2019,17 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
return c.addNode(.{
.tag = .struct_init_dot_two_comma,
.main_token = l_brace,
- .data = .{
- .lhs = inits[0],
- .rhs = inits[1],
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (inits.len < 1) .none else inits[0].toOptional(),
+ if (inits.len < 2) .none else inits[1].toOptional(),
+ } },
});
} else {
const span = try c.listToSpan(inits);
return c.addNode(.{
.tag = .struct_init_dot_comma,
.main_token = l_brace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
}
},
@@ -2082,9 +2038,8 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const lhs = try renderNode(c, payload.lhs);
const l_brace = try c.addToken(.l_brace, "{");
- var inits = try c.gpa.alloc(NodeIndex, @max(payload.inits.len, 1));
+ var inits = try c.gpa.alloc(NodeIndex, payload.inits.len);
defer c.gpa.free(inits);
- inits[0] = 0;
for (payload.inits, 0..) |init, i| {
_ = try c.addToken(.period, ".");
_ = try c.addIdentifier(init.name);
@@ -2098,31 +2053,30 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
0 => c.addNode(.{
.tag = .struct_init_one,
.main_token = l_brace,
- .data = .{
- .lhs = lhs,
- .rhs = 0,
- },
+ .data = .{ .node_and_opt_node = .{
+ lhs,
+ .none,
+ } },
}),
1 => c.addNode(.{
.tag = .struct_init_one_comma,
.main_token = l_brace,
- .data = .{
- .lhs = lhs,
- .rhs = inits[0],
- },
+ .data = .{ .node_and_opt_node = .{
+ lhs,
+ inits[0].toOptional(),
+ } },
}),
else => blk: {
const span = try c.listToSpan(inits);
break :blk c.addNode(.{
.tag = .struct_init_comma,
.main_token = l_brace,
- .data = .{
- .lhs = lhs,
- .rhs = try c.addExtra(NodeSubRange{
+ .data = .{ .node_and_extra = .{
+ lhs, try c.addExtra(NodeSubRange{
.start = span.start,
.end = span.end,
}),
- },
+ } },
});
},
};
@@ -2147,10 +2101,8 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex {
const num_vars = payload.variables.len;
const num_funcs = payload.functions.len;
const total_members = payload.fields.len + num_vars + num_funcs;
- const members = try c.gpa.alloc(NodeIndex, @max(total_members, 2));
+ const members = try c.gpa.alloc(NodeIndex, total_members);
defer c.gpa.free(members);
- members[0] = 0;
- members[1] = 0;
for (payload.fields, 0..) |field, i| {
const name_tok = try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field.name)});
@@ -2167,37 +2119,36 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex {
});
_ = try c.addToken(.r_paren, ")");
break :blk align_expr;
- } else 0;
+ } else null;
const value_expr = if (field.default_value) |value| blk: {
_ = try c.addToken(.equal, "=");
break :blk try renderNode(c, value);
- } else 0;
+ } else null;
- members[i] = try c.addNode(if (align_expr == 0) .{
+ members[i] = try c.addNode(if (align_expr == null) .{
.tag = .container_field_init,
.main_token = name_tok,
- .data = .{
- .lhs = type_expr,
- .rhs = value_expr,
- },
- } else if (value_expr == 0) .{
+ .data = .{ .node_and_opt_node = .{
+ type_expr,
+ .fromOptional(value_expr),
+ } },
+ } else if (value_expr == null) .{
.tag = .container_field_align,
.main_token = name_tok,
- .data = .{
- .lhs = type_expr,
- .rhs = align_expr,
- },
+ .data = .{ .node_and_node = .{
+ type_expr,
+ align_expr.?,
+ } },
} else .{
.tag = .container_field,
.main_token = name_tok,
- .data = .{
- .lhs = type_expr,
- .rhs = try c.addExtra(std.zig.Ast.Node.ContainerField{
- .align_expr = align_expr,
- .value_expr = value_expr,
+ .data = .{ .node_and_extra = .{
+ type_expr, try c.addExtra(std.zig.Ast.Node.ContainerField{
+ .align_expr = align_expr.?,
+ .value_expr = value_expr.?,
}),
- },
+ } },
});
_ = try c.addToken(.comma, ",");
}
@@ -2213,29 +2164,26 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex {
return c.addNode(.{
.tag = .container_decl_two,
.main_token = kind_tok,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ .none,
+ } },
});
} else if (total_members <= 2) {
return c.addNode(.{
.tag = if (num_funcs == 0) .container_decl_two_trailing else .container_decl_two,
.main_token = kind_tok,
- .data = .{
- .lhs = members[0],
- .rhs = members[1],
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (members.len < 1) .none else members[0].toOptional(),
+ if (members.len < 2) .none else members[1].toOptional(),
+ } },
});
} else {
const span = try c.listToSpan(members);
return c.addNode(.{
.tag = if (num_funcs == 0) .container_decl_trailing else .container_decl,
.main_token = kind_tok,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
}
}
@@ -2244,45 +2192,52 @@ fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeI
return c.addNode(.{
.tag = .field_access,
.main_token = try c.addToken(.period, "."),
- .data = .{
- .lhs = lhs,
- .rhs = try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field_name)}),
- },
+ .data = .{ .node_and_token = .{
+ lhs,
+ try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field_name)}),
+ } },
});
}
fn renderArrayInit(c: *Context, lhs: NodeIndex, inits: []const Node) !NodeIndex {
const l_brace = try c.addToken(.l_brace, "{");
- var rendered = try c.gpa.alloc(NodeIndex, @max(inits.len, 1));
+ var rendered = try c.gpa.alloc(NodeIndex, inits.len);
defer c.gpa.free(rendered);
- rendered[0] = 0;
for (inits, 0..) |init, i| {
rendered[i] = try renderNode(c, init);
_ = try c.addToken(.comma, ",");
}
_ = try c.addToken(.r_brace, "}");
- if (inits.len < 2) {
- return c.addNode(.{
- .tag = .array_init_one_comma,
+ switch (inits.len) {
+ 0 => return c.addNode(.{
+ .tag = .struct_init_one,
.main_token = l_brace,
- .data = .{
- .lhs = lhs,
- .rhs = rendered[0],
- },
- });
- } else {
- const span = try c.listToSpan(rendered);
- return c.addNode(.{
- .tag = .array_init_comma,
+ .data = .{ .node_and_opt_node = .{
+ lhs,
+ .none,
+ } },
+ }),
+ 1 => return c.addNode(.{
+ .tag = .array_init_one_comma,
.main_token = l_brace,
- .data = .{
- .lhs = lhs,
- .rhs = try c.addExtra(NodeSubRange{
- .start = span.start,
- .end = span.end,
- }),
- },
- });
+ .data = .{ .node_and_node = .{
+ lhs,
+ rendered[0],
+ } },
+ }),
+ else => {
+ const span = try c.listToSpan(rendered);
+ return c.addNode(.{
+ .tag = .array_init_comma,
+ .main_token = l_brace,
+ .data = .{ .node_and_extra = .{
+ lhs, try c.addExtra(NodeSubRange{
+ .start = span.start,
+ .end = span.end,
+ }),
+ } },
+ });
+ },
}
}
@@ -2298,10 +2253,10 @@ fn renderArrayType(c: *Context, len: usize, elem_type: Node) !NodeIndex {
return c.addNode(.{
.tag = .array_type,
.main_token = l_bracket,
- .data = .{
- .lhs = len_expr,
- .rhs = elem_type_expr,
- },
+ .data = .{ .node_and_node = .{
+ len_expr,
+ elem_type_expr,
+ } },
});
}
@@ -2325,13 +2280,13 @@ fn renderNullSentinelArrayType(c: *Context, len: usize, elem_type: Node) !NodeIn
return c.addNode(.{
.tag = .array_type_sentinel,
.main_token = l_bracket,
- .data = .{
- .lhs = len_expr,
- .rhs = try c.addExtra(std.zig.Ast.Node.ArrayTypeSentinel{
+ .data = .{ .node_and_extra = .{
+ len_expr,
+ try c.addExtra(std.zig.Ast.Node.ArrayTypeSentinel{
.sentinel = sentinel_expr,
.elem_type = elem_type_expr,
}),
- },
+ } },
});
}
@@ -2482,10 +2437,10 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
=> return c.addNode(.{
.tag = .grouped_expression,
.main_token = try c.addToken(.l_paren, "("),
- .data = .{
- .lhs = try renderNode(c, node),
- .rhs = try c.addToken(.r_paren, ")"),
- },
+ .data = .{ .node_and_token = .{
+ try renderNode(c, node),
+ try c.addToken(.r_paren, ")"),
+ } },
}),
.ellipsis3,
.switch_prong,
@@ -2539,10 +2494,7 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: T
return c.addNode(.{
.tag = tag,
.main_token = try c.addToken(tok_tag, bytes),
- .data = .{
- .lhs = try renderNodeGrouped(c, payload),
- .rhs = undefined,
- },
+ .data = .{ .node = try renderNodeGrouped(c, payload) },
});
}
@@ -2552,10 +2504,10 @@ fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_ta
return c.addNode(.{
.tag = tag,
.main_token = try c.addToken(tok_tag, bytes),
- .data = .{
- .lhs = lhs,
- .rhs = try renderNodeGrouped(c, payload.rhs),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try renderNodeGrouped(c, payload.rhs),
+ } },
});
}
@@ -2565,10 +2517,10 @@ fn renderBinOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: Toke
return c.addNode(.{
.tag = tag,
.main_token = try c.addToken(tok_tag, bytes),
- .data = .{
- .lhs = lhs,
- .rhs = try renderNode(c, payload.rhs),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try renderNode(c, payload.rhs),
+ } },
});
}
@@ -2586,10 +2538,7 @@ fn renderStdImport(c: *Context, parts: []const []const u8) !NodeIndex {
const import_node = try c.addNode(.{
.tag = .builtin_call_two,
.main_token = import_tok,
- .data = .{
- .lhs = std_node,
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{ std_node.toOptional(), .none } },
});
var access_chain = import_node;
@@ -2605,20 +2554,14 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex {
0 => try c.addNode(.{
.tag = .call_one,
.main_token = lparen,
- .data = .{
- .lhs = lhs,
- .rhs = 0,
- },
+ .data = .{ .node_and_opt_node = .{ lhs, .none } },
}),
1 => blk: {
const arg = try renderNode(c, args[0]);
break :blk try c.addNode(.{
.tag = .call_one,
.main_token = lparen,
- .data = .{
- .lhs = lhs,
- .rhs = arg,
- },
+ .data = .{ .node_and_opt_node = .{ lhs, arg.toOptional() } },
});
},
else => blk: {
@@ -2633,13 +2576,10 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex {
break :blk try c.addNode(.{
.tag = .call,
.main_token = lparen,
- .data = .{
- .lhs = lhs,
- .rhs = try c.addExtra(NodeSubRange{
- .start = span.start,
- .end = span.end,
- }),
- },
+ .data = .{ .node_and_extra = .{
+ lhs,
+ try c.addExtra(NodeSubRange{ .start = span.start, .end = span.end }),
+ } },
});
},
};
@@ -2650,10 +2590,10 @@ fn renderCall(c: *Context, lhs: NodeIndex, args: []const Node) !NodeIndex {
fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !NodeIndex {
const builtin_tok = try c.addToken(.builtin, builtin);
_ = try c.addToken(.l_paren, "(");
- var arg_1: NodeIndex = 0;
- var arg_2: NodeIndex = 0;
- var arg_3: NodeIndex = 0;
- var arg_4: NodeIndex = 0;
+ var arg_1: NodeIndex = undefined;
+ var arg_2: NodeIndex = undefined;
+ var arg_3: NodeIndex = undefined;
+ var arg_4: NodeIndex = undefined;
switch (args.len) {
0 => {},
1 => {
@@ -2681,10 +2621,10 @@ fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !Node
return c.addNode(.{
.tag = .builtin_call_two,
.main_token = builtin_tok,
- .data = .{
- .lhs = arg_1,
- .rhs = arg_2,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (args.len < 1) .none else arg_1.toOptional(),
+ if (args.len < 2) .none else arg_2.toOptional(),
+ } },
});
} else {
std.debug.assert(args.len == 4);
@@ -2693,10 +2633,7 @@ fn renderBuiltinCall(c: *Context, builtin: []const u8, args: []const Node) !Node
return c.addNode(.{
.tag = .builtin_call,
.main_token = builtin_tok,
- .data = .{
- .lhs = params.start,
- .rhs = params.end,
- },
+ .data = .{ .extra_range = params },
});
}
}
@@ -2725,7 +2662,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
});
_ = try c.addToken(.r_paren, ")");
break :blk res;
- } else 0;
+ } else null;
const section_node = if (payload.linksection_string) |some| blk: {
_ = try c.addToken(.keyword_linksection, "linksection");
@@ -2737,50 +2674,50 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
});
_ = try c.addToken(.r_paren, ")");
break :blk res;
- } else 0;
+ } else null;
const init_node = if (payload.init) |some| blk: {
_ = try c.addToken(.equal, "=");
break :blk try renderNode(c, some);
- } else 0;
+ } else null;
_ = try c.addToken(.semicolon, ";");
- if (section_node == 0) {
- if (align_node == 0) {
+ if (section_node == null) {
+ if (align_node == null) {
return c.addNode(.{
.tag = .simple_var_decl,
.main_token = mut_tok,
- .data = .{
- .lhs = type_node,
- .rhs = init_node,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ type_node.toOptional(),
+ .fromOptional(init_node),
+ } },
});
} else {
return c.addNode(.{
.tag = .local_var_decl,
.main_token = mut_tok,
- .data = .{
- .lhs = try c.addExtra(std.zig.Ast.Node.LocalVarDecl{
+ .data = .{ .extra_and_opt_node = .{
+ try c.addExtra(std.zig.Ast.Node.LocalVarDecl{
.type_node = type_node,
- .align_node = align_node,
+ .align_node = align_node.?,
}),
- .rhs = init_node,
- },
+ .fromOptional(init_node),
+ } },
});
}
} else {
return c.addNode(.{
.tag = .global_var_decl,
.main_token = mut_tok,
- .data = .{
- .lhs = try c.addExtra(std.zig.Ast.Node.GlobalVarDecl{
- .type_node = type_node,
- .align_node = align_node,
- .section_node = section_node,
- .addrspace_node = 0,
+ .data = .{ .extra_and_opt_node = .{
+ try c.addExtra(std.zig.Ast.Node.GlobalVarDecl{
+ .type_node = type_node.toOptional(),
+ .align_node = .fromOptional(align_node),
+ .section_node = .fromOptional(section_node),
+ .addrspace_node = .none,
}),
- .rhs = init_node,
- },
+ .fromOptional(init_node),
+ } },
});
}
}
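`renderVar` above keeps each optional sub-expression (`align_node`, `section_node`, `init_node`) as a plain `?NodeIndex` local and converts only at node-construction time. A sketch of the two conversion helpers this relies on (simplified stand-ins; the real ones live on `std.zig.Ast.Node.Index` and `.OptionalIndex`):

```zig
const std = @import("std");

const NodeIndex = enum(u32) {
    _,

    // Widening conversion: every real index is a valid optional index.
    fn toOptional(i: NodeIndex) OptionalNodeIndex {
        return @enumFromInt(@intFromEnum(i));
    }
};

const OptionalNodeIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    // Zig optional -> sentinel-encoded optional, used via `.fromOptional(x)`.
    fn fromOptional(o: ?NodeIndex) OptionalNodeIndex {
        return if (o) |i| i.toOptional() else .none;
    }
};

test "skipped clauses become .none, rendered ones keep their index" {
    const section_node: ?NodeIndex = null; // e.g. no linksection clause
    try std.testing.expect(OptionalNodeIndex.fromOptional(section_node) == .none);

    const rendered: NodeIndex = @enumFromInt(7);
    const init_node: ?NodeIndex = rendered;
    try std.testing.expect(OptionalNodeIndex.fromOptional(init_node) != .none);
}
```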
@@ -2809,7 +2746,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
});
_ = try c.addToken(.r_paren, ")");
break :blk res;
- } else 0;
+ } else null;
const section_expr = if (payload.linksection_string) |some| blk: {
_ = try c.addToken(.keyword_linksection, "linksection");
@@ -2821,7 +2758,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
});
_ = try c.addToken(.r_paren, ")");
break :blk res;
- } else 0;
+ } else null;
const callconv_expr = if (payload.explicit_callconv) |some| blk: {
_ = try c.addToken(.keyword_callconv, "callconv");
@@ -2856,48 +2793,52 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
const inner_lbrace = try c.addToken(.l_brace, "{");
_ = try c.addToken(.r_brace, "}");
_ = try c.addToken(.r_brace, "}");
+ const inner_node = try c.addNode(.{
+ .tag = .struct_init_dot_two,
+ .main_token = inner_lbrace,
+ .data = .{ .opt_node_and_opt_node = .{
+ .none,
+ .none,
+ } },
+ });
break :cc_node try c.addNode(.{
.tag = .struct_init_dot_two,
.main_token = outer_lbrace,
- .data = .{
- .lhs = try c.addNode(.{
- .tag = .struct_init_dot_two,
- .main_token = inner_lbrace,
- .data = .{ .lhs = 0, .rhs = 0 },
- }),
- .rhs = 0,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ inner_node.toOptional(),
+ .none,
+ } },
});
},
};
_ = try c.addToken(.r_paren, ")");
break :blk cc_node;
- } else 0;
+ } else null;
const return_type_expr = try renderNode(c, payload.return_type);
const fn_proto = try blk: {
- if (align_expr == 0 and section_expr == 0 and callconv_expr == 0) {
+ if (align_expr == null and section_expr == null and callconv_expr == null) {
if (params.items.len < 2)
break :blk c.addNode(.{
.tag = .fn_proto_simple,
.main_token = fn_token,
- .data = .{
- .lhs = params.items[0],
- .rhs = return_type_expr,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (params.items.len == 0) .none else params.items[0].toOptional(),
+ return_type_expr.toOptional(),
+ } },
})
else
break :blk c.addNode(.{
.tag = .fn_proto_multi,
.main_token = fn_token,
- .data = .{
- .lhs = try c.addExtra(NodeSubRange{
+ .data = .{ .extra_and_opt_node = .{
+ try c.addExtra(NodeSubRange{
.start = span.start,
.end = span.end,
}),
- .rhs = return_type_expr,
- },
+ return_type_expr.toOptional(),
+ } },
});
}
if (params.items.len < 2)
@@ -2905,14 +2846,16 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
.tag = .fn_proto_one,
.main_token = fn_token,
.data = .{
- .lhs = try c.addExtra(std.zig.Ast.Node.FnProtoOne{
- .param = params.items[0],
- .align_expr = align_expr,
- .addrspace_expr = 0, // TODO
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
- }),
- .rhs = return_type_expr,
+ .extra_and_opt_node = .{
+ try c.addExtra(std.zig.Ast.Node.FnProtoOne{
+ .param = if (params.items.len == 0) .none else params.items[0].toOptional(),
+ .align_expr = .fromOptional(align_expr),
+ .addrspace_expr = .none, // TODO
+ .section_expr = .fromOptional(section_expr),
+ .callconv_expr = .fromOptional(callconv_expr),
+ }),
+ return_type_expr.toOptional(),
+ },
},
})
else
@@ -2920,15 +2863,17 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
.tag = .fn_proto,
.main_token = fn_token,
.data = .{
- .lhs = try c.addExtra(std.zig.Ast.Node.FnProto{
- .params_start = span.start,
- .params_end = span.end,
- .align_expr = align_expr,
- .addrspace_expr = 0, // TODO
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
- }),
- .rhs = return_type_expr,
+ .extra_and_opt_node = .{
+ try c.addExtra(std.zig.Ast.Node.FnProto{
+ .params_start = span.start,
+ .params_end = span.end,
+ .align_expr = .fromOptional(align_expr),
+ .addrspace_expr = .none, // TODO
+ .section_expr = .fromOptional(section_expr),
+ .callconv_expr = .fromOptional(callconv_expr),
+ }),
+ return_type_expr.toOptional(),
+ },
},
});
};
@@ -2943,10 +2888,10 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
return c.addNode(.{
.tag = .fn_decl,
.main_token = fn_token,
- .data = .{
- .lhs = fn_proto,
- .rhs = body,
- },
+ .data = .{ .node_and_node = .{
+ fn_proto,
+ body,
+ } },
});
}
@@ -2959,8 +2904,6 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
const params = try renderParams(c, payload.params, false);
defer params.deinit();
- var span: NodeSubRange = undefined;
- if (params.items.len > 1) span = try c.listToSpan(params.items);
const return_type_expr = try renderNodeGrouped(c, payload.return_type);
@@ -2969,38 +2912,39 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
break :blk try c.addNode(.{
.tag = .fn_proto_simple,
.main_token = fn_token,
- .data = .{
- .lhs = params.items[0],
- .rhs = return_type_expr,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ if (params.items.len == 0) .none else params.items[0].toOptional(),
+ return_type_expr.toOptional(),
+ } },
});
} else {
+ const span: NodeSubRange = try c.listToSpan(params.items);
break :blk try c.addNode(.{
.tag = .fn_proto_multi,
.main_token = fn_token,
- .data = .{
- .lhs = try c.addExtra(std.zig.Ast.Node.SubRange{
+ .data = .{ .extra_and_opt_node = .{
+ try c.addExtra(std.zig.Ast.Node.SubRange{
.start = span.start,
.end = span.end,
}),
- .rhs = return_type_expr,
- },
+ return_type_expr.toOptional(),
+ } },
});
}
};
return c.addNode(.{
.tag = .fn_decl,
.main_token = fn_token,
- .data = .{
- .lhs = fn_proto,
- .rhs = try renderNode(c, payload.body),
- },
+ .data = .{ .node_and_node = .{
+ fn_proto,
+ try renderNode(c, payload.body),
+ } },
});
}
fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.ArrayList(NodeIndex) {
_ = try c.addToken(.l_paren, "(");
- var rendered = try std.ArrayList(NodeIndex).initCapacity(c.gpa, @max(params.len, 1));
+ var rendered = try std.ArrayList(NodeIndex).initCapacity(c.gpa, params.len);
errdefer rendered.deinit();
for (params, 0..) |param, i| {
@@ -3022,6 +2966,5 @@ fn renderParams(c: *Context, params: []Payload.Param, is_var_args: bool) !std.Ar
}
_ = try c.addToken(.r_paren, ")");
- if (rendered.items.len == 0) rendered.appendAssumeCapacity(0);
return rendered;
}
lib/compiler/reduce/Walk.zig
@@ -98,29 +98,26 @@ const ScanDeclsAction = enum { add, remove };
fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction) Error!void {
const ast = w.ast;
const gpa = w.gpa;
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- const token_tags = ast.tokens.items(.tag);
for (members) |member_node| {
- const name_token = switch (node_tags[member_node]) {
+ const name_token = switch (ast.nodeTag(member_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => main_tokens[member_node] + 1,
+ => ast.nodeMainToken(member_node) + 1,
.fn_proto_simple,
.fn_proto_multi,
.fn_proto_one,
.fn_proto,
.fn_decl,
- => main_tokens[member_node] + 1,
+ => ast.nodeMainToken(member_node) + 1,
else => continue,
};
- assert(token_tags[name_token] == .identifier);
+ assert(ast.tokenTag(name_token) == .identifier);
const name_bytes = ast.tokenSlice(name_token);
switch (action) {
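`scanDecls` above shows the second half of the migration: instead of slicing whole columns out of the node and token tables (`ast.nodes.items(.tag)`, `ast.tokens.items(.tag)`, ...), each lookup goes through a per-index accessor. A sketch of how such accessors wrap the underlying `std.MultiArrayList` (simplified stand-in for the real `Ast`):

```zig
const std = @import("std");

const Ast = struct {
    const Tag = enum { root, simple_var_decl, fn_decl, identifier };
    const Node = struct { tag: Tag, main_token: u32 };
    const Index = enum(u32) { root = 0, _ };

    nodes: std.MultiArrayList(Node) = .{},

    // One column read per call, keyed by a typed index, so a raw integer
    // (or an index into some other table) can't be passed by accident.
    fn nodeTag(ast: *const Ast, i: Index) Tag {
        return ast.nodes.items(.tag)[@intFromEnum(i)];
    }

    fn nodeMainToken(ast: *const Ast, i: Index) u32 {
        return ast.nodes.items(.main_token)[@intFromEnum(i)];
    }
};

test "tag lookup goes through a typed index" {
    var ast: Ast = .{};
    defer ast.nodes.deinit(std.testing.allocator);
    try ast.nodes.append(std.testing.allocator, .{ .tag = .root, .main_token = 0 });
    try std.testing.expectEqual(Ast.Tag.root, ast.nodeTag(.root));
}
```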
@@ -145,12 +142,10 @@ fn scanDecls(w: *Walk, members: []const Ast.Node.Index, action: ScanDeclsAction)
fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
const ast = w.ast;
- const datas = ast.nodes.items(.data);
- switch (ast.nodes.items(.tag)[decl]) {
+ switch (ast.nodeTag(decl)) {
.fn_decl => {
- const fn_proto = datas[decl].lhs;
+ const fn_proto, const body_node = ast.nodeData(decl).node_and_node;
try walkExpression(w, fn_proto);
- const body_node = datas[decl].rhs;
if (!isFnBodyGutted(ast, body_node)) {
w.replace_names.clearRetainingCapacity();
try w.transformations.append(.{ .gut_function = decl });
@@ -167,7 +162,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
.@"usingnamespace" => {
try w.transformations.append(.{ .delete_node = decl });
- const expr = datas[decl].lhs;
+ const expr = ast.nodeData(decl).node;
try walkExpression(w, expr);
},
@@ -179,7 +174,7 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
.test_decl => {
try w.transformations.append(.{ .delete_node = decl });
- try walkExpression(w, datas[decl].rhs);
+ try walkExpression(w, ast.nodeData(decl).opt_token_and_node[1]);
},
.container_field_init,
@@ -202,14 +197,10 @@ fn walkMember(w: *Walk, decl: Ast.Node.Index) Error!void {
fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
const ast = w.ast;
- const token_tags = ast.tokens.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- const node_tags = ast.nodes.items(.tag);
- const datas = ast.nodes.items(.data);
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.identifier => {
- const name_ident = main_tokens[node];
- assert(token_tags[name_ident] == .identifier);
+ const name_ident = ast.nodeMainToken(node);
+ assert(ast.tokenTag(name_ident) == .identifier);
const name_bytes = ast.tokenSlice(name_ident);
_ = w.unreferenced_globals.swapRemove(name_bytes);
if (w.replace_names.get(name_bytes)) |index| {
@@ -239,46 +230,27 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
},
.@"errdefer" => {
- const expr = datas[node].rhs;
+ const expr = ast.nodeData(node).opt_token_and_node[1];
return walkExpression(w, expr);
},
- .@"defer" => {
- const expr = datas[node].rhs;
- return walkExpression(w, expr);
- },
- .@"comptime", .@"nosuspend" => {
- const block = datas[node].lhs;
- return walkExpression(w, block);
- },
-
- .@"suspend" => {
- const body = datas[node].lhs;
- return walkExpression(w, body);
- },
-
- .@"catch" => {
- try walkExpression(w, datas[node].lhs); // target
- try walkExpression(w, datas[node].rhs); // fallback
+ .@"defer",
+ .@"comptime",
+ .@"nosuspend",
+ .@"suspend",
+ => {
+ return walkExpression(w, ast.nodeData(node).node);
},
.field_access => {
- const field_access = datas[node];
- try walkExpression(w, field_access.lhs);
+ try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
- .error_union,
- .switch_range,
- => {
- const infix = datas[node];
- try walkExpression(w, infix.lhs);
- return walkExpression(w, infix.rhs);
- },
.for_range => {
- const infix = datas[node];
- try walkExpression(w, infix.lhs);
- if (infix.rhs != 0) {
- return walkExpression(w, infix.rhs);
+ const start, const opt_end = ast.nodeData(node).node_and_opt_node;
+ try walkExpression(w, start);
+ if (opt_end.unwrap()) |end| {
+ return walkExpression(w, end);
}
},
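Multi-operand payloads like the `.node_and_opt_node` in `.for_range` above are unpacked with tuple destructuring, and optional operands are consumed with `unwrap()` captures instead of `!= 0` checks. A sketch of that access pattern (same simplified stand-in types as earlier):

```zig
const std = @import("std");

const NodeIndex = enum(u32) { _ };
const OptionalNodeIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(o: OptionalNodeIndex) ?NodeIndex {
        if (o == .none) return null;
        const i: NodeIndex = @enumFromInt(@intFromEnum(o));
        return i;
    }
};

const Data = union(enum) {
    node_and_opt_node: struct { NodeIndex, OptionalNodeIndex },
};

fn walkForRange(data: Data) void {
    // Mirrors .for_range above: the start bound is mandatory, the end optional.
    const start, const opt_end = data.node_and_opt_node;
    _ = start; // walkExpression(w, start) in the real code
    if (opt_end.unwrap()) |end| {
        _ = end; // walkExpression(w, end)
    }
}

test walkForRange {
    const start: NodeIndex = @enumFromInt(0);
    walkForRange(.{ .node_and_opt_node = .{ start, .none } });
}
```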
@@ -328,17 +300,21 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
.sub,
.sub_wrap,
.sub_sat,
+ .@"catch",
+ .error_union,
+ .switch_range,
.@"orelse",
+ .array_access,
=> {
- const infix = datas[node];
- try walkExpression(w, infix.lhs);
- try walkExpression(w, infix.rhs);
+ const lhs, const rhs = ast.nodeData(node).node_and_node;
+ try walkExpression(w, lhs);
+ try walkExpression(w, rhs);
},
.assign_destructure => {
const full = ast.assignDestructure(node);
for (full.ast.variables) |variable_node| {
- switch (node_tags[variable_node]) {
+ switch (ast.nodeTag(variable_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@@ -357,15 +333,12 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
.negation_wrap,
.optional_type,
.address_of,
- => {
- return walkExpression(w, datas[node].lhs);
- },
-
.@"try",
.@"resume",
.@"await",
+ .deref,
=> {
- return walkExpression(w, datas[node].lhs);
+ return walkExpression(w, ast.nodeData(node).node);
},
.array_type,
@@ -417,51 +390,40 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
return walkCall(w, ast.fullCall(&buf, node).?);
},
- .array_access => {
- const suffix = datas[node];
- try walkExpression(w, suffix.lhs);
- try walkExpression(w, suffix.rhs);
- },
-
.slice_open, .slice, .slice_sentinel => return walkSlice(w, node, ast.fullSlice(node).?),
- .deref => {
- try walkExpression(w, datas[node].lhs);
- },
-
.unwrap_optional => {
- try walkExpression(w, datas[node].lhs);
+ try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
.@"break" => {
- const label_token = datas[node].lhs;
- const target = datas[node].rhs;
- if (label_token == 0 and target == 0) {
+ const label_token, const target = ast.nodeData(node).opt_token_and_opt_node;
+ if (label_token == .none and target == .none) {
// no expressions
- } else if (label_token == 0 and target != 0) {
- try walkExpression(w, target);
- } else if (label_token != 0 and target == 0) {
- try walkIdentifier(w, label_token);
- } else if (label_token != 0 and target != 0) {
- try walkExpression(w, target);
+ } else if (label_token == .none and target != .none) {
+ try walkExpression(w, target.unwrap().?);
+ } else if (label_token != .none and target == .none) {
+ try walkIdentifier(w, label_token.unwrap().?);
+ } else if (label_token != .none and target != .none) {
+ try walkExpression(w, target.unwrap().?);
}
},
.@"continue" => {
- const label = datas[node].lhs;
- if (label != 0) {
- return walkIdentifier(w, label); // label
+ const opt_label = ast.nodeData(node).opt_token_and_opt_node[0];
+ if (opt_label.unwrap()) |label| {
+ return walkIdentifier(w, label);
}
},
.@"return" => {
- if (datas[node].lhs != 0) {
- try walkExpression(w, datas[node].lhs);
+ if (ast.nodeData(node).opt_node.unwrap()) |lhs| {
+ try walkExpression(w, lhs);
}
},
.grouped_expression => {
- try walkExpression(w, datas[node].lhs);
+ try walkExpression(w, ast.nodeData(node).node_and_token[0]);
},
.container_decl,
@@ -482,13 +444,13 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
},
.error_set_decl => {
- const error_token = main_tokens[node];
+ const error_token = ast.nodeMainToken(node);
const lbrace = error_token + 1;
- const rbrace = datas[node].rhs;
+ const rbrace = ast.nodeData(node).token;
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
- switch (token_tags[i]) {
+ switch (ast.tokenTag(i)) {
.doc_comment => unreachable, // TODO
.identifier => try walkIdentifier(w, i),
.comma => {},
@@ -517,20 +479,16 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
},
.anyframe_type => {
- if (datas[node].rhs != 0) {
- return walkExpression(w, datas[node].rhs);
- }
+ _, const child_type = ast.nodeData(node).token_and_node;
+ return walkExpression(w, child_type);
},
.@"switch",
.switch_comma,
=> {
- const condition = datas[node].lhs;
- const extra = ast.extraData(datas[node].rhs, Ast.Node.SubRange);
- const cases = ast.extra_data[extra.start..extra.end];
-
- try walkExpression(w, condition); // condition expression
- try walkExpressions(w, cases);
+ const full = ast.fullSwitch(node).?;
+ try walkExpression(w, full.ast.condition); // condition expression
+ try walkExpressions(w, full.ast.cases);
},
.switch_case_one,
@@ -557,7 +515,7 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
=> return walkAsm(w, ast.fullAsm(node).?),
.enum_literal => {
- return walkIdentifier(w, main_tokens[node]); // name
+ return walkIdentifier(w, ast.nodeMainToken(node)); // name
},
.fn_decl => unreachable,
@@ -579,66 +537,66 @@ fn walkExpression(w: *Walk, node: Ast.Node.Index) Error!void {
fn walkGlobalVarDecl(w: *Walk, decl_node: Ast.Node.Index, var_decl: Ast.full.VarDecl) Error!void {
_ = decl_node;
- if (var_decl.ast.type_node != 0) {
- try walkExpression(w, var_decl.ast.type_node);
+ if (var_decl.ast.type_node.unwrap()) |type_node| {
+ try walkExpression(w, type_node);
}
- if (var_decl.ast.align_node != 0) {
- try walkExpression(w, var_decl.ast.align_node);
+ if (var_decl.ast.align_node.unwrap()) |align_node| {
+ try walkExpression(w, align_node);
}
- if (var_decl.ast.addrspace_node != 0) {
- try walkExpression(w, var_decl.ast.addrspace_node);
+ if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ try walkExpression(w, addrspace_node);
}
- if (var_decl.ast.section_node != 0) {
- try walkExpression(w, var_decl.ast.section_node);
+ if (var_decl.ast.section_node.unwrap()) |section_node| {
+ try walkExpression(w, section_node);
}
- if (var_decl.ast.init_node != 0) {
- if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
- try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
+ if (var_decl.ast.init_node.unwrap()) |init_node| {
+ if (!isUndefinedIdent(w.ast, init_node)) {
+ try w.transformations.append(.{ .replace_with_undef = init_node });
}
- try walkExpression(w, var_decl.ast.init_node);
+ try walkExpression(w, init_node);
}
}
fn walkLocalVarDecl(w: *Walk, var_decl: Ast.full.VarDecl) Error!void {
try walkIdentifierNew(w, var_decl.ast.mut_token + 1); // name
- if (var_decl.ast.type_node != 0) {
- try walkExpression(w, var_decl.ast.type_node);
+ if (var_decl.ast.type_node.unwrap()) |type_node| {
+ try walkExpression(w, type_node);
}
- if (var_decl.ast.align_node != 0) {
- try walkExpression(w, var_decl.ast.align_node);
+ if (var_decl.ast.align_node.unwrap()) |align_node| {
+ try walkExpression(w, align_node);
}
- if (var_decl.ast.addrspace_node != 0) {
- try walkExpression(w, var_decl.ast.addrspace_node);
+ if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ try walkExpression(w, addrspace_node);
}
- if (var_decl.ast.section_node != 0) {
- try walkExpression(w, var_decl.ast.section_node);
+ if (var_decl.ast.section_node.unwrap()) |section_node| {
+ try walkExpression(w, section_node);
}
- if (var_decl.ast.init_node != 0) {
- if (!isUndefinedIdent(w.ast, var_decl.ast.init_node)) {
- try w.transformations.append(.{ .replace_with_undef = var_decl.ast.init_node });
+ if (var_decl.ast.init_node.unwrap()) |init_node| {
+ if (!isUndefinedIdent(w.ast, init_node)) {
+ try w.transformations.append(.{ .replace_with_undef = init_node });
}
- try walkExpression(w, var_decl.ast.init_node);
+ try walkExpression(w, init_node);
}
}
fn walkContainerField(w: *Walk, field: Ast.full.ContainerField) Error!void {
- if (field.ast.type_expr != 0) {
- try walkExpression(w, field.ast.type_expr); // type
+ if (field.ast.type_expr.unwrap()) |type_expr| {
+ try walkExpression(w, type_expr); // type
}
- if (field.ast.align_expr != 0) {
- try walkExpression(w, field.ast.align_expr); // alignment
+ if (field.ast.align_expr.unwrap()) |align_expr| {
+ try walkExpression(w, align_expr); // alignment
}
- if (field.ast.value_expr != 0) {
- try walkExpression(w, field.ast.value_expr); // value
+ if (field.ast.value_expr.unwrap()) |value_expr| {
+ try walkExpression(w, value_expr); // value
}
}
@@ -649,18 +607,17 @@ fn walkBlock(
) Error!void {
_ = block_node;
const ast = w.ast;
- const node_tags = ast.nodes.items(.tag);
for (statements) |stmt| {
- switch (node_tags[stmt]) {
+ switch (ast.nodeTag(stmt)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(stmt).?;
- if (var_decl.ast.init_node != 0 and
- isUndefinedIdent(w.ast, var_decl.ast.init_node))
+ if (var_decl.ast.init_node != .none and
+ isUndefinedIdent(w.ast, var_decl.ast.init_node.unwrap().?))
{
try w.transformations.append(.{ .delete_var_decl = .{
.var_decl_node = stmt,
@@ -691,15 +648,15 @@ fn walkBlock(
fn walkArrayType(w: *Walk, array_type: Ast.full.ArrayType) Error!void {
try walkExpression(w, array_type.ast.elem_count);
- if (array_type.ast.sentinel != 0) {
- try walkExpression(w, array_type.ast.sentinel);
+ if (array_type.ast.sentinel.unwrap()) |sentinel| {
+ try walkExpression(w, sentinel);
}
return walkExpression(w, array_type.ast.elem_type);
}
fn walkArrayInit(w: *Walk, array_init: Ast.full.ArrayInit) Error!void {
- if (array_init.ast.type_expr != 0) {
- try walkExpression(w, array_init.ast.type_expr); // T
+ if (array_init.ast.type_expr.unwrap()) |type_expr| {
+ try walkExpression(w, type_expr); // T
}
for (array_init.ast.elements) |elem_init| {
try walkExpression(w, elem_init);
@@ -712,8 +669,8 @@ fn walkStructInit(
struct_init: Ast.full.StructInit,
) Error!void {
_ = struct_node;
- if (struct_init.ast.type_expr != 0) {
- try walkExpression(w, struct_init.ast.type_expr); // T
+ if (struct_init.ast.type_expr.unwrap()) |type_expr| {
+ try walkExpression(w, type_expr); // T
}
for (struct_init.ast.fields) |field_init| {
try walkExpression(w, field_init);
@@ -733,18 +690,17 @@ fn walkSlice(
_ = slice_node;
try walkExpression(w, slice.ast.sliced);
try walkExpression(w, slice.ast.start);
- if (slice.ast.end != 0) {
- try walkExpression(w, slice.ast.end);
+ if (slice.ast.end.unwrap()) |end| {
+ try walkExpression(w, end);
}
- if (slice.ast.sentinel != 0) {
- try walkExpression(w, slice.ast.sentinel);
+ if (slice.ast.sentinel.unwrap()) |sentinel| {
+ try walkExpression(w, sentinel);
}
}
fn walkIdentifier(w: *Walk, name_ident: Ast.TokenIndex) Error!void {
const ast = w.ast;
- const token_tags = ast.tokens.items(.tag);
- assert(token_tags[name_ident] == .identifier);
+ assert(ast.tokenTag(name_ident) == .identifier);
const name_bytes = ast.tokenSlice(name_ident);
_ = w.unreferenced_globals.swapRemove(name_bytes);
}
@@ -760,8 +716,8 @@ fn walkContainerDecl(
container_decl: Ast.full.ContainerDecl,
) Error!void {
_ = container_decl_node;
- if (container_decl.ast.arg != 0) {
- try walkExpression(w, container_decl.ast.arg);
+ if (container_decl.ast.arg.unwrap()) |arg| {
+ try walkExpression(w, arg);
}
try walkMembers(w, container_decl.ast.members);
}
@@ -772,14 +728,13 @@ fn walkBuiltinCall(
params: []const Ast.Node.Index,
) Error!void {
const ast = w.ast;
- const main_tokens = ast.nodes.items(.main_token);
- const builtin_token = main_tokens[call_node];
+ const builtin_token = ast.nodeMainToken(call_node);
const builtin_name = ast.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name).?;
switch (info.tag) {
.import => {
const operand_node = params[0];
- const str_lit_token = main_tokens[operand_node];
+ const str_lit_token = ast.nodeMainToken(operand_node);
const token_bytes = ast.tokenSlice(str_lit_token);
if (std.mem.endsWith(u8, token_bytes, ".zig\"")) {
const imported_string = std.zig.string_literal.parseAlloc(w.arena, token_bytes) catch
@@ -808,29 +763,30 @@ fn walkFnProto(w: *Walk, fn_proto: Ast.full.FnProto) Error!void {
{
var it = fn_proto.iterate(ast);
while (it.next()) |param| {
- if (param.type_expr != 0) {
- try walkExpression(w, param.type_expr);
+ if (param.type_expr) |type_expr| {
+ try walkExpression(w, type_expr);
}
}
}
- if (fn_proto.ast.align_expr != 0) {
- try walkExpression(w, fn_proto.ast.align_expr);
+ if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
+ try walkExpression(w, align_expr);
}
- if (fn_proto.ast.addrspace_expr != 0) {
- try walkExpression(w, fn_proto.ast.addrspace_expr);
+ if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
+ try walkExpression(w, addrspace_expr);
}
- if (fn_proto.ast.section_expr != 0) {
- try walkExpression(w, fn_proto.ast.section_expr);
+ if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
+ try walkExpression(w, section_expr);
}
- if (fn_proto.ast.callconv_expr != 0) {
- try walkExpression(w, fn_proto.ast.callconv_expr);
+ if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
+ try walkExpression(w, callconv_expr);
}
- try walkExpression(w, fn_proto.ast.return_type);
+ const return_type = fn_proto.ast.return_type.unwrap().?;
+ try walkExpression(w, return_type);
}
fn walkExpressions(w: *Walk, expressions: []const Ast.Node.Index) Error!void {
@@ -847,16 +803,13 @@ fn walkSwitchCase(w: *Walk, switch_case: Ast.full.SwitchCase) Error!void {
}
fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) Error!void {
- assert(while_node.ast.cond_expr != 0);
- assert(while_node.ast.then_expr != 0);
-
// Perform these transformations in this priority order (a worked sketch of the rules follows this function):
// 1. If the `else` expression is missing or an empty block, replace the condition with `true` if it is not already.
// 2. If the `then` block is empty, replace the condition with `false` if it is not already.
// 3. If the condition is already `true`, replace the whole loop expression with the contents of the `then` expression.
// 4. If the condition is already `false`, replace the whole loop expression with the contents of the `else` expression.
if (!isTrueIdent(w.ast, while_node.ast.cond_expr) and
- (while_node.ast.else_expr == 0 or isEmptyBlock(w.ast, while_node.ast.else_expr)))
+ (while_node.ast.else_expr == .none or isEmptyBlock(w.ast, while_node.ast.else_expr.unwrap().?)))
{
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_with_true = while_node.ast.cond_expr });
@@ -873,45 +826,39 @@ fn walkWhile(w: *Walk, node_index: Ast.Node.Index, while_node: Ast.full.While) E
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
.to_replace = node_index,
- .replacement = while_node.ast.else_expr,
+ .replacement = while_node.ast.else_expr.unwrap().?,
} });
}
try walkExpression(w, while_node.ast.cond_expr); // condition
- if (while_node.ast.cont_expr != 0) {
- try walkExpression(w, while_node.ast.cont_expr);
+ if (while_node.ast.cont_expr.unwrap()) |cont_expr| {
+ try walkExpression(w, cont_expr);
}
- if (while_node.ast.then_expr != 0) {
- try walkExpression(w, while_node.ast.then_expr);
- }
- if (while_node.ast.else_expr != 0) {
- try walkExpression(w, while_node.ast.else_expr);
+ try walkExpression(w, while_node.ast.then_expr);
+
+ if (while_node.ast.else_expr.unwrap()) |else_expr| {
+ try walkExpression(w, else_expr);
}
}
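
The four rules above are the reducer's whole strategy for loops (walkIf below repeats the same list for `if` expressions). A worked sketch of which rule fires on which shape; the source strings are illustrative, not from the commit:

const std = @import("std");

// rule -> before -> after, per the four comments in walkWhile/walkIf.
const Reduction = struct { rule: u8, before: []const u8, after: []const u8 };
const reductions = [_]Reduction{
    .{ .rule = 1, .before = "while (cond) { work(); }", .after = "while (true) { work(); }" },
    .{ .rule = 2, .before = "while (cond) {} else fallback()", .after = "while (false) {} else fallback()" },
    .{ .rule = 3, .before = "while (true) { work(); }", .after = "{ work(); }" },
    .{ .rule = 4, .before = "while (false) {} else fallback()", .after = "fallback()" },
};

test "every rule changes the program" {
    for (reductions) |r| try std.testing.expect(!std.mem.eql(u8, r.before, r.after));
}
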
fn walkFor(w: *Walk, for_node: Ast.full.For) Error!void {
try walkParamList(w, for_node.ast.inputs);
- if (for_node.ast.then_expr != 0) {
- try walkExpression(w, for_node.ast.then_expr);
- }
- if (for_node.ast.else_expr != 0) {
- try walkExpression(w, for_node.ast.else_expr);
+ try walkExpression(w, for_node.ast.then_expr);
+ if (for_node.ast.else_expr.unwrap()) |else_expr| {
+ try walkExpression(w, else_expr);
}
}
fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void {
- assert(if_node.ast.cond_expr != 0);
- assert(if_node.ast.then_expr != 0);
-
// Perform these transformations in this priority order:
// 1. If the `else` expression is missing or an empty block, replace the condition with `if (true)` if it is not already.
// 2. If the `then` block is empty, replace the condition with `if (false)` if it is not already.
// 3. If the condition is `if (true)`, replace the `if` expression with the contents of the `then` expression.
// 4. If the condition is `if (false)`, replace the `if` expression with the contents of the `else` expression.
if (!isTrueIdent(w.ast, if_node.ast.cond_expr) and
- (if_node.ast.else_expr == 0 or isEmptyBlock(w.ast, if_node.ast.else_expr)))
+ (if_node.ast.else_expr == .none or isEmptyBlock(w.ast, if_node.ast.else_expr.unwrap().?)))
{
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_with_true = if_node.ast.cond_expr });
@@ -928,17 +875,14 @@ fn walkIf(w: *Walk, node_index: Ast.Node.Index, if_node: Ast.full.If) Error!void
try w.transformations.ensureUnusedCapacity(1);
w.transformations.appendAssumeCapacity(.{ .replace_node = .{
.to_replace = node_index,
- .replacement = if_node.ast.else_expr,
+ .replacement = if_node.ast.else_expr.unwrap().?,
} });
}
try walkExpression(w, if_node.ast.cond_expr); // condition
-
- if (if_node.ast.then_expr != 0) {
- try walkExpression(w, if_node.ast.then_expr);
- }
- if (if_node.ast.else_expr != 0) {
- try walkExpression(w, if_node.ast.else_expr);
+ try walkExpression(w, if_node.ast.then_expr);
+ if (if_node.ast.else_expr.unwrap()) |else_expr| {
+ try walkExpression(w, else_expr);
}
}
@@ -958,9 +902,8 @@ fn walkParamList(w: *Walk, params: []const Ast.Node.Index) Error!void {
/// Check if it is already gutted (i.e. its body replaced with `@trap()`).
fn isFnBodyGutted(ast: *const Ast, body_node: Ast.Node.Index) bool {
// skip over discards
- const node_tags = ast.nodes.items(.tag);
var statements_buf: [2]Ast.Node.Index = undefined;
- const statements = switch (node_tags[body_node]) {
+ const statements = switch (ast.nodeTag(body_node)) {
.block_two,
.block_two_semicolon,
.block,
@@ -988,10 +931,7 @@ const StmtCategory = enum {
};
fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
- const node_tags = ast.nodes.items(.tag);
- const datas = ast.nodes.items(.data);
- const main_tokens = ast.nodes.items(.main_token);
- switch (node_tags[stmt]) {
+ switch (ast.nodeTag(stmt)) {
.builtin_call_two,
.builtin_call_two_comma,
.builtin_call,
@@ -999,12 +939,12 @@ fn categorizeStmt(ast: *const Ast, stmt: Ast.Node.Index) StmtCategory {
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = ast.builtinCallParams(&buf, stmt).?;
- return categorizeBuiltinCall(ast, main_tokens[stmt], params);
+ return categorizeBuiltinCall(ast, ast.nodeMainToken(stmt), params);
},
.assign => {
- const infix = datas[stmt];
- if (isDiscardIdent(ast, infix.lhs) and node_tags[infix.rhs] == .identifier) {
- const name_bytes = ast.tokenSlice(main_tokens[infix.rhs]);
+ const lhs, const rhs = ast.nodeData(stmt).node_and_node;
+ if (isDiscardIdent(ast, lhs) and ast.nodeTag(rhs) == .identifier) {
+ const name_bytes = ast.tokenSlice(ast.nodeMainToken(rhs));
if (std.mem.eql(u8, name_bytes, "undefined")) {
return .discard_undefined;
} else {
@@ -1046,11 +986,9 @@ fn isFalseIdent(ast: *const Ast, node: Ast.Node.Index) bool {
}
fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bool {
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.identifier => {
- const token_index = main_tokens[node];
+ const token_index = ast.nodeMainToken(node);
const name_bytes = ast.tokenSlice(token_index);
return std.mem.eql(u8, name_bytes, string);
},
@@ -1059,11 +997,10 @@ fn isMatchingIdent(ast: *const Ast, node: Ast.Node.Index, string: []const u8) bo
}
fn isEmptyBlock(ast: *const Ast, node: Ast.Node.Index) bool {
- const node_tags = ast.nodes.items(.tag);
- const node_data = ast.nodes.items(.data);
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.block_two => {
- return node_data[node].lhs == 0 and node_data[node].rhs == 0;
+ const opt_lhs, const opt_rhs = ast.nodeData(node).opt_node_and_opt_node;
+ return opt_lhs == .none and opt_rhs == .none;
},
else => return false,
}
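
Every hunk above is an instance of the commit's core rewrite: node fields that used index 0 as a null sentinel become `Ast.Node.OptionalIndex`, so `!= 0` guards become `.unwrap()` captures and guaranteed-present nodes become `.unwrap().?`. A minimal self-contained model of that contract, using a local stand-in enum rather than the real type:

const std = @import("std");

// Local stand-in mirroring the OptionalIndex shape this commit adds to
// Ast.zig; not the real type.
const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(oi: OptionalIndex) ?u32 {
        return if (oi == .none) null else @intFromEnum(oi);
    }
};

test "unwrap replaces the `!= 0` sentinel checks" {
    const absent: OptionalIndex = .none;
    const present: OptionalIndex = @enumFromInt(3);

    // Old style: `if (node != 0) walk(node);` -- new style:
    if (absent.unwrap()) |_| unreachable;
    if (present.unwrap()) |node| try std.testing.expectEqual(@as(u32, 3), node);

    // `.unwrap().?` asserts presence where the grammar guarantees it.
    try std.testing.expectEqual(@as(u32, 3), present.unwrap().?);
}
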
lib/compiler/reduce.zig
@@ -220,7 +220,7 @@ pub fn main() !void {
mem.eql(u8, msg, "unused function parameter") or
mem.eql(u8, msg, "unused capture"))
{
- const ident_token = item.data.token;
+ const ident_token = item.data.token.unwrap().?;
try more_fixups.unused_var_decls.put(gpa, ident_token, {});
} else {
std.debug.print("found other ZIR error: '{s}'\n", .{msg});
lib/docs/wasm/Decl.zig
@@ -15,8 +15,7 @@ parent: Index,
pub const ExtraInfo = struct {
is_pub: bool,
name: []const u8,
- /// This might not be a doc_comment token in which case there are no doc comments.
- first_doc_comment: Ast.TokenIndex,
+ first_doc_comment: Ast.OptionalTokenIndex,
};
pub const Index = enum(u32) {
@@ -34,16 +33,14 @@ pub fn is_pub(d: *const Decl) bool {
pub fn extra_info(d: *const Decl) ExtraInfo {
const ast = d.file.get_ast();
- const token_tags = ast.tokens.items(.tag);
- const node_tags = ast.nodes.items(.tag);
- switch (node_tags[d.ast_node]) {
+ switch (ast.nodeTag(d.ast_node)) {
.root => return .{
.name = "",
.is_pub = true,
- .first_doc_comment = if (token_tags[0] == .container_doc_comment)
- 0
+ .first_doc_comment = if (ast.tokenTag(0) == .container_doc_comment)
+ .fromToken(0)
else
- token_tags.len - 1,
+ .none,
},
.global_var_decl,
@@ -53,7 +50,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
const name_token = var_decl.ast.mut_token + 1;
- assert(token_tags[name_token] == .identifier);
+ assert(ast.tokenTag(name_token) == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
@@ -71,7 +68,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = ast.fullFnProto(&buf, d.ast_node).?;
const name_token = fn_proto.name_token.?;
- assert(token_tags[name_token] == .identifier);
+ assert(ast.tokenTag(name_token) == .identifier);
const ident_name = ast.tokenSlice(name_token);
return .{
.name = ident_name,
@@ -89,9 +86,7 @@ pub fn extra_info(d: *const Decl) ExtraInfo {
pub fn value_node(d: *const Decl) ?Ast.Node.Index {
const ast = d.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const token_tags = ast.tokens.items(.tag);
- return switch (node_tags[d.ast_node]) {
+ return switch (ast.nodeTag(d.ast_node)) {
.fn_proto,
.fn_proto_multi,
.fn_proto_one,
@@ -106,8 +101,8 @@ pub fn value_node(d: *const Decl) ?Ast.Node.Index {
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(d.ast_node).?;
- if (token_tags[var_decl.ast.mut_token] == .keyword_const)
- return var_decl.ast.init_node;
+ if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_const)
+ return var_decl.ast.init_node.unwrap();
return null;
},
@@ -148,19 +143,12 @@ pub fn get_child(decl: *const Decl, name: []const u8) ?Decl.Index {
pub fn get_type_fn_return_type_fn(decl: *const Decl) ?Decl.Index {
if (decl.get_type_fn_return_expr()) |return_expr| {
const ast = decl.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
-
- switch (node_tags[return_expr]) {
- .call, .call_comma, .call_one, .call_one_comma => {
- const node_data = ast.nodes.items(.data);
- const function = node_data[return_expr].lhs;
- const token = ast.nodes.items(.main_token)[function];
- const name = ast.tokenSlice(token);
- if (decl.lookup(name)) |function_decl| {
- return function_decl;
- }
- },
- else => {},
+ var buffer: [1]Ast.Node.Index = undefined;
+ const call = ast.fullCall(&buffer, return_expr) orelse return null;
+ const token = ast.nodeMainToken(call.ast.fn_expr);
+ const name = ast.tokenSlice(token);
+ if (decl.lookup(name)) |function_decl| {
+ return function_decl;
}
}
return null;
@@ -171,35 +159,18 @@ pub fn get_type_fn_return_expr(decl: *const Decl) ?Ast.Node.Index {
switch (decl.categorize()) {
.type_function => {
const ast = decl.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_data = ast.nodes.items(.data);
- const body_node = node_data[decl.ast_node].rhs;
- if (body_node == 0) return null;
- switch (node_tags[body_node]) {
- .block, .block_semicolon => {
- const statements = ast.extra_data[node_data[body_node].lhs..node_data[body_node].rhs];
- // Look for the return statement
- for (statements) |stmt| {
- if (node_tags[stmt] == .@"return") {
- return node_data[stmt].lhs;
- }
- }
- return null;
- },
- .block_two, .block_two_semicolon => {
- if (node_tags[node_data[body_node].lhs] == .@"return") {
- return node_data[node_data[body_node].lhs].lhs;
- }
- if (node_data[body_node].rhs != 0 and
- node_tags[node_data[body_node].rhs] == .@"return")
- {
- return node_data[node_data[body_node].rhs].lhs;
- }
- return null;
- },
- else => return null,
+ const body_node = ast.nodeData(decl.ast_node).node_and_node[1];
+
+ var buf: [2]Ast.Node.Index = undefined;
+ const statements = ast.blockStatements(&buf, body_node) orelse return null;
+
+ for (statements) |stmt| {
+ if (ast.nodeTag(stmt) == .@"return") {
+ return ast.nodeData(stmt).node;
+ }
}
+ return null;
},
else => return null,
}
@@ -269,16 +240,15 @@ pub fn append_parent_ns(list: *std.ArrayListUnmanaged(u8), parent: Decl.Index) O
}
}
-pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.TokenIndex {
- const token_tags = ast.tokens.items(.tag);
+pub fn findFirstDocComment(ast: *const Ast, token: Ast.TokenIndex) Ast.OptionalTokenIndex {
var it = token;
while (it > 0) {
it -= 1;
- if (token_tags[it] != .doc_comment) {
- return it + 1;
+ if (ast.tokenTag(it) != .doc_comment) {
+ return .fromToken(it + 1);
}
}
- return it;
+ return .none;
}
/// Successively looks up each component.
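
The backward scan in findFirstDocComment is the kind of loop where off-by-ones hide; a standalone model of it, with token tags reduced to a hypothetical is-doc-comment flag and null where the real function returns `.none`:

const std = @import("std");

// Same loop shape as findFirstDocComment above.
fn firstDocComment(is_doc_comment: []const bool, token: u32) ?u32 {
    var it = token;
    while (it > 0) {
        it -= 1;
        if (!is_doc_comment[it]) return it + 1;
    }
    return null;
}

test "scan stops at the first non-doc token" {
    // tokens: [pub] [///] [///] [fn] -> the doc run starts at index 1
    const tags = [_]bool{ false, true, true, false };
    try std.testing.expectEqual(@as(?u32, 1), firstDocComment(&tags, 3));
}
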
lib/docs/wasm/html_render.zig
@@ -41,14 +41,10 @@ pub fn fileSourceHtml(
var field_access_buffer: std.ArrayListUnmanaged(u8) = .empty;
};
- const token_tags = ast.tokens.items(.tag);
- const token_starts = ast.tokens.items(.start);
- const main_tokens = ast.nodes.items(.main_token);
-
const start_token = ast.firstToken(root_node);
const end_token = ast.lastToken(root_node) + 1;
- var cursor: usize = token_starts[start_token];
+ var cursor: usize = ast.tokenStart(start_token);
var indent: usize = 0;
if (std.mem.lastIndexOf(u8, ast.source[0..cursor], "\n")) |newline_index| {
@@ -64,8 +60,8 @@ pub fn fileSourceHtml(
var next_annotate_index: usize = 0;
for (
- token_tags[start_token..end_token],
- token_starts[start_token..end_token],
+ ast.tokens.items(.tag)[start_token..end_token],
+ ast.tokens.items(.start)[start_token..end_token],
start_token..,
) |tag, start, token_index| {
const between = ast.source[cursor..start];
@@ -184,7 +180,7 @@ pub fn fileSourceHtml(
.identifier => i: {
if (options.fn_link != .none) {
const fn_link = options.fn_link.get();
- const fn_token = main_tokens[fn_link.ast_node];
+ const fn_token = ast.nodeMainToken(fn_link.ast_node);
if (token_index == fn_token + 1) {
try out.appendSlice(gpa, "<a class=\"tok-fn\" href=\"#");
_ = missing_feature_url_escape;
@@ -196,7 +192,7 @@ pub fn fileSourceHtml(
}
}
- if (token_index > 0 and token_tags[token_index - 1] == .keyword_fn) {
+ if (token_index > 0 and ast.tokenTag(token_index - 1) == .keyword_fn) {
try out.appendSlice(gpa, "<span class=\"tok-fn\">");
try appendEscaped(out, slice);
try out.appendSlice(gpa, "</span>");
@@ -358,16 +354,11 @@ fn walkFieldAccesses(
node: Ast.Node.Index,
) Oom!void {
const ast = file_index.get_ast();
- const node_tags = ast.nodes.items(.tag);
- assert(node_tags[node] == .field_access);
- const node_datas = ast.nodes.items(.data);
- const main_tokens = ast.nodes.items(.main_token);
- const object_node = node_datas[node].lhs;
- const dot_token = main_tokens[node];
- const field_ident = dot_token + 1;
- switch (node_tags[object_node]) {
+ assert(ast.nodeTag(node) == .field_access);
+ const object_node, const field_ident = ast.nodeData(node).node_and_token;
+ switch (ast.nodeTag(object_node)) {
.identifier => {
- const lhs_ident = main_tokens[object_node];
+ const lhs_ident = ast.nodeMainToken(object_node);
try resolveIdentLink(file_index, out, lhs_ident);
},
.field_access => {
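
Accesses like `nodeData(node).node_and_token` above replace the old untyped `lhs`/`rhs` pair with shape-named payloads that destructure at the use site. A small model of the idea; the real `Ast.Node.Data` union in Ast.zig has many more variants and uses the real index types:

const std = @import("std");

const Index = enum(u32) { _ };
const Data = union {
    node: Index,
    node_and_node: struct { Index, Index },
    node_and_token: struct { Index, u32 },
};

test "payloads destructure directly at the use site" {
    const d: Data = .{ .node_and_token = .{ @enumFromInt(5), 17 } };
    const object_node, const field_ident = d.node_and_token;
    try std.testing.expectEqual(@as(u32, 5), @intFromEnum(object_node));
    try std.testing.expectEqual(@as(u32, 17), field_ident);
}
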
lib/docs/wasm/main.zig
@@ -124,7 +124,9 @@ fn query_exec_fallible(query: []const u8, ignore_case: bool) !void {
@memcpy(g.full_path_search_text_lower.items, g.full_path_search_text.items);
const ast = decl.file.get_ast();
- try collect_docs(&g.doc_search_text, ast, info.first_doc_comment);
+ if (info.first_doc_comment.unwrap()) |first_doc_comment| {
+ try collect_docs(&g.doc_search_text, ast, first_doc_comment);
+ }
if (ignore_case) {
ascii_lower(g.full_path_search_text_lower.items);
@@ -227,18 +229,15 @@ const ErrorIdentifier = packed struct(u64) {
fn hasDocs(ei: ErrorIdentifier) bool {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
- const token_tags = ast.tokens.items(.tag);
const token_index = ei.token_index;
if (token_index == 0) return false;
- return token_tags[token_index - 1] == .doc_comment;
+ return ast.tokenTag(token_index - 1) == .doc_comment;
}
fn html(ei: ErrorIdentifier, base_decl: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
const decl_index = ei.decl_index;
const ast = decl_index.get().file.get_ast();
const name = ast.tokenSlice(ei.token_index);
- const first_doc_comment = Decl.findFirstDocComment(ast, ei.token_index);
- const has_docs = ast.tokens.items(.tag)[first_doc_comment] == .doc_comment;
const has_link = base_decl != decl_index;
try out.appendSlice(gpa, "<dt>");
@@ -253,7 +252,7 @@ const ErrorIdentifier = packed struct(u64) {
}
try out.appendSlice(gpa, "</dt>");
- if (has_docs) {
+ if (Decl.findFirstDocComment(ast, ei.token_index).unwrap()) |first_doc_comment| {
try out.appendSlice(gpa, "<dd>");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</dd>");
@@ -319,17 +318,16 @@ fn addErrorsFromExpr(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
switch (decl.file.categorize_expr(node)) {
- .error_set => |n| switch (node_tags[n]) {
+ .error_set => |n| switch (ast.nodeTag(n)) {
.error_set_decl => {
try addErrorsFromNode(decl_index, out, node);
},
.merge_error_sets => {
- try addErrorsFromExpr(decl_index, out, node_datas[node].lhs);
- try addErrorsFromExpr(decl_index, out, node_datas[node].rhs);
+ const lhs, const rhs = ast.nodeData(n).node_and_node;
+ try addErrorsFromExpr(decl_index, out, lhs);
+ try addErrorsFromExpr(decl_index, out, rhs);
},
else => unreachable,
},
@@ -347,11 +345,9 @@ fn addErrorsFromNode(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const main_tokens = ast.nodes.items(.main_token);
- const token_tags = ast.tokens.items(.tag);
- const error_token = main_tokens[node];
+ const error_token = ast.nodeMainToken(node);
var tok_i = error_token + 2;
- while (true) : (tok_i += 1) switch (token_tags[tok_i]) {
+ while (true) : (tok_i += 1) switch (ast.tokenTag(tok_i)) {
.doc_comment, .comma => {},
.identifier => {
const name = ast.tokenSlice(tok_i);
@@ -391,15 +387,13 @@ fn decl_fields_fallible(decl_index: Decl.Index) ![]Ast.Node.Index {
switch (decl.categorize()) {
.type_function => {
- const node_tags = ast.nodes.items(.tag);
-
// If the type function returns a reference to another type function, get the fields from there
if (decl.get_type_fn_return_type_fn()) |function_decl| {
return decl_fields_fallible(function_decl);
}
// If the type function returns a container, such as a `struct`, read that container's fields
if (decl.get_type_fn_return_expr()) |return_expr| {
- switch (node_tags[return_expr]) {
+ switch (ast.nodeTag(return_expr)) {
.container_decl, .container_decl_trailing, .container_decl_two, .container_decl_two_trailing, .container_decl_arg, .container_decl_arg_trailing => {
return ast_decl_fields_fallible(ast, return_expr);
},
@@ -420,10 +414,9 @@ fn ast_decl_fields_fallible(ast: *Ast, ast_index: Ast.Node.Index) ![]Ast.Node.In
var result: std.ArrayListUnmanaged(Ast.Node.Index) = .empty;
};
g.result.clearRetainingCapacity();
- const node_tags = ast.nodes.items(.tag);
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, ast_index) orelse return &.{};
- for (container_decl.ast.members) |member_node| switch (node_tags[member_node]) {
+ for (container_decl.ast.members) |member_node| switch (ast.nodeTag(member_node)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -478,9 +471,8 @@ fn decl_field_html_fallible(
try out.appendSlice(gpa, "</code></pre>");
const field = ast.fullContainerField(field_node).?;
- const first_doc_comment = Decl.findFirstDocComment(ast, field.firstToken());
- if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
+ if (Decl.findFirstDocComment(ast, field.firstToken()).unwrap()) |first_doc_comment| {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
@@ -494,14 +486,13 @@ fn decl_param_html_fallible(
) !void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const token_tags = ast.tokens.items(.tag);
const colon = ast.firstToken(param_node) - 1;
const name_token = colon - 1;
const first_doc_comment = f: {
var it = ast.firstToken(param_node);
while (it > 0) {
it -= 1;
- switch (token_tags[it]) {
+ switch (ast.tokenTag(it)) {
.doc_comment, .colon, .identifier, .keyword_comptime, .keyword_noalias => {},
else => break,
}
@@ -516,7 +507,7 @@ fn decl_param_html_fallible(
try fileSourceHtml(decl.file, out, param_node, .{});
try out.appendSlice(gpa, "</code></pre>");
- if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
+ if (ast.tokenTag(first_doc_comment) == .doc_comment) {
try out.appendSlice(gpa, "<div class=\"fieldDocs\">");
try render_docs(out, decl_index, first_doc_comment, false);
try out.appendSlice(gpa, "</div>");
@@ -526,10 +517,8 @@ fn decl_param_html_fallible(
export fn decl_fn_proto_html(decl_index: Decl.Index, linkify_fn_name: bool) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
- const proto_node = switch (node_tags[decl.ast_node]) {
- .fn_decl => node_datas[decl.ast_node].lhs,
+ const proto_node = switch (ast.nodeTag(decl.ast_node)) {
+ .fn_decl => ast.nodeData(decl.ast_node).node_and_node[0],
.fn_proto,
.fn_proto_one,
@@ -586,17 +575,16 @@ export fn decl_parent(decl_index: Decl.Index) Decl.Index {
return decl.parent;
}
-export fn fn_error_set(decl_index: Decl.Index) Ast.Node.Index {
+export fn fn_error_set(decl_index: Decl.Index) Ast.Node.OptionalIndex {
const decl = decl_index.get();
const ast = decl.file.get_ast();
var buf: [1]Ast.Node.Index = undefined;
const full = ast.fullFnProto(&buf, decl.ast_node).?;
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
- return switch (node_tags[full.ast.return_type]) {
- .error_set_decl => full.ast.return_type,
- .error_union => node_datas[full.ast.return_type].lhs,
- else => 0,
+ const return_type = full.ast.return_type.unwrap().?;
+ return switch (ast.nodeTag(return_type)) {
+ .error_set_decl => return_type.toOptional(),
+ .error_union => ast.nodeData(return_type).node_and_node[0].toOptional(),
+ else => .none,
};
}
@@ -609,21 +597,19 @@ export fn decl_file_path(decl_index: Decl.Index) String {
export fn decl_category_name(decl_index: Decl.Index) String {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const token_tags = ast.tokens.items(.tag);
const name = switch (decl.categorize()) {
.namespace, .container => |node| {
- const node_tags = ast.nodes.items(.tag);
- if (node_tags[decl.ast_node] == .root)
+ if (ast.nodeTag(decl.ast_node) == .root)
return String.init("struct");
string_result.clearRetainingCapacity();
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, node).?;
if (container_decl.layout_token) |t| {
- if (token_tags[t] == .keyword_extern) {
+ if (ast.tokenTag(t) == .keyword_extern) {
string_result.appendSlice(gpa, "extern ") catch @panic("OOM");
}
}
- const main_token_tag = token_tags[container_decl.ast.main_token];
+ const main_token_tag = ast.tokenTag(container_decl.ast.main_token);
string_result.appendSlice(gpa, main_token_tag.lexeme().?) catch @panic("OOM");
return String.init(string_result.items);
},
@@ -656,7 +642,9 @@ export fn decl_name(decl_index: Decl.Index) String {
export fn decl_docs_html(decl_index: Decl.Index, short: bool) String {
const decl = decl_index.get();
string_result.clearRetainingCapacity();
- render_docs(&string_result, decl_index, decl.extra_info().first_doc_comment, short) catch @panic("OOM");
+ if (decl.extra_info().first_doc_comment.unwrap()) |first_doc_comment| {
+ render_docs(&string_result, decl_index, first_doc_comment, short) catch @panic("OOM");
+ }
return String.init(string_result.items);
}
@@ -665,10 +653,9 @@ fn collect_docs(
ast: *const Ast,
first_doc_comment: Ast.TokenIndex,
) Oom!void {
- const token_tags = ast.tokens.items(.tag);
list.clearRetainingCapacity();
var it = first_doc_comment;
- while (true) : (it += 1) switch (token_tags[it]) {
+ while (true) : (it += 1) switch (ast.tokenTag(it)) {
.doc_comment, .container_doc_comment => {
// It is tempting to trim this string but think carefully about how
// that will affect the markdown parser.
@@ -687,12 +674,11 @@ fn render_docs(
) Oom!void {
const decl = decl_index.get();
const ast = decl.file.get_ast();
- const token_tags = ast.tokens.items(.tag);
var parser = try markdown.Parser.init(gpa);
defer parser.deinit();
var it = first_doc_comment;
- while (true) : (it += 1) switch (token_tags[it]) {
+ while (true) : (it += 1) switch (ast.tokenTag(it)) {
.doc_comment, .container_doc_comment => {
const line = ast.tokenSlice(it)[3..];
if (short and line.len == 0) break;
@@ -767,9 +753,9 @@ export fn decl_type_html(decl_index: Decl.Index) String {
t: {
// If there is an explicit type, use it.
if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
- if (var_decl.ast.type_node != 0) {
+ if (var_decl.ast.type_node.unwrap()) |type_node| {
string_result.appendSlice(gpa, "<code>") catch @panic("OOM");
- fileSourceHtml(decl.file, &string_result, var_decl.ast.type_node, .{
+ fileSourceHtml(decl.file, &string_result, type_node, .{
.skip_comments = true,
.collapse_whitespace = true,
}) catch |e| {
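
On the producer side, `fn_error_set` above now reports absence as `.none` and wraps present nodes with `.toOptional()`. A self-contained model of that round trip, again with local stand-in types (the real `toOptional` additionally asserts the result is not `.none`):

const std = @import("std");

const Index = enum(u32) {
    _,

    fn toOptional(i: Index) OptionalIndex {
        return @enumFromInt(@intFromEnum(i));
    }
};

const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(oi: OptionalIndex) ?Index {
        return if (oi == .none) null else @enumFromInt(@intFromEnum(oi));
    }
};

test "toOptional round-trips through unwrap" {
    const n: Index = @enumFromInt(7);
    try std.testing.expect(n.toOptional().unwrap().? == n);
    try std.testing.expect(OptionalIndex.none.unwrap() == null);
}
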
lib/docs/wasm/Walk.zig
@@ -91,12 +91,10 @@ pub const File = struct {
pub fn categorize_decl(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const token_tags = ast.tokens.items(.tag);
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.root => {
for (ast.rootDecls()) |member| {
- switch (node_tags[member]) {
+ switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -113,10 +111,12 @@ pub const File = struct {
.aligned_var_decl,
=> {
const var_decl = ast.fullVarDecl(node).?;
- if (token_tags[var_decl.ast.mut_token] == .keyword_var)
+ if (ast.tokenTag(var_decl.ast.mut_token) == .keyword_var)
return .{ .global_variable = node };
-            return categorize_expr(file_index, var_decl.ast.init_node);
+            const init_node = var_decl.ast.init_node.unwrap() orelse
+                return .{ .global_const = node };
+            return categorize_expr(file_index, init_node);

},
.fn_proto,
@@ -139,7 +139,7 @@ pub const File = struct {
node: Ast.Node.Index,
full: Ast.full.FnProto,
) Category {
- return switch (categorize_expr(file_index, full.ast.return_type)) {
+ return switch (categorize_expr(file_index, full.ast.return_type.unwrap().?)) {
.namespace, .container, .error_set, .type_type => .{ .type_function = node },
else => .{ .function = node },
};
@@ -155,12 +155,8 @@ pub const File = struct {
pub fn categorize_expr(file_index: File.Index, node: Ast.Node.Index) Category {
const file = file_index.get();
const ast = file_index.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
- const main_tokens = ast.nodes.items(.main_token);
- const token_tags = ast.tokens.items(.tag);
- //log.debug("categorize_expr tag {s}", .{@tagName(node_tags[node])});
- return switch (node_tags[node]) {
+ //log.debug("categorize_expr tag {s}", .{@tagName(ast.nodeTag(node))});
+ return switch (ast.nodeTag(node)) {
.container_decl,
.container_decl_trailing,
.container_decl_arg,
@@ -176,11 +172,11 @@ pub const File = struct {
=> {
var buf: [2]Ast.Node.Index = undefined;
const container_decl = ast.fullContainerDecl(&buf, node).?;
- if (token_tags[container_decl.ast.main_token] != .keyword_struct) {
+ if (ast.tokenTag(container_decl.ast.main_token) != .keyword_struct) {
return .{ .container = node };
}
for (container_decl.ast.members) |member| {
- switch (node_tags[member]) {
+ switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -196,7 +192,7 @@ pub const File = struct {
=> .{ .error_set = node },
.identifier => {
- const name_token = ast.nodes.items(.main_token)[node];
+ const name_token = ast.nodeMainToken(node);
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, "type"))
return .type_type;
@@ -217,9 +213,7 @@ pub const File = struct {
},
.field_access => {
- const object_node = node_datas[node].lhs;
- const dot_token = main_tokens[node];
- const field_ident = dot_token + 1;
+ const object_node, const field_ident = ast.nodeData(node).node_and_token;
const field_name = ast.tokenSlice(field_ident);
switch (categorize_expr(file_index, object_node)) {
@@ -259,9 +253,9 @@ pub const File = struct {
.@"if",
=> {
const if_full = ast.fullIf(node).?;
- if (if_full.ast.else_expr != 0) {
+ if (if_full.ast.else_expr.unwrap()) |else_expr| {
const then_cat = categorize_expr_deep(file_index, if_full.ast.then_expr);
- const else_cat = categorize_expr_deep(file_index, if_full.ast.else_expr);
+ const else_cat = categorize_expr_deep(file_index, else_expr);
if (then_cat == .type_type and else_cat == .type_type) {
return .type_type;
} else if (then_cat == .error_set and else_cat == .error_set) {
@@ -320,11 +314,10 @@ pub const File = struct {
params: []const Ast.Node.Index,
) Category {
const ast = file_index.get_ast();
- const main_tokens = ast.nodes.items(.main_token);
- const builtin_token = main_tokens[node];
+ const builtin_token = ast.nodeMainToken(node);
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@import")) {
- const str_lit_token = main_tokens[params[0]];
+ const str_lit_token = ast.nodeMainToken(params[0]);
const str_bytes = ast.tokenSlice(str_lit_token);
const file_path = std.zig.string_literal.parseAlloc(gpa, str_bytes) catch @panic("OOM");
defer gpa.free(file_path);
@@ -357,14 +350,12 @@ pub const File = struct {
fn categorize_switch(file_index: File.Index, node: Ast.Node.Index) Category {
const ast = file_index.get_ast();
- const node_datas = ast.nodes.items(.data);
- const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
- const case_nodes = ast.extra_data[extra.start..extra.end];
+ const full = ast.fullSwitch(node).?;
var all_type_type = true;
var all_error_set = true;
var any_type = false;
- if (case_nodes.len == 0) return .{ .global_const = node };
- for (case_nodes) |case_node| {
+ if (full.ast.cases.len == 0) return .{ .global_const = node };
+ for (full.ast.cases) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
switch (categorize_expr_deep(file_index, case.ast.target_expr)) {
.type_type => {
@@ -410,8 +401,8 @@ pub fn add_file(file_name: []const u8, bytes: []u8) !File.Index {
const scope = try gpa.create(Scope);
scope.* = .{ .tag = .top };
- const decl_index = try file_index.add_decl(0, .none);
- try struct_decl(&w, scope, decl_index, 0, ast.containerDeclRoot());
+ const decl_index = try file_index.add_decl(.root, .none);
+ try struct_decl(&w, scope, decl_index, .root, ast.containerDeclRoot());
const file = file_index.get();
shrinkToFit(&file.ident_decls);
@@ -505,13 +496,12 @@ pub const Scope = struct {
}
pub fn lookup(start_scope: *Scope, ast: *const Ast, name: []const u8) ?Ast.Node.Index {
- const main_tokens = ast.nodes.items(.main_token);
var it: *Scope = start_scope;
while (true) switch (it.tag) {
.top => break,
.local => {
const local: *Local = @alignCast(@fieldParentPtr("base", it));
- const name_token = main_tokens[local.var_node] + 1;
+ const name_token = ast.nodeMainToken(local.var_node) + 1;
const ident_name = ast.tokenSlice(name_token);
if (std.mem.eql(u8, ident_name, name)) {
return local.var_node;
@@ -538,8 +528,6 @@ fn struct_decl(
container_decl: Ast.full.ContainerDecl,
) Oom!void {
const ast = w.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
const namespace = try gpa.create(Scope.Namespace);
namespace.* = .{
@@ -549,7 +537,7 @@ fn struct_decl(
try w.file.get().scopes.putNoClobber(gpa, node, &namespace.base);
try w.scanDecls(namespace, container_decl.ast.members);
- for (container_decl.ast.members) |member| switch (node_tags[member]) {
+ for (container_decl.ast.members) |member| switch (ast.nodeTag(member)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -569,7 +557,7 @@ fn struct_decl(
try w.file.get().doctests.put(gpa, member, doctest_node);
}
const decl_index = try w.file.add_decl(member, parent_decl);
- const body = if (node_tags[member] == .fn_decl) node_datas[member].rhs else 0;
+ const body = if (ast.nodeTag(member) == .fn_decl) ast.nodeData(member).node_and_node[1].toOptional() else .none;
try w.fn_decl(&namespace.base, decl_index, body, full);
},
@@ -584,9 +572,9 @@ fn struct_decl(
.@"comptime",
.@"usingnamespace",
- => try w.expr(&namespace.base, parent_decl, node_datas[member].lhs),
+ => try w.expr(&namespace.base, parent_decl, ast.nodeData(member).node),
- .test_decl => try w.expr(&namespace.base, parent_decl, node_datas[member].rhs),
+ .test_decl => try w.expr(&namespace.base, parent_decl, ast.nodeData(member).opt_token_and_node[1]),
else => unreachable,
};
@@ -633,13 +621,13 @@ fn fn_decl(
w: *Walk,
scope: *Scope,
parent_decl: Decl.Index,
- body: Ast.Node.Index,
+ body: Ast.Node.OptionalIndex,
full: Ast.full.FnProto,
) Oom!void {
for (full.ast.params) |param| {
try expr(w, scope, parent_decl, param);
}
- try expr(w, scope, parent_decl, full.ast.return_type);
+ try expr(w, scope, parent_decl, full.ast.return_type.unwrap().?);
try maybe_expr(w, scope, parent_decl, full.ast.align_expr);
try maybe_expr(w, scope, parent_decl, full.ast.addrspace_expr);
try maybe_expr(w, scope, parent_decl, full.ast.section_expr);
@@ -647,17 +635,13 @@ fn fn_decl(
try maybe_expr(w, scope, parent_decl, body);
}
-fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
- if (node != 0) return expr(w, scope, parent_decl, node);
+fn maybe_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.OptionalIndex) Oom!void {
+ if (node.unwrap()) |n| return expr(w, scope, parent_decl, n);
}
fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index) Oom!void {
- assert(node != 0);
const ast = w.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
- const main_tokens = ast.nodes.items(.main_token);
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.root => unreachable, // Top-level declaration.
.@"usingnamespace" => unreachable, // Top-level declaration.
.test_decl => unreachable, // Top-level declaration.
@@ -738,8 +722,9 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.array_access,
.switch_range,
=> {
- try expr(w, scope, parent_decl, node_datas[node].lhs);
- try expr(w, scope, parent_decl, node_datas[node].rhs);
+ const lhs, const rhs = ast.nodeData(node).node_and_node;
+ try expr(w, scope, parent_decl, lhs);
+ try expr(w, scope, parent_decl, rhs);
},
.assign_destructure => {
@@ -752,35 +737,33 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.bit_not,
.negation,
.negation_wrap,
- .@"return",
.deref,
.address_of,
.optional_type,
- .unwrap_optional,
- .grouped_expression,
.@"comptime",
.@"nosuspend",
.@"suspend",
.@"await",
.@"resume",
.@"try",
- => try maybe_expr(w, scope, parent_decl, node_datas[node].lhs),
+ => try expr(w, scope, parent_decl, ast.nodeData(node).node),
+ .unwrap_optional,
+ .grouped_expression,
+ => try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
+ .@"return" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_node),
- .anyframe_type,
- .@"break",
- => try maybe_expr(w, scope, parent_decl, node_datas[node].rhs),
+ .anyframe_type => try expr(w, scope, parent_decl, ast.nodeData(node).token_and_node[1]),
+ .@"break" => try maybe_expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_opt_node[1]),
.identifier => {
- const ident_token = main_tokens[node];
+ const ident_token = ast.nodeMainToken(node);
const ident_name = ast.tokenSlice(ident_token);
if (scope.lookup(ast, ident_name)) |var_node| {
try w.file.get().ident_decls.put(gpa, ident_token, var_node);
}
},
.field_access => {
- const object_node = node_datas[node].lhs;
- const dot_token = main_tokens[node];
- const field_ident = dot_token + 1;
+ const object_node, const field_ident = ast.nodeData(node).node_and_token;
try w.file.get().token_parents.put(gpa, field_ident, node);
// This will populate the left-most field object if it is an
// identifier, allowing rendering code to piece together the link.
@@ -857,9 +840,10 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.for_simple, .@"for" => {
const full = ast.fullFor(node).?;
for (full.ast.inputs) |input| {
- if (node_tags[input] == .for_range) {
- try expr(w, scope, parent_decl, node_datas[input].lhs);
- try maybe_expr(w, scope, parent_decl, node_datas[input].rhs);
+ if (ast.nodeTag(input) == .for_range) {
+ const start, const end = ast.nodeData(input).node_and_opt_node;
+ try expr(w, scope, parent_decl, start);
+ try maybe_expr(w, scope, parent_decl, end);
} else {
try expr(w, scope, parent_decl, input);
}
@@ -914,17 +898,16 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
},
.array_type_sentinel => {
- const extra = ast.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
- try expr(w, scope, parent_decl, node_datas[node].lhs);
+ const len_expr, const extra_index = ast.nodeData(node).node_and_extra;
+ const extra = ast.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
+ try expr(w, scope, parent_decl, len_expr);
try expr(w, scope, parent_decl, extra.elem_type);
try expr(w, scope, parent_decl, extra.sentinel);
},
.@"switch", .switch_comma => {
- const operand_node = node_datas[node].lhs;
- try expr(w, scope, parent_decl, operand_node);
- const extra = ast.extraData(node_datas[node].rhs, Ast.Node.SubRange);
- const case_nodes = ast.extra_data[extra.start..extra.end];
- for (case_nodes) |case_node| {
+ const full = ast.fullSwitch(node).?;
+ try expr(w, scope, parent_decl, full.ast.condition);
+ for (full.ast.cases) |case_node| {
const case = ast.fullSwitchCase(case_node).?;
for (case.ast.values) |value_node| {
try expr(w, scope, parent_decl, value_node);
@@ -973,7 +956,7 @@ fn expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, node: Ast.Node.Index)
.fn_proto,
=> {
var buf: [1]Ast.Node.Index = undefined;
- return fn_decl(w, scope, parent_decl, 0, ast.fullFnProto(&buf, node).?);
+ return fn_decl(w, scope, parent_decl, .none, ast.fullFnProto(&buf, node).?);
},
}
}
@@ -993,8 +976,7 @@ fn builtin_call(
params: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
- const main_tokens = ast.nodes.items(.main_token);
- const builtin_token = main_tokens[node];
+ const builtin_token = ast.nodeMainToken(node);
const builtin_name = ast.tokenSlice(builtin_token);
if (std.mem.eql(u8, builtin_name, "@This")) {
try w.file.get().node_decls.put(gpa, node, scope.getNamespaceDecl());
@@ -1012,13 +994,11 @@ fn block(
statements: []const Ast.Node.Index,
) Oom!void {
const ast = w.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
var scope = parent_scope;
for (statements) |node| {
- switch (node_tags[node]) {
+ switch (ast.nodeTag(node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@@ -1039,11 +1019,10 @@ fn block(
log.debug("walk assign_destructure not implemented yet", .{});
},
- .grouped_expression => try expr(w, scope, parent_decl, node_datas[node].lhs),
+ .grouped_expression => try expr(w, scope, parent_decl, ast.nodeData(node).node_and_token[0]),
- .@"defer",
- .@"errdefer",
- => try expr(w, scope, parent_decl, node_datas[node].rhs),
+ .@"defer" => try expr(w, scope, parent_decl, ast.nodeData(node).node),
+ .@"errdefer" => try expr(w, scope, parent_decl, ast.nodeData(node).opt_token_and_node[1]),
else => try expr(w, scope, parent_decl, node),
}
@@ -1059,18 +1038,14 @@ fn while_expr(w: *Walk, scope: *Scope, parent_decl: Decl.Index, full: Ast.full.W
fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.Index) Oom!void {
const ast = w.file.get_ast();
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- const token_tags = ast.tokens.items(.tag);
- const node_datas = ast.nodes.items(.data);
for (members) |member_node| {
- const name_token = switch (node_tags[member_node]) {
+ const name_token = switch (ast.nodeTag(member_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
- => main_tokens[member_node] + 1,
+ => ast.nodeMainToken(member_node) + 1,
.fn_proto_simple,
.fn_proto_multi,
@@ -1078,17 +1053,19 @@ fn scanDecls(w: *Walk, namespace: *Scope.Namespace, members: []const Ast.Node.In
.fn_proto,
.fn_decl,
=> blk: {
- const ident = main_tokens[member_node] + 1;
- if (token_tags[ident] != .identifier) continue;
+ const ident = ast.nodeMainToken(member_node) + 1;
+ if (ast.tokenTag(ident) != .identifier) continue;
break :blk ident;
},
.test_decl => {
- const ident_token = node_datas[member_node].lhs;
- const is_doctest = token_tags[ident_token] == .identifier;
- if (is_doctest) {
- const token_bytes = ast.tokenSlice(ident_token);
- try namespace.doctests.put(gpa, token_bytes, member_node);
+ const opt_ident_token = ast.nodeData(member_node).opt_token_and_node[0];
+ if (opt_ident_token.unwrap()) |ident_token| {
+ const is_doctest = ast.tokenTag(ident_token) == .identifier;
+ if (is_doctest) {
+ const token_bytes = ast.tokenSlice(ident_token);
+ try namespace.doctests.put(gpa, token_bytes, member_node);
+ }
}
continue;
},
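
The unanimity vote in categorize_switch above is the subtlest logic in this file. A condensed model of the voting shape only; the real function walks case nodes via `fullSwitchCase` and additionally tracks `any_type`, so this is a sketch under simplified inputs:

const std = @import("std");

const Category = enum { type_type, error_set, global_const };

fn categorizeSwitch(cases: []const Category) Category {
    if (cases.len == 0) return .global_const;
    var all_type_type = true;
    var all_error_set = true;
    for (cases) |c| {
        if (c != .type_type) all_type_type = false;
        if (c != .error_set) all_error_set = false;
    }
    if (all_type_type) return .type_type;
    if (all_error_set) return .error_set;
    return .global_const;
}

test "only unanimous prongs decide the category" {
    try std.testing.expectEqual(Category.type_type, categorizeSwitch(&.{ .type_type, .type_type }));
    try std.testing.expectEqual(Category.global_const, categorizeSwitch(&.{ .type_type, .error_set }));
}
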
lib/std/zig/Ast.zig
@@ -8,15 +8,12 @@
source: [:0]const u8,
tokens: TokenList.Slice,
-/// The root AST node is assumed to be index 0. Since there can be no
-/// references to the root node, this means 0 is available to indicate null.
nodes: NodeList.Slice,
-extra_data: []Node.Index,
+extra_data: []u32,
mode: Mode = .zig,
errors: []const Error,
-pub const TokenIndex = u32;
pub const ByteOffset = u32;
pub const TokenList = std.MultiArrayList(struct {
@@ -25,6 +22,91 @@ pub const TokenList = std.MultiArrayList(struct {
});
pub const NodeList = std.MultiArrayList(Node);
+/// Index into `tokens`.
+pub const TokenIndex = u32;
+
+/// Index into `tokens`, or null.
+pub const OptionalTokenIndex = enum(u32) {
+ none = std.math.maxInt(u32),
+ _,
+
+ pub fn unwrap(oti: OptionalTokenIndex) ?TokenIndex {
+ return if (oti == .none) null else @intFromEnum(oti);
+ }
+
+ pub fn fromToken(ti: TokenIndex) OptionalTokenIndex {
+ return @enumFromInt(ti);
+ }
+
+ pub fn fromOptional(oti: ?TokenIndex) OptionalTokenIndex {
+ return if (oti) |ti| @enumFromInt(ti) else .none;
+ }
+};
+
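
Encoding null as `maxInt(u32)` keeps the optional exactly as wide as the index it wraps, which is what lets it sit in token/node tables and `extra_data` records without growing them; a quick size check using a local copy of the enum:

const std = @import("std");

test "the optional index costs no extra bytes" {
    const OptionalTokenIndex = enum(u32) { none = std.math.maxInt(u32), _ };
    try std.testing.expectEqual(@as(usize, 4), @sizeOf(OptionalTokenIndex));
    try std.testing.expectEqual(@as(usize, 8), @sizeOf(?u32)); // the rejected alternative
}
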
+/// A relative token index.
+pub const TokenOffset = enum(i32) {
+ zero = 0,
+ _,
+
+ pub fn init(base: TokenIndex, destination: TokenIndex) TokenOffset {
+ const base_i64: i64 = base;
+ const destination_i64: i64 = destination;
+ return @enumFromInt(destination_i64 - base_i64);
+ }
+
+ pub fn toOptional(to: TokenOffset) OptionalTokenOffset {
+ const result: OptionalTokenOffset = @enumFromInt(@intFromEnum(to));
+ assert(result != .none);
+ return result;
+ }
+
+ pub fn toAbsolute(offset: TokenOffset, base: TokenIndex) TokenIndex {
+ return @intCast(@as(i64, base) + @intFromEnum(offset));
+ }
+};
+
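
`TokenOffset` stores a signed distance so a token reference can point backwards from its base. A standalone round-trip check of `init`/`toAbsolute`, with u32 standing in for TokenIndex and the method bodies copied from above:

const std = @import("std");

const TokenOffset = enum(i32) {
    zero = 0,
    _,

    fn init(base: u32, destination: u32) TokenOffset {
        const base_i64: i64 = base;
        const destination_i64: i64 = destination;
        return @enumFromInt(destination_i64 - base_i64);
    }

    fn toAbsolute(offset: TokenOffset, base: u32) u32 {
        return @intCast(@as(i64, base) + @intFromEnum(offset));
    }
};

test "init/toAbsolute round-trips, backwards included" {
    const off = TokenOffset.init(10, 7); // destination before the base
    try std.testing.expectEqual(@as(u32, 7), off.toAbsolute(10));
    try std.testing.expectEqual(@as(u32, 12), TokenOffset.init(10, 12).toAbsolute(10));
}
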
+/// A relative token index, or null.
+pub const OptionalTokenOffset = enum(i32) {
+ none = std.math.maxInt(i32),
+ _,
+
+ pub fn unwrap(oto: OptionalTokenOffset) ?TokenOffset {
+ return if (oto == .none) null else @enumFromInt(@intFromEnum(oto));
+ }
+};
+
+pub fn tokenTag(tree: *const Ast, token_index: TokenIndex) Token.Tag {
+ return tree.tokens.items(.tag)[token_index];
+}
+
+pub fn tokenStart(tree: *const Ast, token_index: TokenIndex) ByteOffset {
+ return tree.tokens.items(.start)[token_index];
+}
+
+pub fn nodeTag(tree: *const Ast, node: Node.Index) Node.Tag {
+ return tree.nodes.items(.tag)[@intFromEnum(node)];
+}
+
+pub fn nodeMainToken(tree: *const Ast, node: Node.Index) TokenIndex {
+ return tree.nodes.items(.main_token)[@intFromEnum(node)];
+}
+
+pub fn nodeData(tree: *const Ast, node: Node.Index) Node.Data {
+ return tree.nodes.items(.data)[@intFromEnum(node)];
+}
+
+pub fn isTokenPrecededByTags(
+ tree: *const Ast,
+ ti: TokenIndex,
+ expected_token_tags: []const Token.Tag,
+) bool {
+ return std.mem.endsWith(
+ Token.Tag,
+ tree.tokens.items(.tag)[0..ti],
+ expected_token_tags,
+ );
+}
+
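
`isTokenPrecededByTags` gets "is this token directly preceded by these tags" from a single `std.mem.endsWith` over the tag-array prefix; a tiny standalone check of the trick, with tags reduced to bytes:

const std = @import("std");

test "endsWith over the tag prefix means directly preceded by" {
    const tags = [_]u8{ 'x', 'p', 'f' }; // e.g. <other>, keyword_pub, keyword_fn
    const ti: usize = 2; // the keyword_fn token
    try std.testing.expect(std.mem.endsWith(u8, tags[0..ti], &.{'p'}));
    try std.testing.expect(!std.mem.endsWith(u8, tags[0..ti], &.{'x'}));
}
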
pub const Location = struct {
line: usize,
column: usize,
@@ -77,8 +159,7 @@ pub fn parse(gpa: Allocator, source: [:0]const u8, mode: Mode) Allocator.Error!A
var parser: Parse = .{
.source = source,
.gpa = gpa,
- .token_tags = tokens.items(.tag),
- .token_starts = tokens.items(.start),
+ .tokens = tokens.slice(),
.errors = .{},
.nodes = .{},
.extra_data = .{},
@@ -143,7 +224,7 @@ pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenInde
.line_start = start_offset,
.line_end = self.source.len,
};
- const token_start = self.tokens.items(.start)[token_index];
+ const token_start = self.tokenStart(token_index);
// Scan to by line until we go past the token start
while (std.mem.indexOfScalarPos(u8, self.source, loc.line_start, '\n')) |i| {
@@ -175,9 +256,7 @@ pub fn tokenLocation(self: Ast, start_offset: ByteOffset, token_index: TokenInde
}
pub fn tokenSlice(tree: Ast, token_index: TokenIndex) []const u8 {
- const token_starts = tree.tokens.items(.start);
- const token_tags = tree.tokens.items(.tag);
- const token_tag = token_tags[token_index];
+ const token_tag = tree.tokenTag(token_index);
// Many tokens can be determined entirely by their tag.
if (token_tag.lexeme()) |lexeme| {
@@ -187,33 +266,54 @@ pub fn tokenSlice(tree: Ast, token_index: TokenIndex) []const u8 {
// For some tokens, re-tokenization is needed to find the end.
var tokenizer: std.zig.Tokenizer = .{
.buffer = tree.source,
- .index = token_starts[token_index],
+ .index = tree.tokenStart(token_index),
};
const token = tokenizer.next();
assert(token.tag == token_tag);
return tree.source[token.loc.start..token.loc.end];
}
-pub fn extraData(tree: Ast, index: usize, comptime T: type) T {
+pub fn extraDataSlice(tree: Ast, range: Node.SubRange, comptime T: type) []const T {
+ return @ptrCast(tree.extra_data[@intFromEnum(range.start)..@intFromEnum(range.end)]);
+}
+
+pub fn extraDataSliceWithLen(tree: Ast, start: ExtraIndex, len: u32, comptime T: type) []const T {
+ return @ptrCast(tree.extra_data[@intFromEnum(start)..][0..len]);
+}
+
+pub fn extraData(tree: Ast, index: ExtraIndex, comptime T: type) T {
const fields = std.meta.fields(T);
var result: T = undefined;
inline for (fields, 0..) |field, i| {
- comptime assert(field.type == Node.Index);
- @field(result, field.name) = tree.extra_data[index + i];
+ @field(result, field.name) = switch (field.type) {
+ Node.Index,
+ Node.OptionalIndex,
+ OptionalTokenIndex,
+ ExtraIndex,
+ => @enumFromInt(tree.extra_data[@intFromEnum(index) + i]),
+ TokenIndex => tree.extra_data[@intFromEnum(index) + i],
+ else => @compileError("unexpected field type: " ++ @typeName(field.type)),
+ };
}
return result;
}
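
The rewritten `extraData` decodes one u32 word per field and re-types it from the field's declared type. A self-contained model over a hypothetical two-field record and a local index type; the real function also handles the optional and token index types listed above:

const std = @import("std");

const NodeIndex = enum(u32) { _ };

fn extraData(words: []const u32, index: usize, comptime T: type) T {
    var result: T = undefined;
    inline for (std.meta.fields(T), 0..) |field, i| {
        @field(result, field.name) = switch (field.type) {
            NodeIndex => @enumFromInt(words[index + i]),
            u32 => words[index + i],
            else => @compileError("unexpected field type"),
        };
    }
    return result;
}

test "one u32 word per field, re-typed by field type" {
    const ArrayTypeSentinel = struct { elem_type: NodeIndex, sentinel: NodeIndex };
    const words = [_]u32{ 5, 6, 7 };
    const decoded = extraData(&words, 1, ArrayTypeSentinel);
    try std.testing.expectEqual(@as(u32, 6), @intFromEnum(decoded.elem_type));
    try std.testing.expectEqual(@as(u32, 7), @intFromEnum(decoded.sentinel));
}
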
+fn loadOptionalNodesIntoBuffer(comptime size: usize, buffer: *[size]Node.Index, items: [size]Node.OptionalIndex) []Node.Index {
+ for (buffer, items, 0..) |*node, opt_node, i| {
+ node.* = opt_node.unwrap() orelse return buffer[0..i];
+ }
+ return buffer[0..];
+}
+
pub fn rootDecls(tree: Ast) []const Node.Index {
- const nodes_data = tree.nodes.items(.data);
- return switch (tree.mode) {
- .zig => tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs],
- .zon => (&nodes_data[0].lhs)[0..1],
- };
+ switch (tree.mode) {
+ .zig => return tree.extraDataSlice(tree.nodeData(.root).extra_range, Node.Index),
+ // Ensure that the returned slice points into the existing memory of the Ast
+ .zon => return (&tree.nodes.items(.data)[@intFromEnum(Node.Index.root)].node)[0..1],
+ }
}
pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
- const token_tags = tree.tokens.items(.tag);
switch (parse_error.tag) {
.asterisk_after_ptr_deref => {
// Note that the token will point at the `.*` but ideally the source
@@ -228,72 +328,72 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
},
.expected_block => {
return stream.print("expected block, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_assignment => {
return stream.print("expected block or assignment, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_expr => {
return stream.print("expected block or expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_block_or_field => {
return stream.print("expected block or field, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_container_members => {
return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
- token_tags[parse_error.token].symbol(),
+ tree.tokenTag(parse_error.token).symbol(),
});
},
.expected_expr => {
return stream.print("expected expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_expr_or_assignment => {
return stream.print("expected expression or assignment, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_expr_or_var_decl => {
return stream.print("expected expression or var decl, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_fn => {
return stream.print("expected function, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_inlinable => {
return stream.print("expected 'while' or 'for', found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_labelable => {
return stream.print("expected 'while', 'for', 'inline', or '{{', found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_param_list => {
return stream.print("expected parameter list, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_prefix_expr => {
return stream.print("expected prefix expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_primary_type_expr => {
return stream.print("expected primary type expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_pub_item => {
@@ -301,7 +401,7 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
},
.expected_return_type => {
return stream.print("expected return type expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_semi_or_else => {
@@ -312,37 +412,37 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
},
.expected_statement => {
return stream.print("expected statement, found '{s}'", .{
- token_tags[parse_error.token].symbol(),
+ tree.tokenTag(parse_error.token).symbol(),
});
},
.expected_suffix_op => {
return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_type_expr => {
return stream.print("expected type expression, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_var_decl => {
return stream.print("expected variable declaration, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_var_decl_or_fn => {
return stream.print("expected variable declaration or function, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_loop_payload => {
return stream.print("expected loop payload, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.expected_container => {
return stream.print("expected a struct, enum or union, found '{s}'", .{
- token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)].symbol(),
+ tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev)).symbol(),
});
},
.extern_fn_body => {
@@ -365,7 +465,7 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
},
.ptr_mod_on_array_child_type => {
return stream.print("pointer modifier '{s}' not allowed on array child type", .{
- token_tags[parse_error.token].symbol(),
+ tree.tokenTag(parse_error.token).symbol(),
});
},
.invalid_bit_range => {
@@ -421,7 +521,7 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
return stream.writeAll("expected field initializer");
},
.mismatched_binary_op_whitespace => {
- return stream.print("binary operator `{s}` has whitespace on one side, but not the other.", .{token_tags[parse_error.token].lexeme().?});
+ return stream.print("binary operator `{s}` has whitespace on one side, but not the other.", .{tree.tokenTag(parse_error.token).lexeme().?});
},
.invalid_ampersand_ampersand => {
return stream.writeAll("ambiguous use of '&&'; use 'and' for logical AND, or change whitespace to ' & &' for bitwise AND");
@@ -472,7 +572,7 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
},
.expected_token => {
- const found_tag = token_tags[parse_error.token + @intFromBool(parse_error.token_is_prev)];
+ const found_tag = tree.tokenTag(parse_error.token + @intFromBool(parse_error.token_is_prev));
const expected_symbol = parse_error.extra.expected_tag.symbol();
switch (found_tag) {
.invalid => return stream.print("expected '{s}', found invalid bytes", .{
@@ -487,13 +587,9 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
}
pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
- const tags = tree.nodes.items(.tag);
- const datas = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- var end_offset: TokenIndex = 0;
+ var end_offset: u32 = 0;
var n = node;
- while (true) switch (tags[n]) {
+ while (true) switch (tree.nodeTag(n)) {
.root => return 0,
.test_decl,
@@ -537,7 +633,7 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.array_type,
.array_type_sentinel,
.error_value,
- => return main_tokens[n] - end_offset,
+ => return tree.nodeMainToken(n) - end_offset,
.array_init_dot,
.array_init_dot_comma,
@@ -548,11 +644,9 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.struct_init_dot_two,
.struct_init_dot_two_comma,
.enum_literal,
- => return main_tokens[n] - 1 - end_offset,
+ => return tree.nodeMainToken(n) - 1 - end_offset,
.@"catch",
- .field_access,
- .unwrap_optional,
.equal_equal,
.bang_equal,
.less_than,
@@ -601,33 +695,37 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.bool_and,
.bool_or,
.slice_open,
- .slice,
- .slice_sentinel,
- .deref,
.array_access,
.array_init_one,
.array_init_one_comma,
- .array_init,
- .array_init_comma,
+ .switch_range,
+ .error_union,
+ => n = tree.nodeData(n).node_and_node[0],
+
+ .for_range,
+ .call_one,
+ .call_one_comma,
.struct_init_one,
.struct_init_one_comma,
+ => n = tree.nodeData(n).node_and_opt_node[0],
+
+ .field_access,
+ .unwrap_optional,
+ => n = tree.nodeData(n).node_and_token[0],
+
+ .slice,
+ .slice_sentinel,
+ .array_init,
+ .array_init_comma,
.struct_init,
.struct_init_comma,
- .call_one,
- .call_one_comma,
.call,
.call_comma,
- .switch_range,
- .for_range,
- .error_union,
- => n = datas[n].lhs,
+ => n = tree.nodeData(n).node_and_extra[0],
- .assign_destructure => {
- const extra_idx = datas[n].lhs;
- const lhs_len = tree.extra_data[extra_idx];
- assert(lhs_len > 0);
- n = tree.extra_data[extra_idx + 1];
- },
+ .deref => n = tree.nodeData(n).node,
+
+ .assign_destructure => n = tree.assignDestructure(n).ast.variables[0],
.fn_decl,
.fn_proto_simple,
@@ -635,10 +733,10 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.fn_proto_one,
.fn_proto,
=> {
- var i = main_tokens[n]; // fn token
+ var i = tree.nodeMainToken(n); // fn token
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern,
.keyword_export,
.keyword_pub,
@@ -654,30 +752,33 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
},
.@"usingnamespace" => {
- const main_token = main_tokens[n];
- if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
- end_offset += 1;
- }
+ const main_token: TokenIndex = tree.nodeMainToken(n);
+ const has_visib_token = tree.isTokenPrecededByTags(main_token, &.{.keyword_pub});
+ end_offset += @intFromBool(has_visib_token);
return main_token - end_offset;
},
.async_call_one,
.async_call_one_comma,
+ => {
+ end_offset += 1; // async token
+ n = tree.nodeData(n).node_and_opt_node[0];
+ },
+
.async_call,
.async_call_comma,
=> {
end_offset += 1; // async token
- n = datas[n].lhs;
+ n = tree.nodeData(n).node_and_extra[0];
},
.container_field_init,
.container_field_align,
.container_field,
=> {
- const name_token = main_tokens[n];
- if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
- end_offset += 1;
- }
+ const name_token = tree.nodeMainToken(n);
+ const has_comptime_token = tree.isTokenPrecededByTags(name_token, &.{.keyword_comptime});
+ end_offset += @intFromBool(has_comptime_token);
return name_token - end_offset;
},
@@ -686,10 +787,10 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.simple_var_decl,
.aligned_var_decl,
=> {
- var i = main_tokens[n]; // mut token
+ var i = tree.nodeMainToken(n); // mut token
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern,
.keyword_export,
.keyword_comptime,
@@ -710,10 +811,8 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.block_two_semicolon,
=> {
// Look for a label.
- const lbrace = main_tokens[n];
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ const lbrace = tree.nodeMainToken(n);
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
end_offset += 2;
}
return lbrace - end_offset;
@@ -732,8 +831,8 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.tagged_union_enum_tag,
.tagged_union_enum_tag_trailing,
=> {
- const main_token = main_tokens[n];
- switch (token_tags[main_token -| 1]) {
+ const main_token = tree.nodeMainToken(n);
+ switch (tree.tokenTag(main_token -| 1)) {
.keyword_packed, .keyword_extern => end_offset += 1,
else => {},
}
@@ -744,36 +843,26 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.ptr_type_sentinel,
.ptr_type,
.ptr_type_bit_range,
- => return main_tokens[n] - end_offset,
+ => return tree.nodeMainToken(n) - end_offset,
- .switch_case_one => {
- if (datas[n].lhs == 0) {
- return main_tokens[n] - 1 - end_offset; // else token
- } else {
- n = datas[n].lhs;
- }
- },
- .switch_case_inline_one => {
- if (datas[n].lhs == 0) {
- return main_tokens[n] - 2 - end_offset; // else token
+ .switch_case_one,
+ .switch_case_inline_one,
+ .switch_case,
+ .switch_case_inline,
+ => {
+ const full_switch = tree.fullSwitchCase(n).?;
+ if (full_switch.inline_token) |inline_token| {
+ return inline_token;
+ } else if (full_switch.ast.values.len == 0) {
+ return full_switch.ast.arrow_token - 1 - end_offset; // else token
} else {
- return firstToken(tree, datas[n].lhs) - 1;
+ n = full_switch.ast.values[0];
}
},
- .switch_case => {
- const extra = tree.extraData(datas[n].lhs, Node.SubRange);
- assert(extra.end - extra.start > 0);
- n = tree.extra_data[extra.start];
- },
- .switch_case_inline => {
- const extra = tree.extraData(datas[n].lhs, Node.SubRange);
- assert(extra.end - extra.start > 0);
- return firstToken(tree, tree.extra_data[extra.start]) - 1;
- },
.asm_output, .asm_input => {
- assert(token_tags[main_tokens[n] - 1] == .l_bracket);
- return main_tokens[n] - 1 - end_offset;
+ assert(tree.tokenTag(tree.nodeMainToken(n) - 1) == .l_bracket);
+ return tree.nodeMainToken(n) - 1 - end_offset;
},
.while_simple,
@@ -783,13 +872,13 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
.@"for",
=> {
// Look for a label and inline.
- const main_token = main_tokens[n];
+ const main_token = tree.nodeMainToken(n);
var result = main_token;
- if (token_tags[result -| 1] == .keyword_inline) {
- result -= 1;
+ if (tree.isTokenPrecededByTags(result, &.{.keyword_inline})) {
+ result = result - 1;
}
- if (token_tags[result -| 1] == .colon) {
- result -|= 2;
+ if (tree.isTokenPrecededByTags(result, &.{ .identifier, .colon })) {
+ result = result - 2;
}
return result - end_offset;
},
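
// Sketch (assumed shape): `isTokenPrecededByTags` is taken to test, right to
// left, that the tokens immediately before `ti` match `expected_token_tags` in
// source order -- exactly what the manual `token_tags[i - 1] == ...` chains it
// replaces used to do:
//
//     pub fn isTokenPrecededByTags(tree: *const Ast, ti: TokenIndex, expected_token_tags: []const Token.Tag) bool {
//         return std.mem.endsWith(Token.Tag, tree.tokens.items(.tag)[0..ti], expected_token_tags);
//     }
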
@@ -797,14 +886,10 @@ pub fn firstToken(tree: Ast, node: Node.Index) TokenIndex {
}
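
// The untyped `lhs`/`rhs` pair is gone: judging from the payload names used
// above, `Node.Data` is now a union whose active field is implied by the node
// tag, so every access names the shape it expects. A minimal consumer sketch:
//
//     switch (tree.nodeTag(n)) {
//         // binary operators store two child nodes
//         .add => {
//             const lhs, const rhs = tree.nodeData(n).node_and_node;
//             _ = lhs; _ = rhs;
//         },
//         // `a.b` stores the operand node plus the identifier token
//         .field_access => {
//             const operand, const name_token = tree.nodeData(n).node_and_token;
//             _ = operand; _ = name_token;
//         },
//         else => {},
//     }
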
pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
- const tags = tree.nodes.items(.tag);
- const datas = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
var n = node;
- var end_offset: TokenIndex = 0;
- while (true) switch (tags[n]) {
- .root => return @as(TokenIndex, @intCast(tree.tokens.len - 1)),
+ var end_offset: u32 = 0;
+ while (true) switch (tree.nodeTag(n)) {
+ .root => return @intCast(tree.tokens.len - 1),
.@"usingnamespace",
.bool_not,
@@ -819,11 +904,8 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.@"resume",
.@"nosuspend",
.@"comptime",
- => n = datas[n].lhs,
+ => n = tree.nodeData(n).node,
- .test_decl,
- .@"errdefer",
- .@"defer",
.@"catch",
.equal_equal,
.bang_equal,
@@ -849,7 +931,6 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.assign_add_sat,
.assign_sub_sat,
.assign,
- .assign_destructure,
.merge_error_sets,
.mul,
.div,
@@ -873,44 +954,53 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.@"orelse",
.bool_and,
.bool_or,
- .anyframe_type,
.error_union,
.if_simple,
.while_simple,
.for_simple,
- .fn_proto_simple,
- .fn_proto_multi,
- .fn_proto_one,
- .fn_proto,
.fn_decl,
+ .array_type,
+ .switch_range,
+ => n = tree.nodeData(n).node_and_node[1],
+
+ .test_decl, .@"errdefer" => n = tree.nodeData(n).opt_token_and_node[1],
+ .@"defer" => n = tree.nodeData(n).node,
+ .anyframe_type => n = tree.nodeData(n).token_and_node[1],
+
+ .switch_case_one,
+ .switch_case_inline_one,
.ptr_type_aligned,
.ptr_type_sentinel,
+ => n = tree.nodeData(n).opt_node_and_node[1],
+
+ .assign_destructure,
.ptr_type,
.ptr_type_bit_range,
- .array_type,
- .switch_case_one,
- .switch_case_inline_one,
.switch_case,
.switch_case_inline,
- .switch_range,
- => n = datas[n].rhs,
+ => n = tree.nodeData(n).extra_and_node[1],
- .for_range => if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else {
- return main_tokens[n] + end_offset;
+ .fn_proto_simple => n = tree.nodeData(n).opt_node_and_opt_node[1].unwrap().?,
+ .fn_proto_multi,
+ .fn_proto_one,
+ .fn_proto,
+ => n = tree.nodeData(n).extra_and_opt_node[1].unwrap().?,
+
+ .for_range => {
+ n = tree.nodeData(n).node_and_opt_node[1].unwrap() orelse {
+ return tree.nodeMainToken(n) + end_offset;
+ };
},
.field_access,
.unwrap_optional,
- .grouped_expression,
- .multiline_string_literal,
- .error_set_decl,
.asm_simple,
- .asm_output,
- .asm_input,
- .error_value,
- => return datas[n].rhs + end_offset,
+ => return tree.nodeData(n).node_and_token[1] + end_offset,
+ .error_set_decl => return tree.nodeData(n).token + end_offset,
+ .grouped_expression, .asm_input => return tree.nodeData(n).node_and_token[1] + end_offset,
+ .multiline_string_literal => return tree.nodeData(n).token_and_token[1] + end_offset,
+ .asm_output => return tree.nodeData(n).opt_node_and_token[1] + end_offset,
+ .error_value => return tree.nodeData(n).opt_token_and_opt_token[1].unwrap().? + end_offset,
.anyframe_literal,
.char_literal,
@@ -920,80 +1010,88 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.deref,
.enum_literal,
.string_literal,
- => return main_tokens[n] + end_offset,
+ => return tree.nodeMainToken(n) + end_offset,
- .@"return" => if (datas[n].lhs != 0) {
- n = datas[n].lhs;
- } else {
- return main_tokens[n] + end_offset;
+ .@"return" => {
+ n = tree.nodeData(n).opt_node.unwrap() orelse {
+ return tree.nodeMainToken(n) + end_offset;
+ };
},
.call, .async_call => {
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const params = tree.extraData(extra_index, Node.SubRange);
+ assert(params.start != params.end);
end_offset += 1; // for the rparen
- const params = tree.extraData(datas[n].rhs, Node.SubRange);
- assert(params.end - params.start > 0);
- n = tree.extra_data[params.end - 1]; // last parameter
+ n = @enumFromInt(tree.extra_data[@intFromEnum(params.end) - 1]); // last parameter
},
.tagged_union_enum_tag => {
- const members = tree.extraData(datas[n].rhs, Node.SubRange);
- if (members.end - members.start == 0) {
+ const arg, const extra_index = tree.nodeData(n).node_and_extra;
+ const members = tree.extraData(extra_index, Node.SubRange);
+ if (members.start == members.end) {
end_offset += 4; // for the rparen + rparen + lbrace + rbrace
- n = datas[n].lhs;
+ n = arg;
} else {
end_offset += 1; // for the rbrace
- n = tree.extra_data[members.end - 1]; // last parameter
+ n = @enumFromInt(tree.extra_data[@intFromEnum(members.end) - 1]); // last parameter
}
},
.call_comma,
.async_call_comma,
.tagged_union_enum_tag_trailing,
=> {
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const params = tree.extraData(extra_index, Node.SubRange);
+ assert(params.start != params.end);
end_offset += 2; // for the comma/semicolon + rparen/rbrace
- const params = tree.extraData(datas[n].rhs, Node.SubRange);
- assert(params.end > params.start);
- n = tree.extra_data[params.end - 1]; // last parameter
+ n = @enumFromInt(tree.extra_data[@intFromEnum(params.end) - 1]); // last parameter
},
.@"switch" => {
- const cases = tree.extraData(datas[n].rhs, Node.SubRange);
- if (cases.end - cases.start == 0) {
+ const condition, const extra_index = tree.nodeData(n).node_and_extra;
+ const cases = tree.extraData(extra_index, Node.SubRange);
+ if (cases.start == cases.end) {
end_offset += 3; // rparen, lbrace, rbrace
- n = datas[n].lhs; // condition expression
+ n = condition;
} else {
end_offset += 1; // for the rbrace
- n = tree.extra_data[cases.end - 1]; // last case
+ n = @enumFromInt(tree.extra_data[@intFromEnum(cases.end) - 1]); // last case
}
},
.container_decl_arg => {
- const members = tree.extraData(datas[n].rhs, Node.SubRange);
- if (members.end - members.start == 0) {
+ const arg, const extra_index = tree.nodeData(n).node_and_extra;
+ const members = tree.extraData(extra_index, Node.SubRange);
+ if (members.end == members.start) {
end_offset += 3; // for the rparen + lbrace + rbrace
- n = datas[n].lhs;
+ n = arg;
} else {
end_offset += 1; // for the rbrace
- n = tree.extra_data[members.end - 1]; // last parameter
+ n = @enumFromInt(tree.extra_data[@intFromEnum(members.end) - 1]); // last parameter
}
},
.@"asm" => {
- const extra = tree.extraData(datas[n].rhs, Node.Asm);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.Asm);
return extra.rparen + end_offset;
},
.array_init,
.struct_init,
=> {
- const elements = tree.extraData(datas[n].rhs, Node.SubRange);
- assert(elements.end - elements.start > 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const elements = tree.extraData(extra_index, Node.SubRange);
+ assert(elements.start != elements.end);
end_offset += 1; // for the rbrace
- n = tree.extra_data[elements.end - 1]; // last element
+ n = @enumFromInt(tree.extra_data[@intFromEnum(elements.end) - 1]); // last element
},
.array_init_comma,
.struct_init_comma,
.container_decl_arg_trailing,
.switch_comma,
=> {
- const members = tree.extraData(datas[n].rhs, Node.SubRange);
- assert(members.end - members.start > 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const members = tree.extraData(extra_index, Node.SubRange);
+ assert(members.start != members.end);
end_offset += 2; // for the comma + rbrace
- n = tree.extra_data[members.end - 1]; // last parameter
+ n = @enumFromInt(tree.extra_data[@intFromEnum(members.end) - 1]); // last parameter
},
.array_init_dot,
.struct_init_dot,
@@ -1002,9 +1100,10 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.tagged_union,
.builtin_call,
=> {
- assert(datas[n].rhs - datas[n].lhs > 0);
+ const range = tree.nodeData(n).extra_range;
+ assert(range.start != range.end);
end_offset += 1; // for the rbrace
- n = tree.extra_data[datas[n].rhs - 1]; // last statement
+ n = @enumFromInt(tree.extra_data[@intFromEnum(range.end) - 1]); // last statement
},
.array_init_dot_comma,
.struct_init_dot_comma,
@@ -1013,20 +1112,21 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.tagged_union_trailing,
.builtin_call_comma,
=> {
- assert(datas[n].rhs - datas[n].lhs > 0);
+ const range = tree.nodeData(n).extra_range;
+ assert(range.start != range.end);
end_offset += 2; // for the comma/semicolon + rbrace/rparen
- n = tree.extra_data[datas[n].rhs - 1]; // last member
+ n = @enumFromInt(tree.extra_data[@intFromEnum(range.end) - 1]); // last member
},
.call_one,
.async_call_one,
- .array_access,
=> {
- end_offset += 1; // for the rparen/rbracket
- if (datas[n].rhs == 0) {
- return main_tokens[n] + end_offset;
- }
- n = datas[n].rhs;
+ _, const first_param = tree.nodeData(n).node_and_opt_node;
+ end_offset += 1; // for the rparen
+ n = first_param.unwrap() orelse {
+ return tree.nodeMainToken(n) + end_offset;
+ };
},
+
.array_init_dot_two,
.block_two,
.builtin_call_two,
@@ -1034,14 +1134,15 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.container_decl_two,
.tagged_union_two,
=> {
- if (datas[n].rhs != 0) {
+ const opt_lhs, const opt_rhs = tree.nodeData(n).opt_node_and_opt_node;
+ if (opt_rhs.unwrap()) |rhs| {
end_offset += 1; // for the rparen/rbrace
- n = datas[n].rhs;
- } else if (datas[n].lhs != 0) {
+ n = rhs;
+ } else if (opt_lhs.unwrap()) |lhs| {
end_offset += 1; // for the rparen/rbrace
- n = datas[n].lhs;
+ n = lhs;
} else {
- switch (tags[n]) {
+ switch (tree.nodeTag(n)) {
.array_init_dot_two,
.block_two,
.struct_init_dot_two,
@@ -1049,17 +1150,17 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
.container_decl_two => {
var i: u32 = 2; // lbrace + rbrace
- while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+ while (tree.tokenTag(tree.nodeMainToken(n) + i) == .container_doc_comment) i += 1;
end_offset += i;
},
.tagged_union_two => {
var i: u32 = 5; // (enum) {}
- while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+ while (tree.tokenTag(tree.nodeMainToken(n) + i) == .container_doc_comment) i += 1;
end_offset += i;
},
else => unreachable,
}
- return main_tokens[n] + end_offset;
+ return tree.nodeMainToken(n) + end_offset;
}
},
.array_init_dot_two_comma,
@@ -1069,341 +1170,345 @@ pub fn lastToken(tree: Ast, node: Node.Index) TokenIndex {
.container_decl_two_trailing,
.tagged_union_two_trailing,
=> {
+ const opt_lhs, const opt_rhs = tree.nodeData(n).opt_node_and_opt_node;
end_offset += 2; // for the comma/semicolon + rbrace/rparen
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else if (datas[n].lhs != 0) {
- n = datas[n].lhs;
+ if (opt_rhs.unwrap()) |rhs| {
+ n = rhs;
+ } else if (opt_lhs.unwrap()) |lhs| {
+ n = lhs;
} else {
unreachable;
}
},
.simple_var_decl => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else if (datas[n].lhs != 0) {
- n = datas[n].lhs;
+ const type_node, const init_node = tree.nodeData(n).opt_node_and_opt_node;
+ if (init_node.unwrap()) |rhs| {
+ n = rhs;
+ } else if (type_node.unwrap()) |lhs| {
+ n = lhs;
} else {
end_offset += 1; // from mut token to name
- return main_tokens[n] + end_offset;
+ return tree.nodeMainToken(n) + end_offset;
}
},
.aligned_var_decl => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else if (datas[n].lhs != 0) {
- end_offset += 1; // for the rparen
- n = datas[n].lhs;
+ const align_node, const init_node = tree.nodeData(n).node_and_opt_node;
+ if (init_node.unwrap()) |rhs| {
+ n = rhs;
} else {
- end_offset += 1; // from mut token to name
- return main_tokens[n] + end_offset;
+ end_offset += 1; // for the rparen
+ n = align_node;
}
},
.global_var_decl => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
+ const extra_index, const init_node = tree.nodeData(n).extra_and_opt_node;
+ if (init_node.unwrap()) |rhs| {
+ n = rhs;
} else {
- const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
- if (extra.section_node != 0) {
+ const extra = tree.extraData(extra_index, Node.GlobalVarDecl);
+ if (extra.section_node.unwrap()) |section_node| {
end_offset += 1; // for the rparen
- n = extra.section_node;
- } else if (extra.align_node != 0) {
+ n = section_node;
+ } else if (extra.align_node.unwrap()) |align_node| {
end_offset += 1; // for the rparen
- n = extra.align_node;
- } else if (extra.type_node != 0) {
- n = extra.type_node;
+ n = align_node;
+ } else if (extra.type_node.unwrap()) |type_node| {
+ n = type_node;
} else {
end_offset += 1; // from mut token to name
- return main_tokens[n] + end_offset;
+ return tree.nodeMainToken(n) + end_offset;
}
}
},
.local_var_decl => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
+ const extra_index, const init_node = tree.nodeData(n).extra_and_opt_node;
+ if (init_node.unwrap()) |rhs| {
+ n = rhs;
} else {
- const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
- assert(extra.align_node != 0);
+ const extra = tree.extraData(extra_index, Node.LocalVarDecl);
end_offset += 1; // for the rparen
n = extra.align_node;
}
},
.container_field_init => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else {
- assert(datas[n].lhs != 0);
- n = datas[n].lhs;
- }
+ const type_expr, const value_expr = tree.nodeData(n).node_and_opt_node;
+ n = value_expr.unwrap() orelse type_expr;
},
- .container_field_align => {
- assert(datas[n].rhs != 0);
- end_offset += 1; // for the rparen
- n = datas[n].rhs;
+
+ .array_access,
+ .array_init_one,
+ .container_field_align,
+ => {
+ _, const rhs = tree.nodeData(n).node_and_node;
+ end_offset += 1; // for the rbracket/rbrace/rparen
+ n = rhs;
},
.container_field => {
- const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
- assert(extra.value_expr != 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.ContainerField);
n = extra.value_expr;
},
- .array_init_one,
- .struct_init_one,
- => {
+ .struct_init_one => {
+ _, const first_field = tree.nodeData(n).node_and_opt_node;
end_offset += 1; // rbrace
- if (datas[n].rhs == 0) {
- return main_tokens[n] + end_offset;
- } else {
- n = datas[n].rhs;
- }
+ n = first_field.unwrap() orelse {
+ return tree.nodeMainToken(n) + end_offset;
+ };
+ },
+ .slice_open => {
+ _, const start_node = tree.nodeData(n).node_and_node;
+ end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
+ n = start_node;
+ },
+ .array_init_one_comma => {
+ _, const first_element = tree.nodeData(n).node_and_node;
+ end_offset += 2; // comma + rbrace
+ n = first_element;
},
- .slice_open,
.call_one_comma,
.async_call_one_comma,
- .array_init_one_comma,
.struct_init_one_comma,
=> {
+ _, const first_field = tree.nodeData(n).node_and_opt_node;
end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
- n = datas[n].rhs;
- assert(n != 0);
+ n = first_field.unwrap().?;
},
.slice => {
- const extra = tree.extraData(datas[n].rhs, Node.Slice);
- assert(extra.end != 0); // should have used slice_open
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.Slice);
end_offset += 1; // rbracket
n = extra.end;
},
.slice_sentinel => {
- const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
- assert(extra.sentinel != 0); // should have used slice
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.SliceSentinel);
end_offset += 1; // rbracket
n = extra.sentinel;
},
.@"continue", .@"break" => {
- if (datas[n].rhs != 0) {
- n = datas[n].rhs;
- } else if (datas[n].lhs != 0) {
- return datas[n].lhs + end_offset;
+ const opt_label, const opt_rhs = tree.nodeData(n).opt_token_and_opt_node;
+ if (opt_rhs.unwrap()) |rhs| {
+ n = rhs;
+ } else if (opt_label.unwrap()) |lhs| {
+ return lhs + end_offset;
} else {
- return main_tokens[n] + end_offset;
+ return tree.nodeMainToken(n) + end_offset;
}
},
.while_cont => {
- const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
- assert(extra.then_expr != 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.WhileCont);
n = extra.then_expr;
},
.@"while" => {
- const extra = tree.extraData(datas[n].rhs, Node.While);
- assert(extra.else_expr != 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.While);
n = extra.else_expr;
},
.@"if" => {
- const extra = tree.extraData(datas[n].rhs, Node.If);
- assert(extra.else_expr != 0);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.If);
n = extra.else_expr;
},
.@"for" => {
- const extra = @as(Node.For, @bitCast(datas[n].rhs));
- n = tree.extra_data[datas[n].lhs + extra.inputs + @intFromBool(extra.has_else)];
+ const extra_index, const extra = tree.nodeData(n).@"for";
+ const index = @intFromEnum(extra_index) + extra.inputs + @intFromBool(extra.has_else);
+ n = @enumFromInt(tree.extra_data[index]);
},
.array_type_sentinel => {
- const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
+ _, const extra_index = tree.nodeData(n).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.ArrayTypeSentinel);
n = extra.elem_type;
},
};
}
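
// Because `extra_data` is now a flat `[]u32` (see the `Context` change at the
// top of this commit), indices read back out of it must be re-tagged as enums
// by hand, which is why the hunks above pair every read with an
// `@enumFromInt`/`@intFromEnum` round trip. Sketch:
//
//     const params = tree.extraData(extra_index, Node.SubRange);
//     const last_param: Node.Index = @enumFromInt(tree.extra_data[@intFromEnum(params.end) - 1]);
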
pub fn tokensOnSameLine(tree: Ast, token1: TokenIndex, token2: TokenIndex) bool {
- const token_starts = tree.tokens.items(.start);
- const source = tree.source[token_starts[token1]..token_starts[token2]];
+ const source = tree.source[tree.tokenStart(token1)..tree.tokenStart(token2)];
return mem.indexOfScalar(u8, source, '\n') == null;
}
pub fn getNodeSource(tree: Ast, node: Node.Index) []const u8 {
- const token_starts = tree.tokens.items(.start);
const first_token = tree.firstToken(node);
const last_token = tree.lastToken(node);
- const start = token_starts[first_token];
- const end = token_starts[last_token] + tree.tokenSlice(last_token).len;
+ const start = tree.tokenStart(first_token);
+ const end = tree.tokenStart(last_token) + tree.tokenSlice(last_token).len;
return tree.source[start..end];
}
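
// Usage sketch: `getNodeSource` slices the original source between the first
// token's start and the end of the last token, so for a var decl node parsed
// from `const x: u32 = 42;` it would return "const x: u32 = 42" (the
// terminating semicolon is not part of the node):
//
//     const src = tree.getNodeSource(node);
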
pub fn globalVarDecl(tree: Ast, node: Node.Index) full.VarDecl {
- assert(tree.nodes.items(.tag)[node] == .global_var_decl);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.GlobalVarDecl);
+ assert(tree.nodeTag(node) == .global_var_decl);
+ const extra_index, const init_node = tree.nodeData(node).extra_and_opt_node;
+ const extra = tree.extraData(extra_index, Node.GlobalVarDecl);
return tree.fullVarDeclComponents(.{
.type_node = extra.type_node,
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
.section_node = extra.section_node,
- .init_node = data.rhs,
- .mut_token = tree.nodes.items(.main_token)[node],
+ .init_node = init_node,
+ .mut_token = tree.nodeMainToken(node),
});
}
pub fn localVarDecl(tree: Ast, node: Node.Index) full.VarDecl {
- assert(tree.nodes.items(.tag)[node] == .local_var_decl);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.LocalVarDecl);
+ assert(tree.nodeTag(node) == .local_var_decl);
+ const extra_index, const init_node = tree.nodeData(node).extra_and_opt_node;
+ const extra = tree.extraData(extra_index, Node.LocalVarDecl);
return tree.fullVarDeclComponents(.{
- .type_node = extra.type_node,
- .align_node = extra.align_node,
- .addrspace_node = 0,
- .section_node = 0,
- .init_node = data.rhs,
- .mut_token = tree.nodes.items(.main_token)[node],
+ .type_node = extra.type_node.toOptional(),
+ .align_node = extra.align_node.toOptional(),
+ .addrspace_node = .none,
+ .section_node = .none,
+ .init_node = init_node,
+ .mut_token = tree.nodeMainToken(node),
});
}
pub fn simpleVarDecl(tree: Ast, node: Node.Index) full.VarDecl {
- assert(tree.nodes.items(.tag)[node] == .simple_var_decl);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .simple_var_decl);
+ const type_node, const init_node = tree.nodeData(node).opt_node_and_opt_node;
return tree.fullVarDeclComponents(.{
- .type_node = data.lhs,
- .align_node = 0,
- .addrspace_node = 0,
- .section_node = 0,
- .init_node = data.rhs,
- .mut_token = tree.nodes.items(.main_token)[node],
+ .type_node = type_node,
+ .align_node = .none,
+ .addrspace_node = .none,
+ .section_node = .none,
+ .init_node = init_node,
+ .mut_token = tree.nodeMainToken(node),
});
}
pub fn alignedVarDecl(tree: Ast, node: Node.Index) full.VarDecl {
- assert(tree.nodes.items(.tag)[node] == .aligned_var_decl);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .aligned_var_decl);
+ const align_node, const init_node = tree.nodeData(node).node_and_opt_node;
return tree.fullVarDeclComponents(.{
- .type_node = 0,
- .align_node = data.lhs,
- .addrspace_node = 0,
- .section_node = 0,
- .init_node = data.rhs,
- .mut_token = tree.nodes.items(.main_token)[node],
+ .type_node = .none,
+ .align_node = align_node.toOptional(),
+ .addrspace_node = .none,
+ .section_node = .none,
+ .init_node = init_node,
+ .mut_token = tree.nodeMainToken(node),
});
}
pub fn assignDestructure(tree: Ast, node: Node.Index) full.AssignDestructure {
- const data = tree.nodes.items(.data)[node];
- const variable_count = tree.extra_data[data.lhs];
+ const extra_index, const value_expr = tree.nodeData(node).extra_and_node;
+ const variable_count = tree.extra_data[@intFromEnum(extra_index)];
return tree.fullAssignDestructureComponents(.{
- .variables = tree.extra_data[data.lhs + 1 ..][0..variable_count],
- .equal_token = tree.nodes.items(.main_token)[node],
- .value_expr = data.rhs,
+ .variables = tree.extraDataSliceWithLen(@enumFromInt(@intFromEnum(extra_index) + 1), variable_count, Node.Index),
+ .equal_token = tree.nodeMainToken(node),
+ .value_expr = value_expr,
});
}
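
// Sketch (assumed shape): `extraDataSliceWithLen` reinterprets `len` raw words
// of `extra_data` starting at `start` as typed indices. The `@ptrCast` is
// sound because `Node.Index` is an `enum(u32)` with the same layout as `u32`:
//
//     fn extraDataSliceWithLen(tree: Ast, start: ExtraIndex, len: u32, comptime T: type) []const T {
//         return @ptrCast(tree.extra_data[@intFromEnum(start)..][0..len]);
//     }
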
pub fn ifSimple(tree: Ast, node: Node.Index) full.If {
- assert(tree.nodes.items(.tag)[node] == .if_simple);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .if_simple);
+ const cond_expr, const then_expr = tree.nodeData(node).node_and_node;
return tree.fullIfComponents(.{
- .cond_expr = data.lhs,
- .then_expr = data.rhs,
- .else_expr = 0,
- .if_token = tree.nodes.items(.main_token)[node],
+ .cond_expr = cond_expr,
+ .then_expr = then_expr,
+ .else_expr = .none,
+ .if_token = tree.nodeMainToken(node),
});
}
pub fn ifFull(tree: Ast, node: Node.Index) full.If {
- assert(tree.nodes.items(.tag)[node] == .@"if");
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.If);
+ assert(tree.nodeTag(node) == .@"if");
+ const cond_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.If);
return tree.fullIfComponents(.{
- .cond_expr = data.lhs,
+ .cond_expr = cond_expr,
.then_expr = extra.then_expr,
- .else_expr = extra.else_expr,
- .if_token = tree.nodes.items(.main_token)[node],
+ .else_expr = extra.else_expr.toOptional(),
+ .if_token = tree.nodeMainToken(node),
});
}
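
// The `.none` / `.toOptional()` / `.unwrap()` dance above replaces the old
// "index 0 means absent" convention. Usage sketch (the sentinel's in-memory
// encoding is left as an implementation detail):
//
//     const opt: Node.OptionalIndex = some_node_index.toOptional();
//     if (opt.unwrap()) |idx| {
//         _ = tree.nodeTag(idx); // idx is a plain Node.Index when present
//     }
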
pub fn containerField(tree: Ast, node: Node.Index) full.ContainerField {
- assert(tree.nodes.items(.tag)[node] == .container_field);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.ContainerField);
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .container_field);
+ const type_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.ContainerField);
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerFieldComponents(.{
.main_token = main_token,
- .type_expr = data.lhs,
- .align_expr = extra.align_expr,
- .value_expr = extra.value_expr,
- .tuple_like = tree.tokens.items(.tag)[main_token] != .identifier or
- tree.tokens.items(.tag)[main_token + 1] != .colon,
+ .type_expr = type_expr.toOptional(),
+ .align_expr = extra.align_expr.toOptional(),
+ .value_expr = extra.value_expr.toOptional(),
+ .tuple_like = tree.tokenTag(main_token) != .identifier or
+ tree.tokenTag(main_token + 1) != .colon,
});
}
pub fn containerFieldInit(tree: Ast, node: Node.Index) full.ContainerField {
- assert(tree.nodes.items(.tag)[node] == .container_field_init);
- const data = tree.nodes.items(.data)[node];
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .container_field_init);
+ const type_expr, const value_expr = tree.nodeData(node).node_and_opt_node;
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerFieldComponents(.{
.main_token = main_token,
- .type_expr = data.lhs,
- .align_expr = 0,
- .value_expr = data.rhs,
- .tuple_like = tree.tokens.items(.tag)[main_token] != .identifier or
- tree.tokens.items(.tag)[main_token + 1] != .colon,
+ .type_expr = type_expr.toOptional(),
+ .align_expr = .none,
+ .value_expr = value_expr,
+ .tuple_like = tree.tokenTag(main_token) != .identifier or
+ tree.tokenTag(main_token + 1) != .colon,
});
}
pub fn containerFieldAlign(tree: Ast, node: Node.Index) full.ContainerField {
- assert(tree.nodes.items(.tag)[node] == .container_field_align);
- const data = tree.nodes.items(.data)[node];
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .container_field_align);
+ const type_expr, const align_expr = tree.nodeData(node).node_and_node;
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerFieldComponents(.{
.main_token = main_token,
- .type_expr = data.lhs,
- .align_expr = data.rhs,
- .value_expr = 0,
- .tuple_like = tree.tokens.items(.tag)[main_token] != .identifier or
- tree.tokens.items(.tag)[main_token + 1] != .colon,
+ .type_expr = type_expr.toOptional(),
+ .align_expr = align_expr.toOptional(),
+ .value_expr = .none,
+ .tuple_like = tree.tokenTag(main_token) != .identifier or
+ tree.tokenTag(main_token + 1) != .colon,
});
}
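
// Worked example for the `tuple_like` test used in the three functions above:
// in `struct { a: u32, u32 }`, the first field's main token is the identifier
// `a` followed by `:`, so it is a named field; the second field's `u32` is
// also an identifier token, but the token after it is not a colon, so that
// field is classified as tuple-like.
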
pub fn fnProtoSimple(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
- assert(tree.nodes.items(.tag)[node] == .fn_proto_simple);
- const data = tree.nodes.items(.data)[node];
- buffer[0] = data.lhs;
- const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1];
+ assert(tree.nodeTag(node) == .fn_proto_simple);
+ const first_param, const return_type = tree.nodeData(node).opt_node_and_opt_node;
+ const params = loadOptionalNodesIntoBuffer(1, buffer, .{first_param});
return tree.fullFnProtoComponents(.{
.proto_node = node,
- .fn_token = tree.nodes.items(.main_token)[node],
- .return_type = data.rhs,
+ .fn_token = tree.nodeMainToken(node),
+ .return_type = return_type,
.params = params,
- .align_expr = 0,
- .addrspace_expr = 0,
- .section_expr = 0,
- .callconv_expr = 0,
+ .align_expr = .none,
+ .addrspace_expr = .none,
+ .section_expr = .none,
+ .callconv_expr = .none,
});
}
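
// Sketch (assumed shape): `loadOptionalNodesIntoBuffer` unwraps up to N
// optional nodes into the caller-provided buffer, stops at the first `.none`,
// and returns the populated prefix:
//
//     fn loadOptionalNodesIntoBuffer(comptime size: usize, buffer: *[size]Node.Index, opt_nodes: [size]Node.OptionalIndex) []Node.Index {
//         for (opt_nodes, 0..) |opt_node, i| {
//             buffer[i] = opt_node.unwrap() orelse return buffer[0..i];
//         }
//         return buffer[0..];
//     }
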
pub fn fnProtoMulti(tree: Ast, node: Node.Index) full.FnProto {
- assert(tree.nodes.items(.tag)[node] == .fn_proto_multi);
- const data = tree.nodes.items(.data)[node];
- const params_range = tree.extraData(data.lhs, Node.SubRange);
- const params = tree.extra_data[params_range.start..params_range.end];
+ assert(tree.nodeTag(node) == .fn_proto_multi);
+ const extra_index, const return_type = tree.nodeData(node).extra_and_opt_node;
+ const params = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return tree.fullFnProtoComponents(.{
.proto_node = node,
- .fn_token = tree.nodes.items(.main_token)[node],
- .return_type = data.rhs,
+ .fn_token = tree.nodeMainToken(node),
+ .return_type = return_type,
.params = params,
- .align_expr = 0,
- .addrspace_expr = 0,
- .section_expr = 0,
- .callconv_expr = 0,
+ .align_expr = .none,
+ .addrspace_expr = .none,
+ .section_expr = .none,
+ .callconv_expr = .none,
});
}
pub fn fnProtoOne(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
- assert(tree.nodes.items(.tag)[node] == .fn_proto_one);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.FnProtoOne);
- buffer[0] = extra.param;
- const params = if (extra.param == 0) buffer[0..0] else buffer[0..1];
+ assert(tree.nodeTag(node) == .fn_proto_one);
+ const extra_index, const return_type = tree.nodeData(node).extra_and_opt_node;
+ const extra = tree.extraData(extra_index, Node.FnProtoOne);
+ const params = loadOptionalNodesIntoBuffer(1, buffer, .{extra.param});
return tree.fullFnProtoComponents(.{
.proto_node = node,
- .fn_token = tree.nodes.items(.main_token)[node],
- .return_type = data.rhs,
+ .fn_token = tree.nodeMainToken(node),
+ .return_type = return_type,
.params = params,
.align_expr = extra.align_expr,
.addrspace_expr = extra.addrspace_expr,
@@ -1413,14 +1518,14 @@ pub fn fnProtoOne(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.FnPr
}
pub fn fnProto(tree: Ast, node: Node.Index) full.FnProto {
- assert(tree.nodes.items(.tag)[node] == .fn_proto);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.FnProto);
- const params = tree.extra_data[extra.params_start..extra.params_end];
+ assert(tree.nodeTag(node) == .fn_proto);
+ const extra_index, const return_type = tree.nodeData(node).extra_and_opt_node;
+ const extra = tree.extraData(extra_index, Node.FnProto);
+ const params = tree.extraDataSlice(.{ .start = extra.params_start, .end = extra.params_end }, Node.Index);
return tree.fullFnProtoComponents(.{
.proto_node = node,
- .fn_token = tree.nodes.items(.main_token)[node],
- .return_type = data.rhs,
+ .fn_token = tree.nodeMainToken(node),
+ .return_type = return_type,
.params = params,
.align_expr = extra.align_expr,
.addrspace_expr = extra.addrspace_expr,
@@ -1430,300 +1535,275 @@ pub fn fnProto(tree: Ast, node: Node.Index) full.FnProto {
}
pub fn structInitOne(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.StructInit {
- assert(tree.nodes.items(.tag)[node] == .struct_init_one or
- tree.nodes.items(.tag)[node] == .struct_init_one_comma);
- const data = tree.nodes.items(.data)[node];
- buffer[0] = data.rhs;
- const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
+ assert(tree.nodeTag(node) == .struct_init_one or
+ tree.nodeTag(node) == .struct_init_one_comma);
+ const type_expr, const first_field = tree.nodeData(node).node_and_opt_node;
+ const fields = loadOptionalNodesIntoBuffer(1, buffer, .{first_field});
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
+ .lbrace = tree.nodeMainToken(node),
.fields = fields,
- .type_expr = data.lhs,
+ .type_expr = type_expr.toOptional(),
},
};
}
pub fn structInitDotTwo(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) full.StructInit {
- assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or
- tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma);
- const data = tree.nodes.items(.data)[node];
- buffer.* = .{ data.lhs, data.rhs };
- const fields = if (data.rhs != 0)
- buffer[0..2]
- else if (data.lhs != 0)
- buffer[0..1]
- else
- buffer[0..0];
+ assert(tree.nodeTag(node) == .struct_init_dot_two or
+ tree.nodeTag(node) == .struct_init_dot_two_comma);
+ const fields = loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
+ .lbrace = tree.nodeMainToken(node),
.fields = fields,
- .type_expr = 0,
+ .type_expr = .none,
},
};
}
pub fn structInitDot(tree: Ast, node: Node.Index) full.StructInit {
- assert(tree.nodes.items(.tag)[node] == .struct_init_dot or
- tree.nodes.items(.tag)[node] == .struct_init_dot_comma);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .struct_init_dot or
+ tree.nodeTag(node) == .struct_init_dot_comma);
+ const fields = tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
- .fields = tree.extra_data[data.lhs..data.rhs],
- .type_expr = 0,
+ .lbrace = tree.nodeMainToken(node),
+ .fields = fields,
+ .type_expr = .none,
},
};
}
pub fn structInit(tree: Ast, node: Node.Index) full.StructInit {
- assert(tree.nodes.items(.tag)[node] == .struct_init or
- tree.nodes.items(.tag)[node] == .struct_init_comma);
- const data = tree.nodes.items(.data)[node];
- const fields_range = tree.extraData(data.rhs, Node.SubRange);
+ assert(tree.nodeTag(node) == .struct_init or
+ tree.nodeTag(node) == .struct_init_comma);
+ const type_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const fields = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
- .fields = tree.extra_data[fields_range.start..fields_range.end],
- .type_expr = data.lhs,
+ .lbrace = tree.nodeMainToken(node),
+ .fields = fields,
+ .type_expr = type_expr.toOptional(),
},
};
}
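
// Sketch (assumed shape): `extraDataSlice` is the range-based sibling of
// `extraDataSliceWithLen`, turning a `SubRange` of raw `extra_data` words into
// a typed index slice:
//
//     fn extraDataSlice(tree: Ast, range: Node.SubRange, comptime T: type) []const T {
//         return @ptrCast(tree.extra_data[@intFromEnum(range.start)..@intFromEnum(range.end)]);
//     }
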
pub fn arrayInitOne(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit {
- assert(tree.nodes.items(.tag)[node] == .array_init_one or
- tree.nodes.items(.tag)[node] == .array_init_one_comma);
- const data = tree.nodes.items(.data)[node];
- buffer[0] = data.rhs;
- const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
+ assert(tree.nodeTag(node) == .array_init_one or
+ tree.nodeTag(node) == .array_init_one_comma);
+ const type_expr, buffer[0] = tree.nodeData(node).node_and_node;
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
- .elements = elements,
- .type_expr = data.lhs,
+ .lbrace = tree.nodeMainToken(node),
+ .elements = buffer[0..1],
+ .type_expr = type_expr.toOptional(),
},
};
}
pub fn arrayInitDotTwo(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit {
- assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or
- tree.nodes.items(.tag)[node] == .array_init_dot_two_comma);
- const data = tree.nodes.items(.data)[node];
- buffer.* = .{ data.lhs, data.rhs };
- const elements = if (data.rhs != 0)
- buffer[0..2]
- else if (data.lhs != 0)
- buffer[0..1]
- else
- buffer[0..0];
+ assert(tree.nodeTag(node) == .array_init_dot_two or
+ tree.nodeTag(node) == .array_init_dot_two_comma);
+ const elements = loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
+ .lbrace = tree.nodeMainToken(node),
.elements = elements,
- .type_expr = 0,
+ .type_expr = .none,
},
};
}
pub fn arrayInitDot(tree: Ast, node: Node.Index) full.ArrayInit {
- assert(tree.nodes.items(.tag)[node] == .array_init_dot or
- tree.nodes.items(.tag)[node] == .array_init_dot_comma);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .array_init_dot or
+ tree.nodeTag(node) == .array_init_dot_comma);
+ const elements = tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
- .elements = tree.extra_data[data.lhs..data.rhs],
- .type_expr = 0,
+ .lbrace = tree.nodeMainToken(node),
+ .elements = elements,
+ .type_expr = .none,
},
};
}
pub fn arrayInit(tree: Ast, node: Node.Index) full.ArrayInit {
- assert(tree.nodes.items(.tag)[node] == .array_init or
- tree.nodes.items(.tag)[node] == .array_init_comma);
- const data = tree.nodes.items(.data)[node];
- const elem_range = tree.extraData(data.rhs, Node.SubRange);
+ assert(tree.nodeTag(node) == .array_init or
+ tree.nodeTag(node) == .array_init_comma);
+ const type_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const elements = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return .{
.ast = .{
- .lbrace = tree.nodes.items(.main_token)[node],
- .elements = tree.extra_data[elem_range.start..elem_range.end],
- .type_expr = data.lhs,
+ .lbrace = tree.nodeMainToken(node),
+ .elements = elements,
+ .type_expr = type_expr.toOptional(),
},
};
}
pub fn arrayType(tree: Ast, node: Node.Index) full.ArrayType {
- assert(tree.nodes.items(.tag)[node] == .array_type);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .array_type);
+ const elem_count, const elem_type = tree.nodeData(node).node_and_node;
return .{
.ast = .{
- .lbracket = tree.nodes.items(.main_token)[node],
- .elem_count = data.lhs,
- .sentinel = 0,
- .elem_type = data.rhs,
+ .lbracket = tree.nodeMainToken(node),
+ .elem_count = elem_count,
+ .sentinel = .none,
+ .elem_type = elem_type,
},
};
}
pub fn arrayTypeSentinel(tree: Ast, node: Node.Index) full.ArrayType {
- assert(tree.nodes.items(.tag)[node] == .array_type_sentinel);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel);
- assert(extra.sentinel != 0);
+ assert(tree.nodeTag(node) == .array_type_sentinel);
+ const elem_count, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.ArrayTypeSentinel);
return .{
.ast = .{
- .lbracket = tree.nodes.items(.main_token)[node],
- .elem_count = data.lhs,
- .sentinel = extra.sentinel,
+ .lbracket = tree.nodeMainToken(node),
+ .elem_count = elem_count,
+ .sentinel = extra.sentinel.toOptional(),
.elem_type = extra.elem_type,
},
};
}
pub fn ptrTypeAligned(tree: Ast, node: Node.Index) full.PtrType {
- assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .ptr_type_aligned);
+ const align_node, const child_type = tree.nodeData(node).opt_node_and_node;
return tree.fullPtrTypeComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
- .align_node = data.lhs,
- .addrspace_node = 0,
- .sentinel = 0,
- .bit_range_start = 0,
- .bit_range_end = 0,
- .child_type = data.rhs,
+ .main_token = tree.nodeMainToken(node),
+ .align_node = align_node,
+ .addrspace_node = .none,
+ .sentinel = .none,
+ .bit_range_start = .none,
+ .bit_range_end = .none,
+ .child_type = child_type,
});
}
pub fn ptrTypeSentinel(tree: Ast, node: Node.Index) full.PtrType {
- assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .ptr_type_sentinel);
+ const sentinel, const child_type = tree.nodeData(node).opt_node_and_node;
return tree.fullPtrTypeComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
- .align_node = 0,
- .addrspace_node = 0,
- .sentinel = data.lhs,
- .bit_range_start = 0,
- .bit_range_end = 0,
- .child_type = data.rhs,
+ .main_token = tree.nodeMainToken(node),
+ .align_node = .none,
+ .addrspace_node = .none,
+ .sentinel = sentinel,
+ .bit_range_start = .none,
+ .bit_range_end = .none,
+ .child_type = child_type,
});
}
pub fn ptrType(tree: Ast, node: Node.Index) full.PtrType {
- assert(tree.nodes.items(.tag)[node] == .ptr_type);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.PtrType);
+ assert(tree.nodeTag(node) == .ptr_type);
+ const extra_index, const child_type = tree.nodeData(node).extra_and_node;
+ const extra = tree.extraData(extra_index, Node.PtrType);
return tree.fullPtrTypeComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
+ .main_token = tree.nodeMainToken(node),
.align_node = extra.align_node,
.addrspace_node = extra.addrspace_node,
.sentinel = extra.sentinel,
- .bit_range_start = 0,
- .bit_range_end = 0,
- .child_type = data.rhs,
+ .bit_range_start = .none,
+ .bit_range_end = .none,
+ .child_type = child_type,
});
}
pub fn ptrTypeBitRange(tree: Ast, node: Node.Index) full.PtrType {
- assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
+ assert(tree.nodeTag(node) == .ptr_type_bit_range);
+ const extra_index, const child_type = tree.nodeData(node).extra_and_node;
+ const extra = tree.extraData(extra_index, Node.PtrTypeBitRange);
return tree.fullPtrTypeComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
- .align_node = extra.align_node,
+ .main_token = tree.nodeMainToken(node),
+ .align_node = extra.align_node.toOptional(),
.addrspace_node = extra.addrspace_node,
.sentinel = extra.sentinel,
- .bit_range_start = extra.bit_range_start,
- .bit_range_end = extra.bit_range_end,
- .child_type = data.rhs,
+ .bit_range_start = extra.bit_range_start.toOptional(),
+ .bit_range_end = extra.bit_range_end.toOptional(),
+ .child_type = child_type,
});
}
pub fn sliceOpen(tree: Ast, node: Node.Index) full.Slice {
- assert(tree.nodes.items(.tag)[node] == .slice_open);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .slice_open);
+ const sliced, const start = tree.nodeData(node).node_and_node;
return .{
.ast = .{
- .sliced = data.lhs,
- .lbracket = tree.nodes.items(.main_token)[node],
- .start = data.rhs,
- .end = 0,
- .sentinel = 0,
+ .sliced = sliced,
+ .lbracket = tree.nodeMainToken(node),
+ .start = start,
+ .end = .none,
+ .sentinel = .none,
},
};
}
pub fn slice(tree: Ast, node: Node.Index) full.Slice {
- assert(tree.nodes.items(.tag)[node] == .slice);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.Slice);
+ assert(tree.nodeTag(node) == .slice);
+ const sliced, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.Slice);
return .{
.ast = .{
- .sliced = data.lhs,
- .lbracket = tree.nodes.items(.main_token)[node],
+ .sliced = sliced,
+ .lbracket = tree.nodeMainToken(node),
.start = extra.start,
- .end = extra.end,
- .sentinel = 0,
+ .end = extra.end.toOptional(),
+ .sentinel = .none,
},
};
}
pub fn sliceSentinel(tree: Ast, node: Node.Index) full.Slice {
- assert(tree.nodes.items(.tag)[node] == .slice_sentinel);
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.SliceSentinel);
+ assert(tree.nodeTag(node) == .slice_sentinel);
+ const sliced, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.SliceSentinel);
return .{
.ast = .{
- .sliced = data.lhs,
- .lbracket = tree.nodes.items(.main_token)[node],
+ .sliced = sliced,
+ .lbracket = tree.nodeMainToken(node),
.start = extra.start,
.end = extra.end,
- .sentinel = extra.sentinel,
+ .sentinel = extra.sentinel.toOptional(),
},
};
}
pub fn containerDeclTwo(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .container_decl_two or
- tree.nodes.items(.tag)[node] == .container_decl_two_trailing);
- const data = tree.nodes.items(.data)[node];
- buffer.* = .{ data.lhs, data.rhs };
- const members = if (data.rhs != 0)
- buffer[0..2]
- else if (data.lhs != 0)
- buffer[0..1]
- else
- buffer[0..0];
+ assert(tree.nodeTag(node) == .container_decl_two or
+ tree.nodeTag(node) == .container_decl_two_trailing);
+ const members = loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node);
return tree.fullContainerDeclComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
+ .main_token = tree.nodeMainToken(node),
.enum_token = null,
.members = members,
- .arg = 0,
+ .arg = .none,
});
}
pub fn containerDecl(tree: Ast, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .container_decl or
- tree.nodes.items(.tag)[node] == .container_decl_trailing);
- const data = tree.nodes.items(.data)[node];
+ assert(tree.nodeTag(node) == .container_decl or
+ tree.nodeTag(node) == .container_decl_trailing);
+ const members = tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index);
return tree.fullContainerDeclComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
+ .main_token = tree.nodeMainToken(node),
.enum_token = null,
- .members = tree.extra_data[data.lhs..data.rhs],
- .arg = 0,
+ .members = members,
+ .arg = .none,
});
}
pub fn containerDeclArg(tree: Ast, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .container_decl_arg or
- tree.nodes.items(.tag)[node] == .container_decl_arg_trailing);
- const data = tree.nodes.items(.data)[node];
- const members_range = tree.extraData(data.rhs, Node.SubRange);
+ assert(tree.nodeTag(node) == .container_decl_arg or
+ tree.nodeTag(node) == .container_decl_arg_trailing);
+ const arg, const extra_index = tree.nodeData(node).node_and_extra;
+ const members = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return tree.fullContainerDeclComponents(.{
- .main_token = tree.nodes.items(.main_token)[node],
+ .main_token = tree.nodeMainToken(node),
.enum_token = null,
- .members = tree.extra_data[members_range.start..members_range.end],
- .arg = data.lhs,
+ .members = members,
+ .arg = arg.toOptional(),
});
}
@@ -1731,175 +1811,170 @@ pub fn containerDeclRoot(tree: Ast) full.ContainerDecl {
return .{
.layout_token = null,
.ast = .{
- .main_token = undefined,
+ .main_token = 0,
.enum_token = null,
.members = tree.rootDecls(),
- .arg = 0,
+ .arg = .none,
},
};
}
pub fn taggedUnionTwo(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .tagged_union_two or
- tree.nodes.items(.tag)[node] == .tagged_union_two_trailing);
- const data = tree.nodes.items(.data)[node];
- buffer.* = .{ data.lhs, data.rhs };
- const members = if (data.rhs != 0)
- buffer[0..2]
- else if (data.lhs != 0)
- buffer[0..1]
- else
- buffer[0..0];
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .tagged_union_two or
+ tree.nodeTag(node) == .tagged_union_two_trailing);
+ const members = loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node);
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerDeclComponents(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
.members = members,
- .arg = 0,
+ .arg = .none,
});
}
pub fn taggedUnion(tree: Ast, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .tagged_union or
- tree.nodes.items(.tag)[node] == .tagged_union_trailing);
- const data = tree.nodes.items(.data)[node];
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .tagged_union or
+ tree.nodeTag(node) == .tagged_union_trailing);
+ const members = tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index);
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerDeclComponents(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
- .members = tree.extra_data[data.lhs..data.rhs],
- .arg = 0,
+ .members = members,
+ .arg = .none,
});
}
pub fn taggedUnionEnumTag(tree: Ast, node: Node.Index) full.ContainerDecl {
- assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or
- tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_trailing);
- const data = tree.nodes.items(.data)[node];
- const members_range = tree.extraData(data.rhs, Node.SubRange);
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .tagged_union_enum_tag or
+ tree.nodeTag(node) == .tagged_union_enum_tag_trailing);
+ const arg, const extra_index = tree.nodeData(node).node_and_extra;
+ const members = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
+ const main_token = tree.nodeMainToken(node);
return tree.fullContainerDeclComponents(.{
.main_token = main_token,
.enum_token = main_token + 2, // union lparen enum
- .members = tree.extra_data[members_range.start..members_range.end],
- .arg = data.lhs,
+ .members = members,
+ .arg = arg.toOptional(),
});
}
pub fn switchFull(tree: Ast, node: Node.Index) full.Switch {
- const data = &tree.nodes.items(.data)[node];
- const main_token = tree.nodes.items(.main_token)[node];
- const switch_token: TokenIndex, const label_token: ?TokenIndex = switch (tree.tokens.items(.tag)[main_token]) {
+ const main_token = tree.nodeMainToken(node);
+ const switch_token: TokenIndex, const label_token: ?TokenIndex = switch (tree.tokenTag(main_token)) {
.identifier => .{ main_token + 2, main_token },
.keyword_switch => .{ main_token, null },
else => unreachable,
};
- const extra = tree.extraData(data.rhs, Ast.Node.SubRange);
+ const condition, const extra_index = tree.nodeData(node).node_and_extra;
+ const cases = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Node.Index);
return .{
.ast = .{
.switch_token = switch_token,
- .condition = data.lhs,
- .cases = tree.extra_data[extra.start..extra.end],
+ .condition = condition,
+ .cases = cases,
},
.label_token = label_token,
};
}
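
// Example for the `main_token` dispatch above: in a labeled switch such as
// `blk: switch (x) { ... }`, the node's main token is the label identifier
// `blk`, so the `switch` keyword sits two tokens later (identifier, colon,
// keyword_switch); an unlabeled switch's main token is the keyword itself.
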
pub fn switchCaseOne(tree: Ast, node: Node.Index) full.SwitchCase {
- const data = &tree.nodes.items(.data)[node];
- const values: *[1]Node.Index = &data.lhs;
+ const first_value, const target_expr = tree.nodeData(node).opt_node_and_node;
return tree.fullSwitchCaseComponents(.{
- .values = if (data.lhs == 0) values[0..0] else values[0..1],
- .arrow_token = tree.nodes.items(.main_token)[node],
- .target_expr = data.rhs,
+ .values = if (first_value == .none)
+ &.{}
+ else
+ // Ensure that the returned slice points into the existing memory of the Ast
+ (@as(*const Node.Index, @ptrCast(&tree.nodes.items(.data)[@intFromEnum(node)].opt_node_and_node[0])))[0..1],
+ .arrow_token = tree.nodeMainToken(node),
+ .target_expr = target_expr,
}, node);
}
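
// Design note: the pointer cast above, rather than a copy into a local,
// makes `values` alias the Ast's own node storage, satisfying the in-code
// comment's guarantee that the slice points into existing Ast memory. It
// relies on a non-`.none` `Node.OptionalIndex` having the same representation
// as `Node.Index` (both `enum(u32)`), so one stored element can be viewed as a
// one-element `[1]Node.Index`.
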
pub fn switchCase(tree: Ast, node: Node.Index) full.SwitchCase {
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.lhs, Node.SubRange);
+ const extra_index, const target_expr = tree.nodeData(node).extra_and_node;
+ const values = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return tree.fullSwitchCaseComponents(.{
- .values = tree.extra_data[extra.start..extra.end],
- .arrow_token = tree.nodes.items(.main_token)[node],
- .target_expr = data.rhs,
+ .values = values,
+ .arrow_token = tree.nodeMainToken(node),
+ .target_expr = target_expr,
}, node);
}
pub fn asmSimple(tree: Ast, node: Node.Index) full.Asm {
- const data = tree.nodes.items(.data)[node];
+ const template, const rparen = tree.nodeData(node).node_and_token;
return tree.fullAsmComponents(.{
- .asm_token = tree.nodes.items(.main_token)[node],
- .template = data.lhs,
+ .asm_token = tree.nodeMainToken(node),
+ .template = template,
.items = &.{},
- .rparen = data.rhs,
+ .rparen = rparen,
});
}
pub fn asmFull(tree: Ast, node: Node.Index) full.Asm {
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.Asm);
+ const template, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.Asm);
+ const items = tree.extraDataSlice(.{ .start = extra.items_start, .end = extra.items_end }, Node.Index);
return tree.fullAsmComponents(.{
- .asm_token = tree.nodes.items(.main_token)[node],
- .template = data.lhs,
- .items = tree.extra_data[extra.items_start..extra.items_end],
+ .asm_token = tree.nodeMainToken(node),
+ .template = template,
+ .items = items,
.rparen = extra.rparen,
});
}
pub fn whileSimple(tree: Ast, node: Node.Index) full.While {
- const data = tree.nodes.items(.data)[node];
+ const cond_expr, const then_expr = tree.nodeData(node).node_and_node;
return tree.fullWhileComponents(.{
- .while_token = tree.nodes.items(.main_token)[node],
- .cond_expr = data.lhs,
- .cont_expr = 0,
- .then_expr = data.rhs,
- .else_expr = 0,
+ .while_token = tree.nodeMainToken(node),
+ .cond_expr = cond_expr,
+ .cont_expr = .none,
+ .then_expr = then_expr,
+ .else_expr = .none,
});
}
pub fn whileCont(tree: Ast, node: Node.Index) full.While {
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.WhileCont);
+ const cond_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.WhileCont);
return tree.fullWhileComponents(.{
- .while_token = tree.nodes.items(.main_token)[node],
- .cond_expr = data.lhs,
- .cont_expr = extra.cont_expr,
+ .while_token = tree.nodeMainToken(node),
+ .cond_expr = cond_expr,
+ .cont_expr = extra.cont_expr.toOptional(),
.then_expr = extra.then_expr,
- .else_expr = 0,
+ .else_expr = .none,
});
}
pub fn whileFull(tree: Ast, node: Node.Index) full.While {
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.While);
+ const cond_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Node.While);
return tree.fullWhileComponents(.{
- .while_token = tree.nodes.items(.main_token)[node],
- .cond_expr = data.lhs,
+ .while_token = tree.nodeMainToken(node),
+ .cond_expr = cond_expr,
.cont_expr = extra.cont_expr,
.then_expr = extra.then_expr,
- .else_expr = extra.else_expr,
+ .else_expr = extra.else_expr.toOptional(),
});
}
pub fn forSimple(tree: Ast, node: Node.Index) full.For {
- const data = &tree.nodes.items(.data)[node];
- const inputs: *[1]Node.Index = &data.lhs;
+ const data = &tree.nodes.items(.data)[@intFromEnum(node)].node_and_node;
return tree.fullForComponents(.{
- .for_token = tree.nodes.items(.main_token)[node],
- .inputs = inputs[0..1],
- .then_expr = data.rhs,
- .else_expr = 0,
+ .for_token = tree.nodeMainToken(node),
+ .inputs = (&data[0])[0..1],
+ .then_expr = data[1],
+ .else_expr = .none,
});
}
pub fn forFull(tree: Ast, node: Node.Index) full.For {
- const data = tree.nodes.items(.data)[node];
- const extra = @as(Node.For, @bitCast(data.rhs));
- const inputs = tree.extra_data[data.lhs..][0..extra.inputs];
- const then_expr = tree.extra_data[data.lhs + extra.inputs];
- const else_expr = if (extra.has_else) tree.extra_data[data.lhs + extra.inputs + 1] else 0;
+ const extra_index, const extra = tree.nodeData(node).@"for";
+ const inputs = tree.extraDataSliceWithLen(extra_index, extra.inputs, Node.Index);
+ const then_expr: Node.Index = @enumFromInt(tree.extra_data[@intFromEnum(extra_index) + extra.inputs]);
+ const else_expr: Node.OptionalIndex = if (extra.has_else) @enumFromInt(tree.extra_data[@intFromEnum(extra_index) + extra.inputs + 1]) else .none;
return tree.fullForComponents(.{
- .for_token = tree.nodes.items(.main_token)[node],
+ .for_token = tree.nodeMainToken(node),
.inputs = inputs,
.then_expr = then_expr,
.else_expr = else_expr,
@@ -1907,28 +1982,26 @@ pub fn forFull(tree: Ast, node: Node.Index) full.For {
}
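`forFull` decodes a variable-length record: the inputs live contiguously in `extra_data` starting at `extra_index`, immediately followed by `then_expr` and, when `extra.has_else` is set, `else_expr`. A hedged sketch of just the else lookup, mirroring the code above (helper name hypothetical):

// extra_data layout for a full `for` node, starting at extra_index:
//   [ input_0 .. input_{inputs-1} ] [ then_expr ] [ else_expr (iff has_else) ]
fn forElseExpr(tree: Ast, node: Node.Index) Node.OptionalIndex {
    const extra_index, const extra = tree.nodeData(node).@"for";
    if (!extra.has_else) return .none;
    const base = @intFromEnum(extra_index);
    return @enumFromInt(tree.extra_data[base + extra.inputs + 1]);
}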
pub fn callOne(tree: Ast, buffer: *[1]Node.Index, node: Node.Index) full.Call {
- const data = tree.nodes.items(.data)[node];
- buffer.* = .{data.rhs};
- const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0];
+ const fn_expr, const first_param = tree.nodeData(node).node_and_opt_node;
+ const params = loadOptionalNodesIntoBuffer(1, buffer, .{first_param});
return tree.fullCallComponents(.{
- .lparen = tree.nodes.items(.main_token)[node],
- .fn_expr = data.lhs,
+ .lparen = tree.nodeMainToken(node),
+ .fn_expr = fn_expr,
.params = params,
});
}
pub fn callFull(tree: Ast, node: Node.Index) full.Call {
- const data = tree.nodes.items(.data)[node];
- const extra = tree.extraData(data.rhs, Node.SubRange);
+ const fn_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const params = tree.extraDataSlice(tree.extraData(extra_index, Node.SubRange), Node.Index);
return tree.fullCallComponents(.{
- .lparen = tree.nodes.items(.main_token)[node],
- .fn_expr = data.lhs,
- .params = tree.extra_data[extra.start..extra.end],
+ .lparen = tree.nodeMainToken(node),
+ .fn_expr = fn_expr,
+ .params = params,
});
}
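The `call_one` variants store their single argument inline in the node, while the `call` variants spill to `extra_data`; `fullCall` papers over the difference with a one-slot scratch buffer that must outlive the returned view. Usage sketch:

fn callParamCount(tree: Ast, node: Node.Index) ?usize {
    // The buffer backs `call.ast.params` for the call_one variants,
    // so it has to stay alive while the result is used.
    var buf: [1]Node.Index = undefined;
    const call = tree.fullCall(&buf, node) orelse return null;
    return call.ast.params.len;
}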
fn fullVarDeclComponents(tree: Ast, info: full.VarDecl.Components) full.VarDecl {
- const token_tags = tree.tokens.items(.tag);
var result: full.VarDecl = .{
.ast = info,
.visib_token = null,
@@ -1940,7 +2013,7 @@ fn fullVarDeclComponents(tree: Ast, info: full.VarDecl.Components) full.VarDecl
var i = info.mut_token;
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern, .keyword_export => result.extern_export_token = i,
.keyword_comptime => result.comptime_token = i,
.keyword_pub => result.visib_token = i,
@@ -1953,14 +2026,12 @@ fn fullVarDeclComponents(tree: Ast, info: full.VarDecl.Components) full.VarDecl
}
fn fullAssignDestructureComponents(tree: Ast, info: full.AssignDestructure.Components) full.AssignDestructure {
- const token_tags = tree.tokens.items(.tag);
- const node_tags = tree.nodes.items(.tag);
var result: full.AssignDestructure = .{
.comptime_token = null,
.ast = info,
};
const first_variable_token = tree.firstToken(info.variables[0]);
- const maybe_comptime_token = switch (node_tags[info.variables[0]]) {
+ const maybe_comptime_token = switch (tree.nodeTag(info.variables[0])) {
.global_var_decl,
.local_var_decl,
.aligned_var_decl,
@@ -1968,14 +2039,13 @@ fn fullAssignDestructureComponents(tree: Ast, info: full.AssignDestructure.Compo
=> first_variable_token,
else => first_variable_token - 1,
};
- if (token_tags[maybe_comptime_token] == .keyword_comptime) {
+ if (tree.tokenTag(maybe_comptime_token) == .keyword_comptime) {
result.comptime_token = maybe_comptime_token;
}
return result;
}
fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
- const token_tags = tree.tokens.items(.tag);
var result: full.If = .{
.ast = info,
.payload_token = null,
@@ -1985,14 +2055,14 @@ fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
// if (cond_expr) |x|
// ^ ^
const payload_pipe = tree.lastToken(info.cond_expr) + 2;
- if (token_tags[payload_pipe] == .pipe) {
+ if (tree.tokenTag(payload_pipe) == .pipe) {
result.payload_token = payload_pipe + 1;
}
- if (info.else_expr != 0) {
+ if (info.else_expr != .none) {
// then_expr else |x|
// ^ ^
result.else_token = tree.lastToken(info.then_expr) + 1;
- if (token_tags[result.else_token + 1] == .pipe) {
+ if (tree.tokenTag(result.else_token + 1) == .pipe) {
result.error_token = result.else_token + 2;
}
}
@@ -2000,12 +2070,11 @@ fn fullIfComponents(tree: Ast, info: full.If.Components) full.If {
}
fn fullContainerFieldComponents(tree: Ast, info: full.ContainerField.Components) full.ContainerField {
- const token_tags = tree.tokens.items(.tag);
var result: full.ContainerField = .{
.ast = info,
.comptime_token = null,
};
- if (info.main_token > 0 and token_tags[info.main_token - 1] == .keyword_comptime) {
+ if (tree.isTokenPrecededByTags(info.main_token, &.{.keyword_comptime})) {
// comptime type = init,
// ^ ^
// comptime name: type = init,
@@ -2016,7 +2085,6 @@ fn fullContainerFieldComponents(tree: Ast, info: full.ContainerField.Components)
}
fn fullFnProtoComponents(tree: Ast, info: full.FnProto.Components) full.FnProto {
- const token_tags = tree.tokens.items(.tag);
var result: full.FnProto = .{
.ast = info,
.visib_token = null,
@@ -2028,7 +2096,7 @@ fn fullFnProtoComponents(tree: Ast, info: full.FnProto.Components) full.FnProto
var i = info.fn_token;
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern,
.keyword_export,
.keyword_inline,
@@ -2040,25 +2108,24 @@ fn fullFnProtoComponents(tree: Ast, info: full.FnProto.Components) full.FnProto
}
}
const after_fn_token = info.fn_token + 1;
- if (token_tags[after_fn_token] == .identifier) {
+ if (tree.tokenTag(after_fn_token) == .identifier) {
result.name_token = after_fn_token;
result.lparen = after_fn_token + 1;
} else {
result.lparen = after_fn_token;
}
- assert(token_tags[result.lparen] == .l_paren);
+ assert(tree.tokenTag(result.lparen) == .l_paren);
return result;
}
fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType {
- const token_tags = tree.tokens.items(.tag);
- const size: std.builtin.Type.Pointer.Size = switch (token_tags[info.main_token]) {
+ const size: std.builtin.Type.Pointer.Size = switch (tree.tokenTag(info.main_token)) {
.asterisk,
.asterisk_asterisk,
=> .one,
- .l_bracket => switch (token_tags[info.main_token + 1]) {
- .asterisk => if (token_tags[info.main_token + 2] == .identifier) .c else .many,
+ .l_bracket => switch (tree.tokenTag(info.main_token + 1)) {
+ .asterisk => if (tree.tokenTag(info.main_token + 2) == .identifier) .c else .many,
else => .slice,
},
else => unreachable,
@@ -2074,23 +2141,23 @@ fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType
// here while looking for modifiers as that could result in false
// positives. Therefore, start after a sentinel if there is one and
// skip over any align node and bit range nodes.
- var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else switch (size) {
+ var i = if (info.sentinel.unwrap()) |sentinel| tree.lastToken(sentinel) + 1 else switch (size) {
.many, .c => info.main_token + 1,
else => info.main_token,
};
const end = tree.firstToken(info.child_type);
while (i < end) : (i += 1) {
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_allowzero => result.allowzero_token = i,
.keyword_const => result.const_token = i,
.keyword_volatile => result.volatile_token = i,
.keyword_align => {
- assert(info.align_node != 0);
- if (info.bit_range_end != 0) {
- assert(info.bit_range_start != 0);
- i = tree.lastToken(info.bit_range_end) + 1;
+ const align_node = info.align_node.unwrap().?;
+ if (info.bit_range_end.unwrap()) |bit_range_end| {
+ assert(info.bit_range_start != .none);
+ i = tree.lastToken(bit_range_end) + 1;
} else {
- i = tree.lastToken(info.align_node) + 1;
+ i = tree.lastToken(align_node) + 1;
}
},
else => {},
@@ -2100,30 +2167,29 @@ fn fullPtrTypeComponents(tree: Ast, info: full.PtrType.Components) full.PtrType
}
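For reference, these are the token shapes the size switch in `fullPtrTypeComponents` distinguishes (main token first):

// *T    => .one    (asterisk)
// **T   => .one    (asterisk_asterisk; one node covering two pointer levels)
// []T   => .slice  (l_bracket not followed by an asterisk)
// [*]T  => .many   (l_bracket, asterisk, then a non-identifier token)
// [*c]T => .c      (l_bracket, asterisk, then the identifier `c`)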
fn fullContainerDeclComponents(tree: Ast, info: full.ContainerDecl.Components) full.ContainerDecl {
- const token_tags = tree.tokens.items(.tag);
var result: full.ContainerDecl = .{
.ast = info,
.layout_token = null,
};
- if (info.main_token == 0) return result;
+ if (info.main_token == 0) return result; // .root
 - switch (token_tags[info.main_token - 1]) {
 - .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
 + const previous_token = info.main_token - 1;
 + switch (tree.tokenTag(previous_token)) {
 + .keyword_extern, .keyword_packed => result.layout_token = previous_token,
else => {},
}
return result;
}
fn fullSwitchComponents(tree: Ast, info: full.Switch.Components) full.Switch {
- const token_tags = tree.tokens.items(.tag);
const tok_i = info.switch_token -| 1;
var result: full.Switch = .{
.ast = info,
.label_token = null,
};
- if (token_tags[tok_i] == .colon and
- token_tags[tok_i -| 1] == .identifier)
+ if (tree.tokenTag(tok_i) == .colon and
+ tree.tokenTag(tok_i -| 1) == .identifier)
{
result.label_token = tok_i - 1;
}
@@ -2131,26 +2197,25 @@ fn fullSwitchComponents(tree: Ast, info: full.Switch.Components) full.Switch {
}
fn fullSwitchCaseComponents(tree: Ast, info: full.SwitchCase.Components, node: Node.Index) full.SwitchCase {
- const token_tags = tree.tokens.items(.tag);
- const node_tags = tree.nodes.items(.tag);
var result: full.SwitchCase = .{
.ast = info,
.payload_token = null,
.inline_token = null,
};
- if (token_tags[info.arrow_token + 1] == .pipe) {
+ if (tree.tokenTag(info.arrow_token + 1) == .pipe) {
result.payload_token = info.arrow_token + 2;
}
- switch (node_tags[node]) {
- .switch_case_inline, .switch_case_inline_one => result.inline_token = firstToken(tree, node),
- else => {},
- }
+ result.inline_token = switch (tree.nodeTag(node)) {
+ .switch_case_inline, .switch_case_inline_one => if (result.ast.values.len == 0)
+ info.arrow_token - 2
+ else
+ tree.firstToken(result.ast.values[0]) - 1,
+ else => null,
+ };
return result;
}
fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
- const token_tags = tree.tokens.items(.tag);
- const node_tags = tree.nodes.items(.tag);
var result: full.Asm = .{
.ast = info,
.volatile_token = null,
@@ -2158,11 +2223,11 @@ fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
.outputs = &.{},
.first_clobber = null,
};
- if (token_tags[info.asm_token + 1] == .keyword_volatile) {
+ if (tree.tokenTag(info.asm_token + 1) == .keyword_volatile) {
result.volatile_token = info.asm_token + 1;
}
const outputs_end: usize = for (info.items, 0..) |item, i| {
- switch (node_tags[item]) {
+ switch (tree.nodeTag(item)) {
.asm_output => continue,
else => break i,
}
@@ -2174,10 +2239,10 @@ fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
if (info.items.len == 0) {
// asm ("foo" ::: "a", "b");
const template_token = tree.lastToken(info.template);
- if (token_tags[template_token + 1] == .colon and
- token_tags[template_token + 2] == .colon and
- token_tags[template_token + 3] == .colon and
- token_tags[template_token + 4] == .string_literal)
+ if (tree.tokenTag(template_token + 1) == .colon and
+ tree.tokenTag(template_token + 2) == .colon and
+ tree.tokenTag(template_token + 3) == .colon and
+ tree.tokenTag(template_token + 4) == .string_literal)
{
result.first_clobber = template_token + 4;
}
@@ -2187,9 +2252,9 @@ fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
const rparen = tree.lastToken(last_input);
var i = rparen + 1;
// Allow a (useless) comma right after the closing parenthesis.
- if (token_tags[i] == .comma) i += 1;
- if (token_tags[i] == .colon and
- token_tags[i + 1] == .string_literal)
 + if (tree.tokenTag(i) == .comma) i += 1;
+ if (tree.tokenTag(i) == .colon and
+ tree.tokenTag(i + 1) == .string_literal)
{
result.first_clobber = i + 1;
}
@@ -2199,10 +2264,10 @@ fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
const rparen = tree.lastToken(last_output);
var i = rparen + 1;
// Allow a (useless) comma right after the closing parenthesis.
- if (token_tags[i] == .comma) i += 1;
- if (token_tags[i] == .colon and
- token_tags[i + 1] == .colon and
- token_tags[i + 2] == .string_literal)
 + if (tree.tokenTag(i) == .comma) i += 1;
+ if (tree.tokenTag(i) == .colon and
+ tree.tokenTag(i + 1) == .colon and
+ tree.tokenTag(i + 2) == .string_literal)
{
result.first_clobber = i + 2;
}
@@ -2212,7 +2277,6 @@ fn fullAsmComponents(tree: Ast, info: full.Asm.Components) full.Asm {
}
fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
- const token_tags = tree.tokens.items(.tag);
var result: full.While = .{
.ast = info,
.inline_token = null,
@@ -2221,25 +2285,23 @@ fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
.else_token = undefined,
.error_token = null,
};
- var tok_i = info.while_token -| 1;
- if (token_tags[tok_i] == .keyword_inline) {
- result.inline_token = tok_i;
- tok_i -|= 1;
+ var tok_i = info.while_token;
+ if (tree.isTokenPrecededByTags(tok_i, &.{.keyword_inline})) {
+ result.inline_token = tok_i - 1;
 + tok_i -= 1;
}
- if (token_tags[tok_i] == .colon and
- token_tags[tok_i -| 1] == .identifier)
- {
- result.label_token = tok_i - 1;
+ if (tree.isTokenPrecededByTags(tok_i, &.{ .identifier, .colon })) {
+ result.label_token = tok_i - 2;
}
const last_cond_token = tree.lastToken(info.cond_expr);
- if (token_tags[last_cond_token + 2] == .pipe) {
+ if (tree.tokenTag(last_cond_token + 2) == .pipe) {
result.payload_token = last_cond_token + 3;
}
- if (info.else_expr != 0) {
+ if (info.else_expr != .none) {
// then_expr else |x|
// ^ ^
result.else_token = tree.lastToken(info.then_expr) + 1;
- if (token_tags[result.else_token + 1] == .pipe) {
+ if (tree.tokenTag(result.else_token + 1) == .pipe) {
result.error_token = result.else_token + 2;
}
}
@@ -2247,7 +2309,6 @@ fn fullWhileComponents(tree: Ast, info: full.While.Components) full.While {
}
fn fullForComponents(tree: Ast, info: full.For.Components) full.For {
- const token_tags = tree.tokens.items(.tag);
var result: full.For = .{
.ast = info,
.inline_token = null,
@@ -2255,39 +2316,36 @@ fn fullForComponents(tree: Ast, info: full.For.Components) full.For {
.payload_token = undefined,
.else_token = undefined,
};
- var tok_i = info.for_token -| 1;
- if (token_tags[tok_i] == .keyword_inline) {
- result.inline_token = tok_i;
- tok_i -|= 1;
+ var tok_i = info.for_token;
+ if (tree.isTokenPrecededByTags(tok_i, &.{.keyword_inline})) {
+ result.inline_token = tok_i - 1;
 + tok_i -= 1;
}
- if (token_tags[tok_i] == .colon and
- token_tags[tok_i -| 1] == .identifier)
- {
- result.label_token = tok_i - 1;
+ if (tree.isTokenPrecededByTags(tok_i, &.{ .identifier, .colon })) {
+ result.label_token = tok_i - 2;
}
const last_cond_token = tree.lastToken(info.inputs[info.inputs.len - 1]);
- result.payload_token = last_cond_token + 3 + @intFromBool(token_tags[last_cond_token + 1] == .comma);
- if (info.else_expr != 0) {
 + result.payload_token = last_cond_token + 3 + @intFromBool(tree.tokenTag(last_cond_token + 1) == .comma);
+ if (info.else_expr != .none) {
result.else_token = tree.lastToken(info.then_expr) + 1;
}
return result;
}
fn fullCallComponents(tree: Ast, info: full.Call.Components) full.Call {
- const token_tags = tree.tokens.items(.tag);
var result: full.Call = .{
.ast = info,
.async_token = null,
};
const first_token = tree.firstToken(info.fn_expr);
- if (first_token != 0 and token_tags[first_token - 1] == .keyword_async) {
+ if (tree.isTokenPrecededByTags(first_token, &.{.keyword_async})) {
result.async_token = first_token - 1;
}
return result;
}
pub fn fullVarDecl(tree: Ast, node: Node.Index) ?full.VarDecl {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.global_var_decl => tree.globalVarDecl(node),
.local_var_decl => tree.localVarDecl(node),
.aligned_var_decl => tree.alignedVarDecl(node),
@@ -2297,7 +2355,7 @@ pub fn fullVarDecl(tree: Ast, node: Node.Index) ?full.VarDecl {
}
pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.if_simple => tree.ifSimple(node),
.@"if" => tree.ifFull(node),
else => null,
@@ -2305,7 +2363,7 @@ pub fn fullIf(tree: Ast, node: Node.Index) ?full.If {
}
pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.while_simple => tree.whileSimple(node),
.while_cont => tree.whileCont(node),
.@"while" => tree.whileFull(node),
@@ -2314,7 +2372,7 @@ pub fn fullWhile(tree: Ast, node: Node.Index) ?full.While {
}
pub fn fullFor(tree: Ast, node: Node.Index) ?full.For {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.for_simple => tree.forSimple(node),
.@"for" => tree.forFull(node),
else => null,
@@ -2322,7 +2380,7 @@ pub fn fullFor(tree: Ast, node: Node.Index) ?full.For {
}
pub fn fullContainerField(tree: Ast, node: Node.Index) ?full.ContainerField {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.container_field_init => tree.containerFieldInit(node),
.container_field_align => tree.containerFieldAlign(node),
.container_field => tree.containerField(node),
@@ -2331,18 +2389,18 @@ pub fn fullContainerField(tree: Ast, node: Node.Index) ?full.ContainerField {
}
pub fn fullFnProto(tree: Ast, buffer: *[1]Ast.Node.Index, node: Node.Index) ?full.FnProto {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.fn_proto => tree.fnProto(node),
.fn_proto_multi => tree.fnProtoMulti(node),
.fn_proto_one => tree.fnProtoOne(buffer, node),
.fn_proto_simple => tree.fnProtoSimple(buffer, node),
- .fn_decl => tree.fullFnProto(buffer, tree.nodes.items(.data)[node].lhs),
+ .fn_decl => tree.fullFnProto(buffer, tree.nodeData(node).node_and_node[0]),
else => null,
};
}
pub fn fullStructInit(tree: Ast, buffer: *[2]Ast.Node.Index, node: Node.Index) ?full.StructInit {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.struct_init_one, .struct_init_one_comma => tree.structInitOne(buffer[0..1], node),
.struct_init_dot_two, .struct_init_dot_two_comma => tree.structInitDotTwo(buffer, node),
.struct_init_dot, .struct_init_dot_comma => tree.structInitDot(node),
@@ -2352,7 +2410,7 @@ pub fn fullStructInit(tree: Ast, buffer: *[2]Ast.Node.Index, node: Node.Index) ?
}
pub fn fullArrayInit(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) ?full.ArrayInit {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.array_init_one, .array_init_one_comma => tree.arrayInitOne(buffer[0..1], node),
.array_init_dot_two, .array_init_dot_two_comma => tree.arrayInitDotTwo(buffer, node),
.array_init_dot, .array_init_dot_comma => tree.arrayInitDot(node),
@@ -2362,7 +2420,7 @@ pub fn fullArrayInit(tree: Ast, buffer: *[2]Node.Index, node: Node.Index) ?full.
}
pub fn fullArrayType(tree: Ast, node: Node.Index) ?full.ArrayType {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.array_type => tree.arrayType(node),
.array_type_sentinel => tree.arrayTypeSentinel(node),
else => null,
@@ -2370,7 +2428,7 @@ pub fn fullArrayType(tree: Ast, node: Node.Index) ?full.ArrayType {
}
pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.ptr_type_aligned => tree.ptrTypeAligned(node),
.ptr_type_sentinel => tree.ptrTypeSentinel(node),
.ptr_type => tree.ptrType(node),
@@ -2380,7 +2438,7 @@ pub fn fullPtrType(tree: Ast, node: Node.Index) ?full.PtrType {
}
pub fn fullSlice(tree: Ast, node: Node.Index) ?full.Slice {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.slice_open => tree.sliceOpen(node),
.slice => tree.slice(node),
.slice_sentinel => tree.sliceSentinel(node),
@@ -2389,7 +2447,7 @@ pub fn fullSlice(tree: Ast, node: Node.Index) ?full.Slice {
}
pub fn fullContainerDecl(tree: Ast, buffer: *[2]Ast.Node.Index, node: Node.Index) ?full.ContainerDecl {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.root => tree.containerDeclRoot(),
.container_decl, .container_decl_trailing => tree.containerDecl(node),
.container_decl_arg, .container_decl_arg_trailing => tree.containerDeclArg(node),
@@ -2402,14 +2460,14 @@ pub fn fullContainerDecl(tree: Ast, buffer: *[2]Ast.Node.Index, node: Node.Index
}
pub fn fullSwitch(tree: Ast, node: Node.Index) ?full.Switch {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.@"switch", .switch_comma => tree.switchFull(node),
else => null,
};
}
pub fn fullSwitchCase(tree: Ast, node: Node.Index) ?full.SwitchCase {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.switch_case_one, .switch_case_inline_one => tree.switchCaseOne(node),
.switch_case, .switch_case_inline => tree.switchCase(node),
else => null,
@@ -2417,7 +2475,7 @@ pub fn fullSwitchCase(tree: Ast, node: Node.Index) ?full.SwitchCase {
}
pub fn fullAsm(tree: Ast, node: Node.Index) ?full.Asm {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.asm_simple => tree.asmSimple(node),
.@"asm" => tree.asmFull(node),
else => null,
@@ -2425,7 +2483,7 @@ pub fn fullAsm(tree: Ast, node: Node.Index) ?full.Asm {
}
pub fn fullCall(tree: Ast, buffer: *[1]Ast.Node.Index, node: Node.Index) ?full.Call {
- return switch (tree.nodes.items(.tag)[node]) {
+ return switch (tree.nodeTag(node)) {
.call, .call_comma, .async_call, .async_call_comma => tree.callFull(node),
.call_one, .call_one_comma, .async_call_one, .async_call_one_comma => tree.callOne(buffer, node),
else => null,
@@ -2433,37 +2491,17 @@ pub fn fullCall(tree: Ast, buffer: *[1]Ast.Node.Index, node: Node.Index) ?full.C
}
pub fn builtinCallParams(tree: Ast, buffer: *[2]Ast.Node.Index, node: Ast.Node.Index) ?[]const Node.Index {
- const data = tree.nodes.items(.data)[node];
- return switch (tree.nodes.items(.tag)[node]) {
- .builtin_call_two, .builtin_call_two_comma => {
- buffer.* = .{ data.lhs, data.rhs };
- if (data.rhs != 0) {
- return buffer[0..2];
- } else if (data.lhs != 0) {
- return buffer[0..1];
- } else {
- return buffer[0..0];
- }
- },
- .builtin_call, .builtin_call_comma => tree.extra_data[data.lhs..data.rhs],
+ return switch (tree.nodeTag(node)) {
+ .builtin_call_two, .builtin_call_two_comma => loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node),
+ .builtin_call, .builtin_call_comma => tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index),
else => null,
};
}
pub fn blockStatements(tree: Ast, buffer: *[2]Ast.Node.Index, node: Ast.Node.Index) ?[]const Node.Index {
- const data = tree.nodes.items(.data)[node];
- return switch (tree.nodes.items(.tag)[node]) {
- .block_two, .block_two_semicolon => {
- buffer.* = .{ data.lhs, data.rhs };
- if (data.rhs != 0) {
- return buffer[0..2];
- } else if (data.lhs != 0) {
- return buffer[0..1];
- } else {
- return buffer[0..0];
- }
- },
- .block, .block_semicolon => tree.extra_data[data.lhs..data.rhs],
+ return switch (tree.nodeTag(node)) {
+ .block_two, .block_two_semicolon => loadOptionalNodesIntoBuffer(2, buffer, tree.nodeData(node).opt_node_and_opt_node),
+ .block, .block_semicolon => tree.extraDataSlice(tree.nodeData(node).extra_range, Node.Index),
else => null,
};
}
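`loadOptionalNodesIntoBuffer` is called here but defined outside this hunk; its contract is to copy nodes into the scratch buffer up to the first `.none` and return the filled prefix. A plausible sketch, reusing the `Node` alias from the earlier sketches (the real definition ships elsewhere in this commit):

fn loadOptionalNodesIntoBuffer(
    comptime size: usize,
    buffer: *[size]Node.Index,
    items: [size]Node.OptionalIndex,
) []Node.Index {
    for (items, 0..) |opt_item, i| {
        // A .none entry terminates the list; everything after it is unset.
        buffer[i] = opt_item.unwrap() orelse return buffer[0..i];
    }
    return buffer[0..];
}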
@@ -2480,11 +2518,11 @@ pub const full = struct {
pub const Components = struct {
mut_token: TokenIndex,
- type_node: Node.Index,
- align_node: Node.Index,
- addrspace_node: Node.Index,
- section_node: Node.Index,
- init_node: Node.Index,
+ type_node: Node.OptionalIndex,
+ align_node: Node.OptionalIndex,
+ addrspace_node: Node.OptionalIndex,
+ section_node: Node.OptionalIndex,
+ init_node: Node.OptionalIndex,
};
pub fn firstToken(var_decl: VarDecl) TokenIndex {
@@ -2513,7 +2551,7 @@ pub const full = struct {
payload_token: ?TokenIndex,
/// Points to the identifier after the `|`.
error_token: ?TokenIndex,
- /// Populated only if else_expr != 0.
+ /// Populated only if else_expr != .none.
else_token: TokenIndex,
ast: Components,
@@ -2521,7 +2559,7 @@ pub const full = struct {
if_token: TokenIndex,
cond_expr: Node.Index,
then_expr: Node.Index,
- else_expr: Node.Index,
+ else_expr: Node.OptionalIndex,
};
};
@@ -2531,15 +2569,15 @@ pub const full = struct {
label_token: ?TokenIndex,
payload_token: ?TokenIndex,
error_token: ?TokenIndex,
- /// Populated only if else_expr != 0.
 + /// Populated only if else_expr != .none.
else_token: TokenIndex,
pub const Components = struct {
while_token: TokenIndex,
cond_expr: Node.Index,
- cont_expr: Node.Index,
+ cont_expr: Node.OptionalIndex,
then_expr: Node.Index,
- else_expr: Node.Index,
+ else_expr: Node.OptionalIndex,
};
};
@@ -2548,14 +2586,14 @@ pub const full = struct {
inline_token: ?TokenIndex,
label_token: ?TokenIndex,
payload_token: TokenIndex,
- /// Populated only if else_expr != 0.
- else_token: TokenIndex,
+ /// Populated only if else_expr != .none.
+ else_token: ?TokenIndex,
pub const Components = struct {
for_token: TokenIndex,
inputs: []const Node.Index,
then_expr: Node.Index,
- else_expr: Node.Index,
+ else_expr: Node.OptionalIndex,
};
};
@@ -2565,9 +2603,10 @@ pub const full = struct {
pub const Components = struct {
main_token: TokenIndex,
- type_expr: Node.Index,
- align_expr: Node.Index,
- value_expr: Node.Index,
+ /// Can only be `.none` after calling `convertToNonTupleLike`.
+ type_expr: Node.OptionalIndex,
+ align_expr: Node.OptionalIndex,
+ value_expr: Node.OptionalIndex,
tuple_like: bool,
};
@@ -2575,11 +2614,11 @@ pub const full = struct {
return cf.comptime_token orelse cf.ast.main_token;
}
- pub fn convertToNonTupleLike(cf: *ContainerField, nodes: NodeList.Slice) void {
+ pub fn convertToNonTupleLike(cf: *ContainerField, tree: *const Ast) void {
if (!cf.ast.tuple_like) return;
- if (nodes.items(.tag)[cf.ast.type_expr] != .identifier) return;
+ if (tree.nodeTag(cf.ast.type_expr.unwrap().?) != .identifier) return;
- cf.ast.type_expr = 0;
+ cf.ast.type_expr = .none;
cf.ast.tuple_like = false;
}
};
@@ -2595,12 +2634,12 @@ pub const full = struct {
pub const Components = struct {
proto_node: Node.Index,
fn_token: TokenIndex,
- return_type: Node.Index,
+ return_type: Node.OptionalIndex,
params: []const Node.Index,
- align_expr: Node.Index,
- addrspace_expr: Node.Index,
- section_expr: Node.Index,
- callconv_expr: Node.Index,
+ align_expr: Node.OptionalIndex,
+ addrspace_expr: Node.OptionalIndex,
+ section_expr: Node.OptionalIndex,
+ callconv_expr: Node.OptionalIndex,
};
pub const Param = struct {
@@ -2608,7 +2647,7 @@ pub const full = struct {
name_token: ?TokenIndex,
comptime_noalias: ?TokenIndex,
anytype_ellipsis3: ?TokenIndex,
- type_expr: Node.Index,
+ type_expr: ?Node.Index,
};
pub fn firstToken(fn_proto: FnProto) TokenIndex {
@@ -2628,7 +2667,7 @@ pub const full = struct {
tok_flag: bool,
pub fn next(it: *Iterator) ?Param {
- const token_tags = it.tree.tokens.items(.tag);
+ const tree = it.tree;
while (true) {
var first_doc_comment: ?TokenIndex = null;
var comptime_noalias: ?TokenIndex = null;
@@ -2638,8 +2677,8 @@ pub const full = struct {
return null;
}
const param_type = it.fn_proto.ast.params[it.param_i];
- var tok_i = it.tree.firstToken(param_type) - 1;
- while (true) : (tok_i -= 1) switch (token_tags[tok_i]) {
+ var tok_i = tree.firstToken(param_type) - 1;
+ while (true) : (tok_i -= 1) switch (tree.tokenTag(tok_i)) {
.colon => continue,
.identifier => name_token = tok_i,
.doc_comment => first_doc_comment = tok_i,
@@ -2647,9 +2686,9 @@ pub const full = struct {
else => break,
};
it.param_i += 1;
- it.tok_i = it.tree.lastToken(param_type) + 1;
+ it.tok_i = tree.lastToken(param_type) + 1;
// Look for anytype and ... params afterwards.
- if (token_tags[it.tok_i] == .comma) {
+ if (tree.tokenTag(it.tok_i) == .comma) {
it.tok_i += 1;
}
it.tok_flag = true;
@@ -2661,19 +2700,19 @@ pub const full = struct {
.type_expr = param_type,
};
}
- if (token_tags[it.tok_i] == .comma) {
+ if (tree.tokenTag(it.tok_i) == .comma) {
it.tok_i += 1;
}
- if (token_tags[it.tok_i] == .r_paren) {
+ if (tree.tokenTag(it.tok_i) == .r_paren) {
return null;
}
- if (token_tags[it.tok_i] == .doc_comment) {
+ if (tree.tokenTag(it.tok_i) == .doc_comment) {
first_doc_comment = it.tok_i;
- while (token_tags[it.tok_i] == .doc_comment) {
+ while (tree.tokenTag(it.tok_i) == .doc_comment) {
it.tok_i += 1;
}
}
- switch (token_tags[it.tok_i]) {
+ switch (tree.tokenTag(it.tok_i)) {
.ellipsis3 => {
it.tok_flag = false; // Next iteration should return null.
return Param{
@@ -2681,7 +2720,7 @@ pub const full = struct {
.comptime_noalias = null,
.name_token = null,
.anytype_ellipsis3 = it.tok_i,
- .type_expr = 0,
+ .type_expr = null,
};
},
.keyword_noalias, .keyword_comptime => {
@@ -2690,20 +2729,20 @@ pub const full = struct {
},
else => {},
}
- if (token_tags[it.tok_i] == .identifier and
- token_tags[it.tok_i + 1] == .colon)
+ if (tree.tokenTag(it.tok_i) == .identifier and
+ tree.tokenTag(it.tok_i + 1) == .colon)
{
name_token = it.tok_i;
it.tok_i += 2;
}
- if (token_tags[it.tok_i] == .keyword_anytype) {
+ if (tree.tokenTag(it.tok_i) == .keyword_anytype) {
it.tok_i += 1;
return Param{
.first_doc_comment = first_doc_comment,
.comptime_noalias = comptime_noalias,
.name_token = name_token,
.anytype_ellipsis3 = it.tok_i - 1,
- .type_expr = 0,
+ .type_expr = null,
};
}
it.tok_flag = false;
@@ -2728,7 +2767,7 @@ pub const full = struct {
pub const Components = struct {
lbrace: TokenIndex,
fields: []const Node.Index,
- type_expr: Node.Index,
+ type_expr: Node.OptionalIndex,
};
};
@@ -2738,7 +2777,7 @@ pub const full = struct {
pub const Components = struct {
lbrace: TokenIndex,
elements: []const Node.Index,
- type_expr: Node.Index,
+ type_expr: Node.OptionalIndex,
};
};
@@ -2748,7 +2787,7 @@ pub const full = struct {
pub const Components = struct {
lbracket: TokenIndex,
elem_count: Node.Index,
- sentinel: Node.Index,
+ sentinel: Node.OptionalIndex,
elem_type: Node.Index,
};
};
@@ -2762,11 +2801,11 @@ pub const full = struct {
pub const Components = struct {
main_token: TokenIndex,
- align_node: Node.Index,
- addrspace_node: Node.Index,
- sentinel: Node.Index,
- bit_range_start: Node.Index,
- bit_range_end: Node.Index,
+ align_node: Node.OptionalIndex,
+ addrspace_node: Node.OptionalIndex,
+ sentinel: Node.OptionalIndex,
+ bit_range_start: Node.OptionalIndex,
+ bit_range_end: Node.OptionalIndex,
child_type: Node.Index,
};
};
@@ -2778,8 +2817,8 @@ pub const full = struct {
sliced: Node.Index,
lbracket: TokenIndex,
start: Node.Index,
- end: Node.Index,
- sentinel: Node.Index,
+ end: Node.OptionalIndex,
+ sentinel: Node.OptionalIndex,
};
};
@@ -2792,7 +2831,7 @@ pub const full = struct {
/// Populated when main_token is Keyword_union.
enum_token: ?TokenIndex,
members: []const Node.Index,
- arg: Node.Index,
+ arg: Node.OptionalIndex,
};
};
@@ -2935,16 +2974,82 @@ pub const Error = struct {
};
};
+/// Index into `extra_data`.
+pub const ExtraIndex = enum(u32) {
+ _,
+};
+
pub const Node = struct {
tag: Tag,
main_token: TokenIndex,
data: Data,
- pub const Index = u32;
+ /// Index into `nodes`.
+ pub const Index = enum(u32) {
+ root = 0,
+ _,
+
+ pub fn toOptional(i: Index) OptionalIndex {
+ const result: OptionalIndex = @enumFromInt(@intFromEnum(i));
+ assert(result != .none);
+ return result;
+ }
+
+ pub fn toOffset(base: Index, destination: Index) Offset {
+ const base_i64: i64 = @intFromEnum(base);
+ const destination_i64: i64 = @intFromEnum(destination);
+ return @enumFromInt(destination_i64 - base_i64);
+ }
+ };
+
+ /// Index into `nodes`, or null.
+ pub const OptionalIndex = enum(u32) {
+ root = 0,
+ none = std.math.maxInt(u32),
+ _,
+
+ pub fn unwrap(oi: OptionalIndex) ?Index {
+ return if (oi == .none) null else @enumFromInt(@intFromEnum(oi));
+ }
+
+ pub fn fromOptional(oi: ?Index) OptionalIndex {
+ return if (oi) |i| i.toOptional() else .none;
+ }
+ };
+
+ /// A relative node index.
+ pub const Offset = enum(i32) {
+ zero = 0,
+ _,
+
+ pub fn toOptional(o: Offset) OptionalOffset {
+ const result: OptionalOffset = @enumFromInt(@intFromEnum(o));
+ assert(result != .none);
+ return result;
+ }
+
+ pub fn toAbsolute(offset: Offset, base: Index) Index {
+ return @enumFromInt(@as(i64, @intFromEnum(base)) + @intFromEnum(offset));
+ }
+ };
+
+ /// A relative node index, or null.
+ pub const OptionalOffset = enum(i32) {
+ none = std.math.maxInt(i32),
+ _,
+
+ pub fn unwrap(oo: OptionalOffset) ?Offset {
+ return if (oo == .none) null else @enumFromInt(@intFromEnum(oo));
+ }
+ };
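These wrapper enums replace the old convention of `0` meaning "no node" with type-checked conversions. A short round-trip sketch as a standalone test, with `Node` meaning `std.zig.Ast.Node`:

const std = @import("std");

test "Node.Index <-> Node.OptionalIndex round trip" {
    const Node = std.zig.Ast.Node;
    const idx: Node.Index = @enumFromInt(42);
    const opt = idx.toOptional();
    try std.testing.expectEqual(idx, opt.unwrap().?);
    // .none unwraps to null instead of a magic zero index.
    try std.testing.expect(Node.OptionalIndex.none.unwrap() == null);
}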
comptime {
// Goal is to keep this under one byte for efficiency.
assert(@sizeOf(Tag) == 1);
+
+ if (!std.debug.runtime_safety) {
+ assert(@sizeOf(Data) == 8);
+ }
}
/// Note: The FooComma/FooSemicolon variants exist to ease the implementation of
@@ -3435,9 +3540,26 @@ pub const Node = struct {
}
};
- pub const Data = struct {
- lhs: Index,
- rhs: Index,
+ pub const Data = union {
+ node: Index,
+ opt_node: OptionalIndex,
+ token: TokenIndex,
+ node_and_node: struct { Index, Index },
+ opt_node_and_opt_node: struct { OptionalIndex, OptionalIndex },
+ node_and_opt_node: struct { Index, OptionalIndex },
+ opt_node_and_node: struct { OptionalIndex, Index },
+ node_and_extra: struct { Index, ExtraIndex },
+ extra_and_node: struct { ExtraIndex, Index },
+ extra_and_opt_node: struct { ExtraIndex, OptionalIndex },
+ node_and_token: struct { Index, TokenIndex },
+ token_and_node: struct { TokenIndex, Index },
+ token_and_token: struct { TokenIndex, TokenIndex },
+ opt_node_and_token: struct { OptionalIndex, TokenIndex },
+ opt_token_and_node: struct { OptionalTokenIndex, Index },
+ opt_token_and_opt_node: struct { OptionalTokenIndex, OptionalIndex },
+ opt_token_and_opt_token: struct { OptionalTokenIndex, OptionalTokenIndex },
+ @"for": struct { ExtraIndex, For },
+ extra_range: SubRange,
};
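Because `Data` is an untagged union, reading a field that does not match the node's tag is illegal behavior; the node tag acts as the external discriminant, which is why every accessor above establishes the tag before touching the payload. A minimal sketch (helper name hypothetical):

// `.deref` nodes store a single operand in the `.node` field; the AstGen
// change below reads it the same way.
fn derefOperand(tree: Ast, node: Node.Index) Node.Index {
    std.debug.assert(tree.nodeTag(node) == .deref);
    return tree.nodeData(node).node;
}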
pub const LocalVarDecl = struct {
@@ -3451,24 +3573,24 @@ pub const Node = struct {
};
pub const PtrType = struct {
- sentinel: Index,
- align_node: Index,
- addrspace_node: Index,
+ sentinel: OptionalIndex,
+ align_node: OptionalIndex,
+ addrspace_node: OptionalIndex,
};
pub const PtrTypeBitRange = struct {
- sentinel: Index,
+ sentinel: OptionalIndex,
align_node: Index,
- addrspace_node: Index,
+ addrspace_node: OptionalIndex,
bit_range_start: Index,
bit_range_end: Index,
};
pub const SubRange = struct {
- /// Index into sub_list.
- start: Index,
- /// Index into sub_list.
- end: Index,
+ /// Index into extra_data.
+ start: ExtraIndex,
+ /// Index into extra_data.
+ end: ExtraIndex,
};
pub const If = struct {
@@ -3483,13 +3605,13 @@ pub const Node = struct {
pub const GlobalVarDecl = struct {
/// Populated if there is an explicit type ascription.
- type_node: Index,
+ type_node: OptionalIndex,
/// Populated if align(A) is present.
- align_node: Index,
+ align_node: OptionalIndex,
/// Populated if addrspace(A) is present.
- addrspace_node: Index,
+ addrspace_node: OptionalIndex,
/// Populated if linksection(A) is present.
- section_node: Index,
+ section_node: OptionalIndex,
};
pub const Slice = struct {
@@ -3499,13 +3621,13 @@ pub const Node = struct {
pub const SliceSentinel = struct {
start: Index,
- /// May be 0 if the slice is "open"
- end: Index,
+ /// May be .none if the slice is "open"
+ end: OptionalIndex,
sentinel: Index,
};
pub const While = struct {
- cont_expr: Index,
+ cont_expr: OptionalIndex,
then_expr: Index,
else_expr: Index,
};
@@ -3522,44 +3644,44 @@ pub const Node = struct {
pub const FnProtoOne = struct {
/// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters.
- param: Index,
+ param: OptionalIndex,
/// Populated if align(A) is present.
- align_expr: Index,
+ align_expr: OptionalIndex,
/// Populated if addrspace(A) is present.
- addrspace_expr: Index,
+ addrspace_expr: OptionalIndex,
/// Populated if linksection(A) is present.
- section_expr: Index,
+ section_expr: OptionalIndex,
/// Populated if callconv(A) is present.
- callconv_expr: Index,
+ callconv_expr: OptionalIndex,
};
pub const FnProto = struct {
- params_start: Index,
- params_end: Index,
+ params_start: ExtraIndex,
+ params_end: ExtraIndex,
/// Populated if align(A) is present.
- align_expr: Index,
+ align_expr: OptionalIndex,
/// Populated if addrspace(A) is present.
- addrspace_expr: Index,
+ addrspace_expr: OptionalIndex,
/// Populated if linksection(A) is present.
- section_expr: Index,
+ section_expr: OptionalIndex,
/// Populated if callconv(A) is present.
- callconv_expr: Index,
+ callconv_expr: OptionalIndex,
};
pub const Asm = struct {
- items_start: Index,
- items_end: Index,
+ items_start: ExtraIndex,
+ items_end: ExtraIndex,
/// Needed to make lastToken() work.
rparen: TokenIndex,
};
};
-pub fn nodeToSpan(tree: *const Ast, node: u32) Span {
+pub fn nodeToSpan(tree: *const Ast, node: Ast.Node.Index) Span {
return tokensToSpan(
tree,
tree.firstToken(node),
tree.lastToken(node),
- tree.nodes.items(.main_token)[node],
+ tree.nodeMainToken(node),
);
}
@@ -3568,7 +3690,6 @@ pub fn tokenToSpan(tree: *const Ast, token: Ast.TokenIndex) Span {
}
pub fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex, main: Ast.TokenIndex) Span {
- const token_starts = tree.tokens.items(.start);
var start_tok = start;
var end_tok = end;
@@ -3582,9 +3703,9 @@ pub fn tokensToSpan(tree: *const Ast, start: Ast.TokenIndex, end: Ast.TokenIndex
start_tok = main;
end_tok = main;
}
- const start_off = token_starts[start_tok];
- const end_off = token_starts[end_tok] + @as(u32, @intCast(tree.tokenSlice(end_tok).len));
- return Span{ .start = start_off, .end = end_off, .main = token_starts[main] };
+ const start_off = tree.tokenStart(start_tok);
+ const end_off = tree.tokenStart(end_tok) + @as(u32, @intCast(tree.tokenSlice(end_tok).len));
+ return Span{ .start = start_off, .end = end_off, .main = tree.tokenStart(main) };
}
const std = @import("../std.zig");
lib/std/zig/AstGen.zig
@@ -99,8 +99,18 @@ fn setExtra(astgen: *AstGen, index: usize, extra: anytype) void {
Zir.Inst.Declaration.Name,
std.zig.SimpleComptimeReason,
Zir.NullTerminatedString,
+ // Ast.TokenIndex is missing because it is a u32.
+ Ast.OptionalTokenIndex,
+ Ast.Node.Index,
+ Ast.Node.OptionalIndex,
=> @intFromEnum(@field(extra, field.name)),
+ Ast.TokenOffset,
+ Ast.OptionalTokenOffset,
+ Ast.Node.Offset,
+ Ast.Node.OptionalOffset,
+ => @bitCast(@intFromEnum(@field(extra, field.name))),
+
i32,
Zir.Inst.Call.Flags,
Zir.Inst.BuiltinCall.Flags,
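The `@bitCast(@intFromEnum(...))` arms exist because the offset enums are backed by `i32` while ZIR extra words are raw `u32`s; the bit pattern is stored and later reversed on load. A standalone sketch of the round trip, using a stand-in enum rather than the real ZIR types:

const std = @import("std");

const Offset = enum(i32) { zero = 0, _ };

test "signed offset stored as a raw u32 extra word" {
    const off: Offset = @enumFromInt(-2);
    // Store: reinterpret the i32 payload as a u32 word.
    const word: u32 = @bitCast(@intFromEnum(off));
    // Load: reverse the cast to recover the signed value.
    const back: Offset = @enumFromInt(@as(i32, @bitCast(word)));
    try std.testing.expectEqual(off, back);
}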
@@ -168,7 +178,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
.is_comptime = true,
.parent = &top_scope.base,
.anon_name_strategy = .parent,
- .decl_node_index = 0,
+ .decl_node_index = .root,
.decl_line = 0,
.astgen = &astgen,
.instructions = &gz_instructions,
@@ -182,10 +192,10 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
if (AstGen.structDeclInner(
&gen_scope,
&gen_scope.base,
- 0,
+ .root,
tree.containerDeclRoot(),
.auto,
- 0,
+ .none,
)) |struct_decl_ref| {
assert(struct_decl_ref.toIndex().? == .main_struct_inst);
break :fatal false;
@@ -430,9 +440,7 @@ fn reachableExprComptime(
fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root => unreachable,
.@"usingnamespace" => unreachable,
.test_decl => unreachable,
@@ -600,7 +608,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Ins
.builtin_call_two,
.builtin_call_two_comma,
=> {
- const builtin_token = main_tokens[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
// If the builtin is an invalid name, we don't cause an error here; instead
// let it pass, and the error will be "invalid builtin function" later.
@@ -631,10 +639,6 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Ins
fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
const prev_anon_name_strategy = gz.anon_name_strategy;
defer gz.anon_name_strategy = prev_anon_name_strategy;
@@ -642,7 +646,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
gz.anon_name_strategy = .anon;
}
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root => unreachable, // Top-level declaration.
.@"usingnamespace" => unreachable, // Top-level declaration.
.test_decl => unreachable, // Top-level declaration.
@@ -752,8 +756,8 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
},
// zig fmt: off
- .shl => return shiftOp(gz, scope, ri, node, node_datas[node].lhs, node_datas[node].rhs, .shl),
- .shr => return shiftOp(gz, scope, ri, node, node_datas[node].lhs, node_datas[node].rhs, .shr),
+ .shl => return shiftOp(gz, scope, ri, node, tree.nodeData(node).node_and_node[0], tree.nodeData(node).node_and_node[1], .shl),
+ .shr => return shiftOp(gz, scope, ri, node, tree.nodeData(node).node_and_node[0], tree.nodeData(node).node_and_node[1], .shr),
.add => return simpleBinOp(gz, scope, ri, node, .add),
.add_wrap => return simpleBinOp(gz, scope, ri, node, .addwrap),
@@ -783,10 +787,11 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
// This syntax form does not currently use the result type in the language specification.
// However, the result type can be used to emit more optimal code for large multiplications by
// having Sema perform a coercion before the multiplication operation.
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
const result = try gz.addPlNode(.array_mul, node, Zir.Inst.ArrayMul{
.res_ty = if (try ri.rl.resultType(gz, node)) |t| t else .none,
- .lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs),
- .rhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs, .array_mul_factor),
+ .lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node),
+ .rhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, rhs_node, .array_mul_factor),
});
return rvalue(gz, ri, result, node);
},
@@ -797,8 +802,9 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.merge_error_sets => .merge_error_sets,
else => unreachable,
};
- const lhs = try reachableTypeExpr(gz, scope, node_datas[node].lhs, node);
- const rhs = try reachableTypeExpr(gz, scope, node_datas[node].rhs, node);
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
+ const lhs = try reachableTypeExpr(gz, scope, lhs_node, node);
+ const rhs = try reachableTypeExpr(gz, scope, rhs_node, node);
const result = try gz.addPlNode(inst_tag, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs });
return rvalue(gz, ri, result, node);
},
@@ -806,11 +812,11 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.bool_and => return boolBinOp(gz, scope, ri, node, .bool_br_and),
.bool_or => return boolBinOp(gz, scope, ri, node, .bool_br_or),
- .bool_not => return simpleUnOp(gz, scope, ri, node, coerced_bool_ri, node_datas[node].lhs, .bool_not),
- .bit_not => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, node_datas[node].lhs, .bit_not),
+ .bool_not => return simpleUnOp(gz, scope, ri, node, coerced_bool_ri, tree.nodeData(node).node, .bool_not),
+ .bit_not => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, tree.nodeData(node).node, .bit_not),
.negation => return negation(gz, scope, ri, node),
- .negation_wrap => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, node_datas[node].lhs, .negate_wrap),
+ .negation_wrap => return simpleUnOp(gz, scope, ri, node, .{ .rl = .none }, tree.nodeData(node).node, .negate_wrap),
.identifier => return identifier(gz, scope, ri, node, null),
@@ -866,10 +872,11 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
const if_full = tree.fullIf(node).?;
no_switch_on_err: {
const error_token = if_full.error_token orelse break :no_switch_on_err;
- const full_switch = tree.fullSwitch(if_full.ast.else_expr) orelse break :no_switch_on_err;
+ const else_node = if_full.ast.else_expr.unwrap() orelse break :no_switch_on_err;
+ const full_switch = tree.fullSwitch(else_node) orelse break :no_switch_on_err;
if (full_switch.label_token != null) break :no_switch_on_err;
- if (node_tags[full_switch.ast.condition] != .identifier) break :no_switch_on_err;
- if (!mem.eql(u8, tree.tokenSlice(error_token), tree.tokenSlice(main_tokens[full_switch.ast.condition]))) break :no_switch_on_err;
+ if (tree.nodeTag(full_switch.ast.condition) != .identifier) break :no_switch_on_err;
+ if (!mem.eql(u8, tree.tokenSlice(error_token), tree.tokenSlice(tree.nodeMainToken(full_switch.ast.condition)))) break :no_switch_on_err;
return switchExprErrUnion(gz, scope, ri.br(), node, .@"if");
}
return ifExpr(gz, scope, ri.br(), node, if_full);
@@ -887,8 +894,8 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.slice_sentinel,
=> {
const full = tree.fullSlice(node).?;
- if (full.ast.end != 0 and
- node_tags[full.ast.sliced] == .slice_open and
+ if (full.ast.end != .none and
+ tree.nodeTag(full.ast.sliced) == .slice_open and
nodeIsTriviallyZero(tree, full.ast.start))
{
const lhs_extra = tree.sliceOpen(full.ast.sliced).ast;
@@ -896,8 +903,8 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
const lhs = try expr(gz, scope, .{ .rl = .ref }, lhs_extra.sliced);
const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, lhs_extra.start);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
- const len = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, full.ast.end);
- const sentinel = if (full.ast.sentinel != 0) try expr(gz, scope, .{ .rl = .none }, full.ast.sentinel) else .none;
+ const len = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, full.ast.end.unwrap().?);
+ const sentinel = if (full.ast.sentinel.unwrap()) |sentinel| try expr(gz, scope, .{ .rl = .none }, sentinel) else .none;
try emitDbgStmt(gz, cursor);
const result = try gz.addPlNode(.slice_length, node, Zir.Inst.SliceLength{
.lhs = lhs,
@@ -912,10 +919,10 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
const start = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, full.ast.start);
- const end = if (full.ast.end != 0) try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, full.ast.end) else .none;
- const sentinel = if (full.ast.sentinel != 0) s: {
+ const end = if (full.ast.end.unwrap()) |end| try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, end) else .none;
+ const sentinel = if (full.ast.sentinel.unwrap()) |sentinel| s: {
const sentinel_ty = try gz.addUnNode(.slice_sentinel_ty, lhs, node);
- break :s try expr(gz, scope, .{ .rl = .{ .coerced_ty = sentinel_ty } }, full.ast.sentinel);
+ break :s try expr(gz, scope, .{ .rl = .{ .coerced_ty = sentinel_ty } }, sentinel);
} else .none;
try emitDbgStmt(gz, cursor);
if (sentinel != .none) {
@@ -943,7 +950,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
},
.deref => {
- const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs);
+ const lhs = try expr(gz, scope, .{ .rl = .none }, tree.nodeData(node).node);
_ = try gz.addUnNode(.validate_deref, lhs, node);
switch (ri.rl) {
.ref, .ref_coerced_ty => return lhs,
@@ -958,17 +965,17 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
_ = try gz.addUnTok(.validate_ref_ty, res_ty_inst, tree.firstToken(node));
break :rl .{ .ref_coerced_ty = res_ty_inst };
} else .ref;
- const result = try expr(gz, scope, .{ .rl = operand_rl }, node_datas[node].lhs);
+ const result = try expr(gz, scope, .{ .rl = operand_rl }, tree.nodeData(node).node);
return rvalue(gz, ri, result, node);
},
.optional_type => {
- const operand = try typeExpr(gz, scope, node_datas[node].lhs);
+ const operand = try typeExpr(gz, scope, tree.nodeData(node).node);
const result = try gz.addUnNode(.optional_type, operand, node);
return rvalue(gz, ri, result, node);
},
.unwrap_optional => switch (ri.rl) {
.ref, .ref_coerced_ty => {
- const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+ const lhs = try expr(gz, scope, .{ .rl = .ref }, tree.nodeData(node).node_and_token[0]);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
try emitDbgStmt(gz, cursor);
@@ -976,7 +983,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return gz.addUnNode(.optional_payload_safe_ptr, lhs, node);
},
else => {
- const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs);
+ const lhs = try expr(gz, scope, .{ .rl = .none }, tree.nodeData(node).node_and_token[0]);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
try emitDbgStmt(gz, cursor);
@@ -994,7 +1001,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return blockExpr(gz, scope, ri, node, statements, .normal);
},
.enum_literal => if (try ri.rl.resultType(gz, node)) |res_ty| {
- const str_index = try astgen.identAsString(main_tokens[node]);
+ const str_index = try astgen.identAsString(tree.nodeMainToken(node));
const res = try gz.addPlNode(.decl_literal, node, Zir.Inst.Field{
.lhs = res_ty,
.field_name_start = str_index,
@@ -1004,8 +1011,8 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.ty, .coerced_ty => return res, // `decl_literal` does the coercion for us
.ref_coerced_ty, .ptr, .inferred_ptr, .destructure => return rvalue(gz, ri, res, node),
}
- } else return simpleStrTok(gz, ri, main_tokens[node], node, .enum_literal),
- .error_value => return simpleStrTok(gz, ri, node_datas[node].rhs, node, .error_value),
+ } else return simpleStrTok(gz, ri, tree.nodeMainToken(node), node, .enum_literal),
+ .error_value => return simpleStrTok(gz, ri, tree.nodeData(node).opt_token_and_opt_token[1].unwrap().?, node, .error_value),
// TODO restore this when implementing https://github.com/ziglang/zig/issues/6025
// .anyframe_literal => return rvalue(gz, ri, .anyframe_type, node),
.anyframe_literal => {
@@ -1013,22 +1020,22 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
return rvalue(gz, ri, result, node);
},
.anyframe_type => {
- const return_type = try typeExpr(gz, scope, node_datas[node].rhs);
+ const return_type = try typeExpr(gz, scope, tree.nodeData(node).token_and_node[1]);
const result = try gz.addUnNode(.anyframe_type, return_type, node);
return rvalue(gz, ri, result, node);
},
.@"catch" => {
- const catch_token = main_tokens[node];
- const payload_token: ?Ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe)
+ const catch_token = tree.nodeMainToken(node);
+ const payload_token: ?Ast.TokenIndex = if (tree.tokenTag(catch_token + 1) == .pipe)
catch_token + 2
else
null;
no_switch_on_err: {
const capture_token = payload_token orelse break :no_switch_on_err;
- const full_switch = tree.fullSwitch(node_datas[node].rhs) orelse break :no_switch_on_err;
+ const full_switch = tree.fullSwitch(tree.nodeData(node).node_and_node[1]) orelse break :no_switch_on_err;
if (full_switch.label_token != null) break :no_switch_on_err;
- if (node_tags[full_switch.ast.condition] != .identifier) break :no_switch_on_err;
- if (!mem.eql(u8, tree.tokenSlice(capture_token), tree.tokenSlice(main_tokens[full_switch.ast.condition]))) break :no_switch_on_err;
+ if (tree.nodeTag(full_switch.ast.condition) != .identifier) break :no_switch_on_err;
+ if (!mem.eql(u8, tree.tokenSlice(capture_token), tree.tokenSlice(tree.nodeMainToken(full_switch.ast.condition)))) break :no_switch_on_err;
return switchExprErrUnion(gz, scope, ri.br(), node, .@"catch");
}
switch (ri.rl) {
@@ -1037,11 +1044,9 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
scope,
ri,
node,
- node_datas[node].lhs,
.is_non_err_ptr,
.err_union_payload_unsafe_ptr,
.err_union_code_ptr,
- node_datas[node].rhs,
payload_token,
),
else => return orelseCatchExpr(
@@ -1049,11 +1054,9 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
scope,
ri,
node,
- node_datas[node].lhs,
.is_non_err,
.err_union_payload_unsafe,
.err_union_code,
- node_datas[node].rhs,
payload_token,
),
}
@@ -1064,11 +1067,9 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
scope,
ri,
node,
- node_datas[node].lhs,
.is_non_null_ptr,
.optional_payload_unsafe_ptr,
undefined,
- node_datas[node].rhs,
null,
),
else => return orelseCatchExpr(
@@ -1076,11 +1077,9 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
scope,
ri,
node,
- node_datas[node].lhs,
.is_non_null,
.optional_payload_unsafe,
undefined,
- node_datas[node].rhs,
null,
),
},
@@ -1110,7 +1109,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.@"break" => return breakExpr(gz, scope, node),
.@"continue" => return continueExpr(gz, scope, node),
- .grouped_expression => return expr(gz, scope, ri, node_datas[node].lhs),
+ .grouped_expression => return expr(gz, scope, ri, tree.nodeData(node).node_and_token[0]),
.array_type => return arrayType(gz, scope, ri, node),
.array_type_sentinel => return arrayTypeSentinel(gz, scope, ri, node),
.char_literal => return charLiteral(gz, ri, node),
@@ -1124,7 +1123,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
.@"await" => return awaitExpr(gz, scope, ri, node),
.@"resume" => return resumeExpr(gz, scope, ri, node),
- .@"try" => return tryExpr(gz, scope, ri, node, node_datas[node].lhs),
+ .@"try" => return tryExpr(gz, scope, ri, node, tree.nodeData(node).node),
.array_init_one,
.array_init_one_comma,
@@ -1171,16 +1170,14 @@ fn nosuspendExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const body_node = node_datas[node].lhs;
- assert(body_node != 0);
- if (gz.nosuspend_node != 0) {
+ const body_node = tree.nodeData(node).node;
+ if (gz.nosuspend_node.unwrap()) |nosuspend_node| {
try astgen.appendErrorNodeNotes(node, "redundant nosuspend block", .{}, &[_]u32{
- try astgen.errNoteNode(gz.nosuspend_node, "other nosuspend block here", .{}),
+ try astgen.errNoteNode(nosuspend_node, "other nosuspend block here", .{}),
});
}
- gz.nosuspend_node = node;
- defer gz.nosuspend_node = 0;
+ gz.nosuspend_node = node.toOptional();
+ defer gz.nosuspend_node = .none;
return expr(gz, scope, ri, body_node);
}
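
The `.none` / `.unwrap()` / `.toOptional()` dance above replaces the old convention of index 0 meaning "absent". Under the new API, index 0 is a valid node (the root), so absence needs a dedicated encoding. A minimal sketch of the idea, assuming the reserved value is maxInt(u32) (the exact value is an implementation detail of std.zig.Ast):

const std = @import("std");

pub const Index = enum(u32) {
    root = 0,
    _,

    pub fn toOptional(i: Index) OptionalIndex {
        return @enumFromInt(@intFromEnum(i));
    }
};

// Same 32 bits as Index, with one bit pattern reserved to mean "absent".
pub const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    pub fn unwrap(oi: OptionalIndex) ?Index {
        return if (oi == .none) null else @enumFromInt(@intFromEnum(oi));
    }
};

Hence `gz.nosuspend_node` is now an `Ast.Node.OptionalIndex`, and the old `!= 0` guards become `!= .none` or `if (x.unwrap()) |y|`.
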
@@ -1192,26 +1189,24 @@ fn suspendExpr(
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const body_node = node_datas[node].lhs;
+ const body_node = tree.nodeData(node).node;
- if (gz.nosuspend_node != 0) {
+ if (gz.nosuspend_node.unwrap()) |nosuspend_node| {
return astgen.failNodeNotes(node, "suspend inside nosuspend block", .{}, &[_]u32{
- try astgen.errNoteNode(gz.nosuspend_node, "nosuspend block here", .{}),
+ try astgen.errNoteNode(nosuspend_node, "nosuspend block here", .{}),
});
}
- if (gz.suspend_node != 0) {
+ if (gz.suspend_node.unwrap()) |suspend_node| {
return astgen.failNodeNotes(node, "cannot suspend inside suspend block", .{}, &[_]u32{
- try astgen.errNoteNode(gz.suspend_node, "other suspend block here", .{}),
+ try astgen.errNoteNode(suspend_node, "other suspend block here", .{}),
});
}
- assert(body_node != 0);
const suspend_inst = try gz.makeBlockInst(.suspend_block, node);
try gz.instructions.append(gpa, suspend_inst);
var suspend_scope = gz.makeSubBlock(scope);
- suspend_scope.suspend_node = node;
+ suspend_scope.suspend_node = node.toOptional();
defer suspend_scope.unstack();
const body_result = try fullBodyExpr(&suspend_scope, &suspend_scope.base, .{ .rl = .none }, body_node, .normal);
@@ -1231,16 +1226,15 @@ fn awaitExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const rhs_node = node_datas[node].lhs;
+ const rhs_node = tree.nodeData(node).node;
- if (gz.suspend_node != 0) {
+ if (gz.suspend_node.unwrap()) |suspend_node| {
return astgen.failNodeNotes(node, "cannot await inside suspend block", .{}, &[_]u32{
- try astgen.errNoteNode(gz.suspend_node, "suspend block here", .{}),
+ try astgen.errNoteNode(suspend_node, "suspend block here", .{}),
});
}
const operand = try expr(gz, scope, .{ .rl = .ref }, rhs_node);
- const result = if (gz.nosuspend_node != 0)
+ const result = if (gz.nosuspend_node != .none)
try gz.addExtendedPayload(.await_nosuspend, Zir.Inst.UnNode{
.node = gz.nodeIndexToRelative(node),
.operand = operand,
@@ -1259,8 +1253,7 @@ fn resumeExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const rhs_node = node_datas[node].lhs;
+ const rhs_node = tree.nodeData(node).node;
const operand = try expr(gz, scope, .{ .rl = .ref }, rhs_node);
const result = try gz.addUnNode(.@"resume", operand, node);
return rvalue(gz, ri, result, node);
@@ -1275,33 +1268,33 @@ fn fnProtoExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
if (fn_proto.name_token) |some| {
return astgen.failTok(some, "function type cannot have a name", .{});
}
- if (fn_proto.ast.align_expr != 0) {
- return astgen.failNode(fn_proto.ast.align_expr, "function type cannot have an alignment", .{});
+ if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
+ return astgen.failNode(align_expr, "function type cannot have an alignment", .{});
}
- if (fn_proto.ast.addrspace_expr != 0) {
- return astgen.failNode(fn_proto.ast.addrspace_expr, "function type cannot have an addrspace", .{});
+ if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
+ return astgen.failNode(addrspace_expr, "function type cannot have an addrspace", .{});
}
- if (fn_proto.ast.section_expr != 0) {
- return astgen.failNode(fn_proto.ast.section_expr, "function type cannot have a linksection", .{});
+ if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
+ return astgen.failNode(section_expr, "function type cannot have a linksection", .{});
}
- const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
- const is_inferred_error = token_tags[maybe_bang] == .bang;
+ const return_type = fn_proto.ast.return_type.unwrap().?;
+ const maybe_bang = tree.firstToken(return_type) - 1;
+ const is_inferred_error = tree.tokenTag(maybe_bang) == .bang;
if (is_inferred_error) {
return astgen.failTok(maybe_bang, "function type cannot have an inferred error set", .{});
}
const is_extern = blk: {
const maybe_extern_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_extern_token] == .keyword_extern;
+ break :blk tree.tokenTag(maybe_extern_token) == .keyword_extern;
};
assert(!is_extern);
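
Three unwrap idioms recur throughout the rewrite, depending on what is known about the field; all three appear in this hunk and the ones around it (excerpted from the diff, not self-contained):

// 1. The field may legitimately be absent: branch on it.
if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
    return astgen.failNode(align_expr, "function type cannot have an alignment", .{});
}

// 2. Absence selects another code path: unwrap with an `orelse` fallback.
const type_expr = array_init.ast.type_expr.unwrap() orelse break :inst .{ .none, .none };

// 3. A parser invariant guarantees presence: assert it with `.?`.
const return_type = fn_proto.ast.return_type.unwrap().?;
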
@@ -1318,7 +1311,6 @@ fn fnProtoExprInner(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
var block_scope = gz.makeSubBlock(scope);
defer block_scope.unstack();
@@ -1330,7 +1322,7 @@ fn fnProtoExprInner(
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree);
while (it.next()) |param| : (param_type_i += 1) {
- const is_comptime = if (param.comptime_noalias) |token| switch (token_tags[token]) {
+ const is_comptime = if (param.comptime_noalias) |token| switch (tree.tokenTag(token)) {
.keyword_noalias => is_comptime: {
noalias_bits |= @as(u32, 1) << (std.math.cast(u5, param_type_i) orelse
return astgen.failTok(token, "this compiler implementation only supports 'noalias' on the first 32 parameters", .{}));
@@ -1341,7 +1333,7 @@ fn fnProtoExprInner(
} else false;
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
- switch (token_tags[token]) {
+ switch (tree.tokenTag(token)) {
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
@@ -1364,16 +1356,14 @@ fn fnProtoExprInner(
.param_anytype;
_ = try block_scope.addStrTok(tag, param_name, name_token);
} else {
- const param_type_node = param.type_expr;
- assert(param_type_node != 0);
+ const param_type_node = param.type_expr.?;
var param_gz = block_scope.makeSubBlock(scope);
defer param_gz.unstack();
param_gz.is_comptime = true;
const param_type = try fullBodyExpr(¶m_gz, scope, coerced_type_ri, param_type_node, .normal);
const param_inst_expected: Zir.Inst.Index = @enumFromInt(astgen.instructions.len + 1);
_ = try param_gz.addBreakWithSrcNode(.break_inline, param_inst_expected, param_type, param_type_node);
- const main_tokens = tree.nodes.items(.main_token);
- const name_token = param.name_token orelse main_tokens[param_type_node];
+ const name_token = param.name_token orelse tree.nodeMainToken(param_type_node);
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
// We pass `prev_param_insts` as `&.{}` here because a function prototype can't refer to previous
// arguments (we haven't set up scopes here).
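
Note the two optional flavors in play here: fields stored in the tree (`fn_proto.ast.*`) use the packed `OptionalIndex` and need `.unwrap()`, while the `full.FnProto` iterator's `param.type_expr` is, as the bare `.?` in this hunk suggests, an ordinary Zig optional:

const param_type_node = param.type_expr.?;               // ?Ast.Node.Index
const ret_ty_node = fn_proto.ast.return_type.unwrap().?; // Ast.Node.OptionalIndex
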
@@ -1384,12 +1374,12 @@ fn fnProtoExprInner(
break :is_var_args false;
};
- const cc: Zir.Inst.Ref = if (fn_proto.ast.callconv_expr != 0)
+ const cc: Zir.Inst.Ref = if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr|
try comptimeExpr(
&block_scope,
scope,
- .{ .rl = .{ .coerced_ty = try block_scope.addBuiltinValue(fn_proto.ast.callconv_expr, .calling_convention) } },
- fn_proto.ast.callconv_expr,
+ .{ .rl = .{ .coerced_ty = try block_scope.addBuiltinValue(callconv_expr, .calling_convention) } },
+ callconv_expr,
.@"callconv",
)
else if (implicit_ccc)
@@ -1397,7 +1387,8 @@ fn fnProtoExprInner(
else
.none;
- const ret_ty = try comptimeExpr(&block_scope, scope, coerced_type_ri, fn_proto.ast.return_type, .function_ret_ty);
+ const ret_ty_node = fn_proto.ast.return_type.unwrap().?;
+ const ret_ty = try comptimeExpr(&block_scope, scope, coerced_type_ri, ret_ty_node, .function_ret_ty);
const result = try block_scope.addFunc(.{
.src_node = fn_proto.ast.proto_node,
@@ -1437,33 +1428,32 @@ fn arrayInitExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
assert(array_init.ast.elements.len != 0); // Otherwise it would be struct init.
const array_ty: Zir.Inst.Ref, const elem_ty: Zir.Inst.Ref = inst: {
- if (array_init.ast.type_expr == 0) break :inst .{ .none, .none };
+ const type_expr = array_init.ast.type_expr.unwrap() orelse break :inst .{ .none, .none };
infer: {
- const array_type: Ast.full.ArrayType = tree.fullArrayType(array_init.ast.type_expr) orelse break :infer;
+ const array_type: Ast.full.ArrayType = tree.fullArrayType(type_expr) orelse break :infer;
// This intentionally does not support `@"_"` syntax.
- if (node_tags[array_type.ast.elem_count] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[array_type.ast.elem_count]), "_"))
+ if (tree.nodeTag(array_type.ast.elem_count) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(array_type.ast.elem_count)), "_"))
{
const len_inst = try gz.addInt(array_init.ast.elements.len);
const elem_type = try typeExpr(gz, scope, array_type.ast.elem_type);
- if (array_type.ast.sentinel == 0) {
- const array_type_inst = try gz.addPlNode(.array_type, array_init.ast.type_expr, Zir.Inst.Bin{
+ if (array_type.ast.sentinel == .none) {
+ const array_type_inst = try gz.addPlNode(.array_type, type_expr, Zir.Inst.Bin{
.lhs = len_inst,
.rhs = elem_type,
});
break :inst .{ array_type_inst, elem_type };
} else {
- const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, array_type.ast.sentinel, .array_sentinel);
+ const sentinel_node = array_type.ast.sentinel.unwrap().?;
+ const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, sentinel_node, .array_sentinel);
const array_type_inst = try gz.addPlNode(
.array_type_sentinel,
- array_init.ast.type_expr,
+ type_expr,
Zir.Inst.ArrayTypeSentinel{
.len = len_inst,
.elem_type = elem_type,
@@ -1474,7 +1464,7 @@ fn arrayInitExpr(
}
}
}
- const array_type_inst = try typeExpr(gz, scope, array_init.ast.type_expr);
+ const array_type_inst = try typeExpr(gz, scope, type_expr);
_ = try gz.addPlNode(.validate_array_init_ty, node, Zir.Inst.ArrayInit{
.ty = array_type_inst,
.init_count = @intCast(array_init.ast.elements.len),
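
For context, the `_` element count that `arrayInitExpr` special-cases is Zig's inferred-length array syntax; the number of initializers becomes the length, with or without a sentinel:

const a = [_]u8{ 1, 2, 3 };   // elem_count is `_`: length inferred as 3
const b = [_:0]u8{ 1, 2, 3 }; // same, plus a 0 sentinel after the data
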
@@ -1682,7 +1672,7 @@ fn structInitExpr(
const astgen = gz.astgen;
const tree = astgen.tree;
- if (struct_init.ast.type_expr == 0) {
+ if (struct_init.ast.type_expr == .none) {
if (struct_init.ast.fields.len == 0) {
// Anonymous init with no fields.
switch (ri.rl) {
@@ -1706,32 +1696,32 @@ fn structInitExpr(
}
}
} else array: {
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const array_type: Ast.full.ArrayType = tree.fullArrayType(struct_init.ast.type_expr) orelse {
+ const type_expr = struct_init.ast.type_expr.unwrap().?;
+ const array_type: Ast.full.ArrayType = tree.fullArrayType(type_expr) orelse {
if (struct_init.ast.fields.len == 0) {
- const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
+ const ty_inst = try typeExpr(gz, scope, type_expr);
const result = try gz.addUnNode(.struct_init_empty, ty_inst, node);
return rvalue(gz, ri, result, node);
}
break :array;
};
- const is_inferred_array_len = node_tags[array_type.ast.elem_count] == .identifier and
+ const is_inferred_array_len = tree.nodeTag(array_type.ast.elem_count) == .identifier and
// This intentionally does not support `@"_"` syntax.
- mem.eql(u8, tree.tokenSlice(main_tokens[array_type.ast.elem_count]), "_");
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(array_type.ast.elem_count)), "_");
if (struct_init.ast.fields.len == 0) {
if (is_inferred_array_len) {
const elem_type = try typeExpr(gz, scope, array_type.ast.elem_type);
- const array_type_inst = if (array_type.ast.sentinel == 0) blk: {
- break :blk try gz.addPlNode(.array_type, struct_init.ast.type_expr, Zir.Inst.Bin{
+ const array_type_inst = if (array_type.ast.sentinel == .none) blk: {
+ break :blk try gz.addPlNode(.array_type, type_expr, Zir.Inst.Bin{
.lhs = .zero_usize,
.rhs = elem_type,
});
} else blk: {
- const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, array_type.ast.sentinel, .array_sentinel);
+ const sentinel_node = array_type.ast.sentinel.unwrap().?;
+ const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .ty = elem_type } }, sentinel_node, .array_sentinel);
break :blk try gz.addPlNode(
.array_type_sentinel,
- struct_init.ast.type_expr,
+ type_expr,
Zir.Inst.ArrayTypeSentinel{
.len = .zero_usize,
.elem_type = elem_type,
@@ -1742,12 +1732,12 @@ fn structInitExpr(
const result = try gz.addUnNode(.struct_init_empty, array_type_inst, node);
return rvalue(gz, ri, result, node);
}
- const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
+ const ty_inst = try typeExpr(gz, scope, type_expr);
const result = try gz.addUnNode(.struct_init_empty, ty_inst, node);
return rvalue(gz, ri, result, node);
} else {
return astgen.failNode(
- struct_init.ast.type_expr,
+ type_expr,
"initializing array with struct syntax",
.{},
);
@@ -1806,9 +1796,9 @@ fn structInitExpr(
}
}
- if (struct_init.ast.type_expr != 0) {
+ if (struct_init.ast.type_expr.unwrap()) |type_expr| {
// Typed inits do not use RLS for language simplicity.
- const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
+ const ty_inst = try typeExpr(gz, scope, type_expr);
_ = try gz.addUnNode(.validate_struct_init_ty, ty_inst, node);
switch (ri.rl) {
.ref => return structInitExprTyped(gz, scope, node, struct_init, ty_inst, true),
@@ -1997,9 +1987,7 @@ fn comptimeExpr2(
// no need to wrap it in a block. This is hard to determine in general, but we can identify a
// common subset of trivially comptime expressions to take down the size of the ZIR a bit.
const tree = gz.astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const node_tags = tree.nodes.items(.tag);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.identifier => {
// Many identifiers can be handled without a `block_comptime`, so `AstGen.identifier` has
// special handling for this case.
@@ -2052,8 +2040,7 @@ fn comptimeExpr2(
// comptime block, because that would be silly! Note that we don't bother doing this for
// unlabelled blocks, since they don't generate blocks at comptime anyway (see `blockExpr`).
.block_two, .block_two_semicolon, .block, .block_semicolon => {
- const token_tags = tree.tokens.items(.tag);
- const lbrace = main_tokens[node];
+ const lbrace = tree.nodeMainToken(node);
// Careful! We can't pass in the real result location here, since it may
// refer to runtime memory. A runtime-to-comptime boundary has to remove
// result location information, compute the result, and copy it to the true
@@ -2065,11 +2052,10 @@ fn comptimeExpr2(
else
.none,
};
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
var buf: [2]Ast.Node.Index = undefined;
const stmts = tree.blockStatements(&buf, node).?;
+
// Replace result location and copy back later - see above.
const block_ref = try labeledBlockExpr(gz, scope, ty_only_ri, node, stmts, true, .normal);
return rvalue(gz, ri, block_ref, node);
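
`isTokenPrecededByTags` is the new `Ast` helper replacing the manual `lbrace - 1` / `lbrace - 2` tag checks. A sketch of its likely semantics, assuming it simply matches the trailing token tags before the given index:

/// Sketch: true if the token stream immediately before `ti` ends with
/// `expected`, in order. For a labeled block `foo: { ... }`,
/// isTokenPrecededByTags(lbrace, &.{ .identifier, .colon }) holds.
pub fn isTokenPrecededByTags(
    tree: *const Ast,
    ti: TokenIndex,
    expected: []const Token.Tag,
) bool {
    return std.mem.endsWith(Token.Tag, tree.tokens.items(.tag)[0..ti], expected);
}
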
@@ -2117,8 +2103,7 @@ fn comptimeExprAst(
try astgen.appendErrorNode(node, "redundant comptime keyword in already comptime scope", .{});
}
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const body_node = node_datas[node].lhs;
+ const body_node = tree.nodeData(node).node;
return comptimeExpr2(gz, scope, ri, body_node, node, .comptime_keyword);
}
@@ -2156,9 +2141,7 @@ fn restoreErrRetIndex(
fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const break_label = node_datas[node].lhs;
- const rhs = node_datas[node].rhs;
+ const opt_break_label, const opt_rhs = tree.nodeData(node).opt_token_and_opt_node;
// Look for the label in the scope.
var scope = parent_scope;
@@ -2167,11 +2150,11 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
.gen_zir => {
const block_gz = scope.cast(GenZir).?;
- if (block_gz.cur_defer_node != 0) {
+ if (block_gz.cur_defer_node.unwrap()) |cur_defer_node| {
// We are breaking out of a `defer` block.
return astgen.failNodeNotes(node, "cannot break out of defer expression", .{}, &.{
try astgen.errNoteNode(
- block_gz.cur_defer_node,
+ cur_defer_node,
"defer expression here",
.{},
),
@@ -2179,7 +2162,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
}
const block_inst = blk: {
- if (break_label != 0) {
+ if (opt_break_label.unwrap()) |break_label| {
if (block_gz.label) |*label| {
if (try astgen.tokenIdentEql(label.token, break_label)) {
label.used = true;
@@ -2200,7 +2183,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
else
.@"break";
- if (rhs == 0) {
+ const rhs = opt_rhs.unwrap() orelse {
_ = try rvalue(parent_gz, block_gz.break_result_info, .void_value, node);
try genDefers(parent_gz, scope, parent_scope, .normal_only);
@@ -2211,7 +2194,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
_ = try parent_gz.addBreak(break_tag, block_inst, .void_value);
return Zir.Inst.Ref.unreachable_value;
- }
+ };
const operand = try reachableExpr(parent_gz, parent_scope, block_gz.break_result_info, rhs, node);
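
One structural change worth calling out in the hunk above: the old `if (rhs == 0) { ... }` statement becomes an unwrap-or-diverge binding, so everything after it sees a plain `Ast.Node.Index`:

// The `orelse` block must end in noreturn control flow (here `return`),
// which is what lets `rhs` be non-optional from this point on.
const rhs = opt_rhs.unwrap() orelse {
    // ... operand-less break: run defers, then break with void ...
    return Zir.Inst.Ref.unreachable_value;
};
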
@@ -2243,7 +2226,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
.top => unreachable,
}
}
- if (break_label != 0) {
+ if (opt_break_label.unwrap()) |break_label| {
const label_name = try astgen.identifierTokenString(break_label);
return astgen.failTok(break_label, "label not found: '{s}'", .{label_name});
} else {
@@ -2254,11 +2237,9 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const break_label = node_datas[node].lhs;
- const rhs = node_datas[node].rhs;
+ const opt_break_label, const opt_rhs = tree.nodeData(node).opt_token_and_opt_node;
- if (break_label == 0 and rhs != 0) {
+ if (opt_break_label == .none and opt_rhs != .none) {
return astgen.failNode(node, "cannot continue with operand without label", .{});
}
@@ -2269,10 +2250,10 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
.gen_zir => {
const gen_zir = scope.cast(GenZir).?;
- if (gen_zir.cur_defer_node != 0) {
+ if (gen_zir.cur_defer_node.unwrap()) |cur_defer_node| {
return astgen.failNodeNotes(node, "cannot continue out of defer expression", .{}, &.{
try astgen.errNoteNode(
- gen_zir.cur_defer_node,
+ cur_defer_node,
"defer expression here",
.{},
),
@@ -2282,11 +2263,11 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
scope = gen_zir.parent;
continue;
};
- if (break_label != 0) blk: {
+ if (opt_break_label.unwrap()) |break_label| blk: {
if (gen_zir.label) |*label| {
if (try astgen.tokenIdentEql(label.token, break_label)) {
const maybe_switch_tag = astgen.instructions.items(.tag)[@intFromEnum(label.block_inst)];
- if (rhs != 0) switch (maybe_switch_tag) {
+ if (opt_rhs != .none) switch (maybe_switch_tag) {
.switch_block, .switch_block_ref => {},
else => return astgen.failNode(node, "cannot continue loop with operand", .{}),
} else switch (maybe_switch_tag) {
@@ -2314,7 +2295,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
}
}
- if (rhs != 0) {
+ if (opt_rhs.unwrap()) |rhs| {
// We need to figure out the result info to use.
// The type should match
const operand = try reachableExpr(parent_gz, parent_scope, gen_zir.continue_result_info, rhs, node);
@@ -2353,7 +2334,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
.top => unreachable,
}
}
- if (break_label != 0) {
+ if (opt_break_label.unwrap()) |break_label| {
const label_name = try astgen.identifierTokenString(break_label);
return astgen.failTok(break_label, "label not found: '{s}'", .{label_name});
} else {
@@ -2373,15 +2354,14 @@ fn fullBodyExpr(
block_kind: BlockKind,
) InnerError!Zir.Inst.Ref {
const tree = gz.astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
+
var stmt_buf: [2]Ast.Node.Index = undefined;
- const statements = tree.blockStatements(&stmt_buf, node).?;
+ const statements = tree.blockStatements(&stmt_buf, node) orelse
+ return expr(gz, scope, ri, node);
- const lbrace = main_tokens[node];
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ const lbrace = tree.nodeMainToken(node);
+
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
// Labeled blocks are tricky - forwarding result location information properly is non-trivial,
// plus if this block is exited with a `break_inline` we aren't allowed multiple breaks. This
// case is rare, so just treat it as a normal expression and create a nested block.
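
Also in this hunk: `blockStatements` returns null for nodes that are not blocks, and `fullBodyExpr` now uses that to fall back to plain `expr` instead of asserting with `.?` — e.g. a `while` body written as a single statement rather than a `{}` block:

// Old: `tree.blockStatements(&stmt_buf, node).?` (callers had to pass a block).
// New: any expression node is accepted; non-blocks take the ordinary path.
const statements = tree.blockStatements(&stmt_buf, node) orelse
    return expr(gz, scope, ri, node);
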
@@ -2406,13 +2386,9 @@ fn blockExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const lbrace = main_tokens[block_node];
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ const lbrace = tree.nodeMainToken(block_node);
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
return labeledBlockExpr(gz, scope, ri, block_node, statements, false, kind);
}
@@ -2489,12 +2465,10 @@ fn labeledBlockExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const lbrace = main_tokens[block_node];
+ const lbrace = tree.nodeMainToken(block_node);
const label_token = lbrace - 2;
- assert(token_tags[label_token] == .identifier);
+ assert(tree.tokenTag(label_token) == .identifier);
try astgen.checkLabelRedefinition(parent_scope, label_token);
@@ -2555,8 +2529,6 @@ fn labeledBlockExpr(
fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Node.Index, block_kind: BlockKind) !void {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const node_data = tree.nodes.items(.data);
if (statements.len == 0) return;
@@ -2564,17 +2536,17 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
defer block_arena.deinit();
const block_arena_allocator = block_arena.allocator();
- var noreturn_src_node: Ast.Node.Index = 0;
+ var noreturn_src_node: Ast.Node.OptionalIndex = .none;
var scope = parent_scope;
for (statements, 0..) |statement, stmt_idx| {
- if (noreturn_src_node != 0) {
+ if (noreturn_src_node.unwrap()) |src_node| {
try astgen.appendErrorNodeNotes(
statement,
"unreachable code",
.{},
&[_]u32{
try astgen.errNoteNode(
- noreturn_src_node,
+ src_node,
"control flow is diverted here",
.{},
),
@@ -2587,7 +2559,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
};
var inner_node = statement;
while (true) {
- switch (node_tags[inner_node]) {
+ switch (tree.nodeTag(inner_node)) {
// zig fmt: off
.global_var_decl,
.local_var_decl,
@@ -2617,7 +2589,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
.assign_mul_wrap => try assignOp(gz, scope, statement, .mulwrap),
.grouped_expression => {
- inner_node = node_data[statement].lhs;
+ inner_node = tree.nodeData(statement).node_and_token[0];
continue;
},
@@ -2649,15 +2621,15 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
}
}
- if (noreturn_src_node == 0) {
+ if (noreturn_src_node == .none) {
try genDefers(gz, parent_scope, scope, .normal_only);
}
try checkUsed(gz, parent_scope, scope);
}
/// Returns AST source node of the thing that is noreturn if the statement is
-/// definitely `noreturn`. Otherwise returns 0.
-fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) InnerError!Ast.Node.Index {
+/// definitely `noreturn`. Otherwise returns .none.
+fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) InnerError!Ast.Node.OptionalIndex {
try emitDbgNode(gz, statement);
// We need to emit an error if the result is not `noreturn` or `void`, but
// we want to avoid adding the ZIR instruction if possible for performance.
@@ -2665,8 +2637,8 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) Inner
return addEnsureResult(gz, maybe_unused_result, statement);
}
-fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: Ast.Node.Index) InnerError!Ast.Node.Index {
- var noreturn_src_node: Ast.Node.Index = 0;
+fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: Ast.Node.Index) InnerError!Ast.Node.OptionalIndex {
+ var noreturn_src_node: Ast.Node.OptionalIndex = .none;
const elide_check = if (maybe_unused_result.toIndex()) |inst| b: {
// Note that this array becomes invalid after appending more items to it
// in the above while loop.
@@ -2927,7 +2899,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.check_comptime_control_flow,
.switch_continue,
=> {
- noreturn_src_node = statement;
+ noreturn_src_node = statement.toOptional();
break :b true;
},
@@ -2969,7 +2941,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.none => unreachable,
.unreachable_value => b: {
- noreturn_src_node = statement;
+ noreturn_src_node = statement.toOptional();
break :b true;
},
@@ -3098,23 +3070,23 @@ fn checkUsed(gz: *GenZir, outer_scope: *Scope, inner_scope: *Scope) InnerError!v
.gen_zir => scope = scope.cast(GenZir).?.parent,
.local_val => {
const s = scope.cast(Scope.LocalVal).?;
- if (s.used == 0 and s.discarded == 0) {
+ if (s.used == .none and s.discarded == .none) {
try astgen.appendErrorTok(s.token_src, "unused {s}", .{@tagName(s.id_cat)});
- } else if (s.used != 0 and s.discarded != 0) {
- try astgen.appendErrorTokNotes(s.discarded, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
- try gz.astgen.errNoteTok(s.used, "used here", .{}),
+ } else if (s.used != .none and s.discarded != .none) {
+ try astgen.appendErrorTokNotes(s.discarded.unwrap().?, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
+ try gz.astgen.errNoteTok(s.used.unwrap().?, "used here", .{}),
});
}
scope = s.parent;
},
.local_ptr => {
const s = scope.cast(Scope.LocalPtr).?;
- if (s.used == 0 and s.discarded == 0) {
+ if (s.used == .none and s.discarded == .none) {
try astgen.appendErrorTok(s.token_src, "unused {s}", .{@tagName(s.id_cat)});
} else {
- if (s.used != 0 and s.discarded != 0) {
- try astgen.appendErrorTokNotes(s.discarded, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
- try astgen.errNoteTok(s.used, "used here", .{}),
+ if (s.used != .none and s.discarded != .none) {
+ try astgen.appendErrorTokNotes(s.discarded.unwrap().?, "pointless discard of {s}", .{@tagName(s.id_cat)}, &[_]u32{
+ try astgen.errNoteTok(s.used.unwrap().?, "used here", .{}),
});
}
if (s.id_cat == .@"local variable" and !s.used_as_lvalue) {
@@ -3141,19 +3113,15 @@ fn deferStmt(
scope_tag: Scope.Tag,
) InnerError!*Scope {
var defer_gen = gz.makeSubBlock(scope);
- defer_gen.cur_defer_node = node;
- defer_gen.any_defer_node = node;
+ defer_gen.cur_defer_node = node.toOptional();
+ defer_gen.any_defer_node = node.toOptional();
defer defer_gen.unstack();
const tree = gz.astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const expr_node = node_datas[node].rhs;
-
- const payload_token = node_datas[node].lhs;
var local_val_scope: Scope.LocalVal = undefined;
var opt_remapped_err_code: Zir.Inst.OptionalIndex = .none;
- const have_err_code = scope_tag == .defer_error and payload_token != 0;
- const sub_scope = if (!have_err_code) &defer_gen.base else blk: {
+ const sub_scope = if (scope_tag != .defer_error) &defer_gen.base else blk: {
+ const payload_token = tree.nodeData(node).opt_token_and_node[0].unwrap() orelse break :blk &defer_gen.base;
const ident_name = try gz.astgen.identAsString(payload_token);
if (std.mem.eql(u8, tree.tokenSlice(payload_token), "_")) {
try gz.astgen.appendErrorTok(payload_token, "discard of error capture; omit it instead", .{});
@@ -3181,6 +3149,11 @@ fn deferStmt(
try gz.addDbgVar(.dbg_var_val, ident_name, remapped_err_code_ref);
break :blk &local_val_scope.base;
};
+ const expr_node = switch (scope_tag) {
+ .defer_normal => tree.nodeData(node).node,
+ .defer_error => tree.nodeData(node).opt_token_and_node[1],
+ else => unreachable,
+ };
_ = try unusedResultExpr(&defer_gen, sub_scope, expr_node);
try checkUsed(gz, scope, sub_scope);
_ = try defer_gen.addBreak(.break_inline, @enumFromInt(0), .void_value);
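
The trailing `expr_node` switch encodes the two AST shapes that share `deferStmt`: plain `defer` stores only its body node, while `errdefer` stores an optional capture token plus the body (field names as in the hunk above):

// defer <expr>;        → .node                 (body only)
// errdefer <expr>;     → .opt_token_and_node   (capture token = .none)
// errdefer |e| <expr>; → .opt_token_and_node   (capture token = e's token)
const expr_node = switch (scope_tag) {
    .defer_normal => tree.nodeData(node).node,
    .defer_error => tree.nodeData(node).opt_token_and_node[1],
    else => unreachable,
};
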
@@ -3215,8 +3188,6 @@ fn varDecl(
try emitDbgNode(gz, node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
const name_token = var_decl.ast.mut_token + 1;
const ident_name_raw = tree.tokenSlice(name_token);
@@ -3230,27 +3201,27 @@ fn varDecl(
ident_name,
name_token,
ident_name_raw,
- if (token_tags[var_decl.ast.mut_token] == .keyword_const) .@"local constant" else .@"local variable",
+ if (tree.tokenTag(var_decl.ast.mut_token) == .keyword_const) .@"local constant" else .@"local variable",
);
- if (var_decl.ast.init_node == 0) {
+ const init_node = var_decl.ast.init_node.unwrap() orelse {
return astgen.failNode(node, "variables must be initialized", .{});
- }
+ };
- if (var_decl.ast.addrspace_node != 0) {
- return astgen.failTok(main_tokens[var_decl.ast.addrspace_node], "cannot set address space of local variable '{s}'", .{ident_name_raw});
+ if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ return astgen.failTok(tree.nodeMainToken(addrspace_node), "cannot set address space of local variable '{s}'", .{ident_name_raw});
}
- if (var_decl.ast.section_node != 0) {
- return astgen.failTok(main_tokens[var_decl.ast.section_node], "cannot set section of local variable '{s}'", .{ident_name_raw});
+ if (var_decl.ast.section_node.unwrap()) |section_node| {
+ return astgen.failTok(tree.nodeMainToken(section_node), "cannot set section of local variable '{s}'", .{ident_name_raw});
}
- const align_inst: Zir.Inst.Ref = if (var_decl.ast.align_node != 0)
- try expr(gz, scope, coerced_align_ri, var_decl.ast.align_node)
+ const align_inst: Zir.Inst.Ref = if (var_decl.ast.align_node.unwrap()) |align_node|
+ try expr(gz, scope, coerced_align_ri, align_node)
else
.none;
- switch (token_tags[var_decl.ast.mut_token]) {
+ switch (tree.tokenTag(var_decl.ast.mut_token)) {
.keyword_const => {
if (var_decl.comptime_token) |comptime_token| {
try astgen.appendErrorTok(comptime_token, "'comptime const' is redundant; instead wrap the initialization expression with 'comptime'", .{});
@@ -3262,25 +3233,24 @@ fn varDecl(
// Depending on the type of AST the initialization expression is, we may need an lvalue
// or an rvalue as a result location. If it is an rvalue, we can use the instruction as
// the variable, no memory location needed.
- const type_node = var_decl.ast.type_node;
if (align_inst == .none and
!astgen.nodes_need_rl.contains(node))
{
- const result_info: ResultInfo = if (type_node != 0) .{
+ const result_info: ResultInfo = if (var_decl.ast.type_node.unwrap()) |type_node| .{
.rl = .{ .ty = try typeExpr(gz, scope, type_node) },
.ctx = .const_init,
} else .{ .rl = .none, .ctx = .const_init };
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
- const init_inst = try reachableExprComptime(gz, scope, result_info, var_decl.ast.init_node, node, if (force_comptime) .comptime_keyword else null);
+ const init_inst = try reachableExprComptime(gz, scope, result_info, init_node, node, if (force_comptime) .comptime_keyword else null);
gz.anon_name_strategy = prev_anon_name_strategy;
- _ = try gz.addUnNode(.validate_const, init_inst, var_decl.ast.init_node);
+ _ = try gz.addUnNode(.validate_const, init_inst, init_node);
try gz.addDbgVar(.dbg_var_val, ident_name, init_inst);
// The const init expression may have modified the error return trace, so signal
// to Sema that it should save the new index for restoring later.
- if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node))
+ if (nodeMayAppendToErrorTrace(tree, init_node))
_ = try gz.addSaveErrRetIndex(.{ .if_of_error_type = init_inst });
const sub_scope = try block_arena.create(Scope.LocalVal);
@@ -3296,9 +3266,9 @@ fn varDecl(
}
const is_comptime = gz.is_comptime or
- tree.nodes.items(.tag)[var_decl.ast.init_node] == .@"comptime";
+ tree.nodeTag(init_node) == .@"comptime";
- const init_rl: ResultInfo.Loc = if (type_node != 0) init_rl: {
+ const init_rl: ResultInfo.Loc = if (var_decl.ast.type_node.unwrap()) |type_node| init_rl: {
const type_inst = try typeExpr(gz, scope, type_node);
if (align_inst == .none) {
break :init_rl .{ .ptr = .{ .inst = try gz.addUnNode(.alloc, type_inst, node) } };
@@ -3339,11 +3309,11 @@ fn varDecl(
const prev_anon_name_strategy = gz.anon_name_strategy;
gz.anon_name_strategy = .dbg_var;
defer gz.anon_name_strategy = prev_anon_name_strategy;
- const init_inst = try reachableExprComptime(gz, scope, init_result_info, var_decl.ast.init_node, node, if (force_comptime) .comptime_keyword else null);
+ const init_inst = try reachableExprComptime(gz, scope, init_result_info, init_node, node, if (force_comptime) .comptime_keyword else null);
// The const init expression may have modified the error return trace, so signal
// to Sema that it should save the new index for restoring later.
- if (nodeMayAppendToErrorTrace(tree, var_decl.ast.init_node))
+ if (nodeMayAppendToErrorTrace(tree, init_node))
_ = try gz.addSaveErrRetIndex(.{ .if_of_error_type = init_inst });
const const_ptr = if (resolve_inferred)
@@ -3369,8 +3339,8 @@ fn varDecl(
if (var_decl.comptime_token != null and gz.is_comptime)
return astgen.failTok(var_decl.comptime_token.?, "'comptime var' is redundant in comptime scope", .{});
const is_comptime = var_decl.comptime_token != null or gz.is_comptime;
- const alloc: Zir.Inst.Ref, const resolve_inferred: bool, const result_info: ResultInfo = if (var_decl.ast.type_node != 0) a: {
- const type_inst = try typeExpr(gz, scope, var_decl.ast.type_node);
+ const alloc: Zir.Inst.Ref, const resolve_inferred: bool, const result_info: ResultInfo = if (var_decl.ast.type_node.unwrap()) |type_node| a: {
+ const type_inst = try typeExpr(gz, scope, type_node);
const alloc = alloc: {
if (align_inst == .none) {
const tag: Zir.Inst.Tag = if (is_comptime)
@@ -3415,7 +3385,7 @@ fn varDecl(
gz,
scope,
result_info,
- var_decl.ast.init_node,
+ init_node,
node,
if (var_decl.comptime_token != null) .comptime_keyword else null,
);
@@ -3458,15 +3428,11 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!voi
try emitDbgNode(gz, infix_node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- const node_tags = tree.nodes.items(.tag);
- const lhs = node_datas[infix_node].lhs;
- const rhs = node_datas[infix_node].rhs;
- if (node_tags[lhs] == .identifier) {
+ const lhs, const rhs = tree.nodeData(infix_node).node_and_node;
+ if (tree.nodeTag(lhs) == .identifier) {
// This intentionally does not support `@"_"` syntax.
- const ident_name = tree.tokenSlice(main_tokens[lhs]);
+ const ident_name = tree.tokenSlice(tree.nodeMainToken(lhs));
if (mem.eql(u8, ident_name, "_")) {
_ = try expr(gz, scope, .{ .rl = .discard, .ctx = .assignment }, rhs);
return;
@@ -3484,8 +3450,6 @@ fn assignDestructure(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerErro
try emitDbgNode(gz, node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const node_tags = tree.nodes.items(.tag);
const full = tree.assignDestructure(node);
if (full.comptime_token != null and gz.is_comptime) {
@@ -3503,9 +3467,9 @@ fn assignDestructure(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerErro
const rl_components = try astgen.arena.alloc(ResultInfo.Loc.DestructureComponent, full.ast.variables.len);
for (rl_components, full.ast.variables) |*variable_rl, variable_node| {
- if (node_tags[variable_node] == .identifier) {
+ if (tree.nodeTag(variable_node) == .identifier) {
// This intentionally does not support `@"_"` syntax.
- const ident_name = tree.tokenSlice(main_tokens[variable_node]);
+ const ident_name = tree.tokenSlice(tree.nodeMainToken(variable_node));
if (mem.eql(u8, ident_name, "_")) {
variable_rl.* = .discard;
continue;
@@ -3542,9 +3506,6 @@ fn assignDestructureMaybeDecls(
try emitDbgNode(gz, node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const node_tags = tree.nodes.items(.tag);
const full = tree.assignDestructure(node);
if (full.comptime_token != null and gz.is_comptime) {
@@ -3552,7 +3513,7 @@ fn assignDestructureMaybeDecls(
}
const is_comptime = full.comptime_token != null or gz.is_comptime;
- const value_is_comptime = node_tags[full.ast.value_expr] == .@"comptime";
+ const value_is_comptime = tree.nodeTag(full.ast.value_expr) == .@"comptime";
// When declaring consts via a destructure, we always use a result pointer.
// This avoids the need to create tuple types, and is also likely easier to
@@ -3565,10 +3526,10 @@ fn assignDestructureMaybeDecls(
var any_non_const_variables = false;
var any_lvalue_expr = false;
for (rl_components, full.ast.variables) |*variable_rl, variable_node| {
- switch (node_tags[variable_node]) {
+ switch (tree.nodeTag(variable_node)) {
.identifier => {
// This intentionally does not support `@"_"` syntax.
- const ident_name = tree.tokenSlice(main_tokens[variable_node]);
+ const ident_name = tree.tokenSlice(tree.nodeMainToken(variable_node));
if (mem.eql(u8, ident_name, "_")) {
any_non_const_variables = true;
variable_rl.* = .discard;
@@ -3586,14 +3547,14 @@ fn assignDestructureMaybeDecls(
// We detect shadowing in the second pass over these, while we're creating scopes.
- if (full_var_decl.ast.addrspace_node != 0) {
- return astgen.failTok(main_tokens[full_var_decl.ast.addrspace_node], "cannot set address space of local variable '{s}'", .{ident_name_raw});
+ if (full_var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ return astgen.failTok(tree.nodeMainToken(addrspace_node), "cannot set address space of local variable '{s}'", .{ident_name_raw});
}
- if (full_var_decl.ast.section_node != 0) {
- return astgen.failTok(main_tokens[full_var_decl.ast.section_node], "cannot set section of local variable '{s}'", .{ident_name_raw});
+ if (full_var_decl.ast.section_node.unwrap()) |section_node| {
+ return astgen.failTok(tree.nodeMainToken(section_node), "cannot set section of local variable '{s}'", .{ident_name_raw});
}
- const is_const = switch (token_tags[full_var_decl.ast.mut_token]) {
+ const is_const = switch (tree.tokenTag(full_var_decl.ast.mut_token)) {
.keyword_var => false,
.keyword_const => true,
else => unreachable,
@@ -3603,14 +3564,14 @@ fn assignDestructureMaybeDecls(
// We also mark `const`s as comptime if the RHS is definitely comptime-known.
const this_variable_comptime = is_comptime or (is_const and value_is_comptime);
- const align_inst: Zir.Inst.Ref = if (full_var_decl.ast.align_node != 0)
- try expr(gz, scope, coerced_align_ri, full_var_decl.ast.align_node)
+ const align_inst: Zir.Inst.Ref = if (full_var_decl.ast.align_node.unwrap()) |align_node|
+ try expr(gz, scope, coerced_align_ri, align_node)
else
.none;
- if (full_var_decl.ast.type_node != 0) {
+ if (full_var_decl.ast.type_node.unwrap()) |type_node| {
// Typed alloc
- const type_inst = try typeExpr(gz, scope, full_var_decl.ast.type_node);
+ const type_inst = try typeExpr(gz, scope, type_node);
const ptr = if (align_inst == .none) ptr: {
const tag: Zir.Inst.Tag = if (is_const)
.alloc
@@ -3679,7 +3640,7 @@ fn assignDestructureMaybeDecls(
// evaluate the lvalues from within the possible block_comptime.
for (rl_components, full.ast.variables) |*variable_rl, variable_node| {
if (variable_rl.* != .typed_ptr) continue;
- switch (node_tags[variable_node]) {
+ switch (tree.nodeTag(variable_node)) {
.global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => continue,
else => {},
}
@@ -3708,7 +3669,7 @@ fn assignDestructureMaybeDecls(
// If there were any `const` decls, make the pointer constant.
var cur_scope = scope;
for (rl_components, full.ast.variables) |variable_rl, variable_node| {
- switch (node_tags[variable_node]) {
+ switch (tree.nodeTag(variable_node)) {
.local_var_decl, .simple_var_decl, .aligned_var_decl => {},
else => continue, // We were mutating an existing lvalue - nothing to do
}
@@ -3718,7 +3679,7 @@ fn assignDestructureMaybeDecls(
.typed_ptr => |typed_ptr| .{ typed_ptr.inst, false },
.inferred_ptr => |ptr_inst| .{ ptr_inst, true },
};
- const is_const = switch (token_tags[full_var_decl.ast.mut_token]) {
+ const is_const = switch (tree.tokenTag(full_var_decl.ast.mut_token)) {
.keyword_var => false,
.keyword_const => true,
else => unreachable,
@@ -3769,9 +3730,9 @@ fn assignOp(
try emitDbgNode(gz, infix_node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(infix_node).node_and_node;
+ const lhs_ptr = try lvalExpr(gz, scope, lhs_node);
const cursor = switch (op_inst_tag) {
.add, .sub, .mul, .div, .mod_rem => maybeAdvanceSourceCursorToMainToken(gz, infix_node),
@@ -3797,7 +3758,7 @@ fn assignOp(
else => try gz.addUnNode(.typeof, lhs, infix_node), // same as LHS type
};
// Not `coerced_ty` since `add`/etc won't coerce to this type.
- const rhs = try expr(gz, scope, .{ .rl = .{ .ty = rhs_res_ty } }, node_datas[infix_node].rhs);
+ const rhs = try expr(gz, scope, .{ .rl = .{ .ty = rhs_res_ty } }, rhs_node);
switch (op_inst_tag) {
.add, .sub, .mul, .div, .mod_rem => {
@@ -3824,12 +3785,12 @@ fn assignShift(
try emitDbgNode(gz, infix_node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(infix_node).node_and_node;
+ const lhs_ptr = try lvalExpr(gz, scope, lhs_node);
const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node);
const rhs_type = try gz.addUnNode(.typeof_log2_int_type, lhs, infix_node);
- const rhs = try expr(gz, scope, .{ .rl = .{ .ty = rhs_type } }, node_datas[infix_node].rhs);
+ const rhs = try expr(gz, scope, .{ .rl = .{ .ty = rhs_type } }, rhs_node);
const result = try gz.addPlNode(op_inst_tag, infix_node, Zir.Inst.Bin{
.lhs = lhs,
@@ -3845,12 +3806,12 @@ fn assignShiftSat(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerE
try emitDbgNode(gz, infix_node);
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const lhs_ptr = try lvalExpr(gz, scope, node_datas[infix_node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(infix_node).node_and_node;
+ const lhs_ptr = try lvalExpr(gz, scope, lhs_node);
const lhs = try gz.addUnNode(.load, lhs_ptr, infix_node);
// Saturating shift-left allows any integer type for both the LHS and RHS.
- const rhs = try expr(gz, scope, .{ .rl = .none }, node_datas[infix_node].rhs);
+ const rhs = try expr(gz, scope, .{ .rl = .none }, rhs_node);
const result = try gz.addPlNode(.shl_sat, infix_node, Zir.Inst.Bin{
.lhs = lhs,
@@ -3885,7 +3846,7 @@ fn ptrType(
var bit_end_ref: Zir.Inst.Ref = .none;
var trailing_count: u32 = 0;
- if (ptr_info.ast.sentinel != 0) {
+ if (ptr_info.ast.sentinel.unwrap()) |sentinel| {
// These attributes can appear in any order and they all come before the
// element type so we need to reset the source cursor before generating them.
gz.astgen.source_offset = source_offset;
@@ -3896,7 +3857,7 @@ fn ptrType(
gz,
scope,
.{ .rl = .{ .ty = elem_type } },
- ptr_info.ast.sentinel,
+ sentinel,
switch (ptr_info.size) {
.slice => .slice_sentinel,
else => .pointer_sentinel,
@@ -3904,27 +3865,27 @@ fn ptrType(
);
trailing_count += 1;
}
- if (ptr_info.ast.addrspace_node != 0) {
+ if (ptr_info.ast.addrspace_node.unwrap()) |addrspace_node| {
gz.astgen.source_offset = source_offset;
gz.astgen.source_line = source_line;
gz.astgen.source_column = source_column;
- const addrspace_ty = try gz.addBuiltinValue(ptr_info.ast.addrspace_node, .address_space);
- addrspace_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = addrspace_ty } }, ptr_info.ast.addrspace_node, .@"addrspace");
+ const addrspace_ty = try gz.addBuiltinValue(addrspace_node, .address_space);
+ addrspace_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = addrspace_ty } }, addrspace_node, .@"addrspace");
trailing_count += 1;
}
- if (ptr_info.ast.align_node != 0) {
+ if (ptr_info.ast.align_node.unwrap()) |align_node| {
gz.astgen.source_offset = source_offset;
gz.astgen.source_line = source_line;
gz.astgen.source_column = source_column;
- align_ref = try comptimeExpr(gz, scope, coerced_align_ri, ptr_info.ast.align_node, .@"align");
+ align_ref = try comptimeExpr(gz, scope, coerced_align_ri, align_node, .@"align");
trailing_count += 1;
}
- if (ptr_info.ast.bit_range_start != 0) {
- assert(ptr_info.ast.bit_range_end != 0);
- bit_start_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, ptr_info.ast.bit_range_start, .type);
- bit_end_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, ptr_info.ast.bit_range_end, .type);
+ if (ptr_info.ast.bit_range_start.unwrap()) |bit_range_start| {
+ const bit_range_end = ptr_info.ast.bit_range_end.unwrap().?;
+ bit_start_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, bit_range_start, .type);
+ bit_end_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, bit_range_end, .type);
trailing_count += 2;
}
@@ -3977,18 +3938,15 @@ fn ptrType(
fn arrayType(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) !Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const len_node = node_datas[node].lhs;
- if (node_tags[len_node] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[len_node]), "_"))
+ const len_node, const elem_type_node = tree.nodeData(node).node_and_node;
+ if (tree.nodeTag(len_node) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(len_node)), "_"))
{
return astgen.failNode(len_node, "unable to infer array size", .{});
}
const len = try reachableExprComptime(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, len_node, node, .type);
- const elem_type = try typeExpr(gz, scope, node_datas[node].rhs);
+ const elem_type = try typeExpr(gz, scope, elem_type_node);
const result = try gz.addPlNode(.array_type, node, Zir.Inst.Bin{
.lhs = len,
@@ -4000,14 +3958,12 @@ fn arrayType(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) !
fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) !Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const extra = tree.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
-
- const len_node = node_datas[node].lhs;
- if (node_tags[len_node] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[len_node]), "_"))
+
+ const len_node, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
+
+ if (tree.nodeTag(len_node) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(len_node)), "_"))
{
return astgen.failNode(len_node, "unable to infer array size", .{});
}
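
`node_and_extra` carries an index into the AST's side array of raw `u32`s; `extraData` reinterprets a run of those words as a typed struct. A minimal sketch of the mechanism, assuming straightforward field-by-field decoding (the real implementation in std.zig.Ast also has to handle the various optional and token index field types):

// Sketch: decode consecutive u32 words starting at `index` into T.
// For Ast.Node.ArrayTypeSentinel this reads two words: the sentinel
// expression's node index and the element type's node index.
pub fn extraData(tree: Ast, index: ExtraIndex, comptime T: type) T {
    var result: T = undefined;
    inline for (std.meta.fields(T), 0..) |field, i| {
        const word = tree.extra_data[@intFromEnum(index) + i];
        @field(result, field.name) = @enumFromInt(word);
    }
    return result;
}
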
@@ -4107,11 +4063,10 @@ fn fnDecl(
scope: *Scope,
wip_members: *WipMembers,
decl_node: Ast.Node.Index,
- body_node: Ast.Node.Index,
+ body_node: Ast.Node.OptionalIndex,
fn_proto: Ast.full.FnProto,
) InnerError!void {
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
@@ -4140,15 +4095,15 @@ fn fnDecl(
const is_pub = fn_proto.visib_token != null;
const is_export = blk: {
const maybe_export_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_export_token] == .keyword_export;
+ break :blk tree.tokenTag(maybe_export_token) == .keyword_export;
};
const is_extern = blk: {
const maybe_extern_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_extern_token] == .keyword_extern;
+ break :blk tree.tokenTag(maybe_extern_token) == .keyword_extern;
};
const has_inline_keyword = blk: {
const maybe_inline_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_inline_token] == .keyword_inline;
+ break :blk tree.tokenTag(maybe_inline_token) == .keyword_inline;
};
const lib_name = if (fn_proto.lib_name) |lib_name_token| blk: {
const lib_name_str = try astgen.strLitAsString(lib_name_token);
@@ -4160,16 +4115,18 @@ fn fnDecl(
}
break :blk lib_name_str.index;
} else .empty;
- if (fn_proto.ast.callconv_expr != 0 and has_inline_keyword) {
+ if (fn_proto.ast.callconv_expr != .none and has_inline_keyword) {
return astgen.failNode(
- fn_proto.ast.callconv_expr,
+ fn_proto.ast.callconv_expr.unwrap().?,
"explicit callconv incompatible with inline keyword",
.{},
);
}
- const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
- const is_inferred_error = token_tags[maybe_bang] == .bang;
- if (body_node == 0) {
+
+ const return_type = fn_proto.ast.return_type.unwrap().?;
+ const maybe_bang = tree.firstToken(return_type) - 1;
+ const is_inferred_error = tree.tokenTag(maybe_bang) == .bang;
+ if (body_node == .none) {
if (!is_extern) {
return astgen.failTok(fn_proto.ast.fn_token, "non-extern function has no body", .{});
}
@@ -4202,28 +4159,28 @@ fn fnDecl(
var align_gz = type_gz.makeSubBlock(scope);
defer align_gz.unstack();
- if (fn_proto.ast.align_expr != 0) {
+ if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
astgen.restoreSourceCursor(saved_cursor);
- const inst = try expr(&align_gz, &align_gz.base, coerced_align_ri, fn_proto.ast.align_expr);
+ const inst = try expr(&align_gz, &align_gz.base, coerced_align_ri, align_expr);
_ = try align_gz.addBreakWithSrcNode(.break_inline, decl_inst, inst, decl_node);
}
var linksection_gz = align_gz.makeSubBlock(scope);
defer linksection_gz.unstack();
- if (fn_proto.ast.section_expr != 0) {
+ if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
astgen.restoreSourceCursor(saved_cursor);
- const inst = try expr(&linksection_gz, &linksection_gz.base, coerced_linksection_ri, fn_proto.ast.section_expr);
+ const inst = try expr(&linksection_gz, &linksection_gz.base, coerced_linksection_ri, section_expr);
_ = try linksection_gz.addBreakWithSrcNode(.break_inline, decl_inst, inst, decl_node);
}
var addrspace_gz = linksection_gz.makeSubBlock(scope);
defer addrspace_gz.unstack();
- if (fn_proto.ast.addrspace_expr != 0) {
+ if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
astgen.restoreSourceCursor(saved_cursor);
- const addrspace_ty = try addrspace_gz.addBuiltinValue(fn_proto.ast.addrspace_expr, .address_space);
- const inst = try expr(&addrspace_gz, &addrspace_gz.base, .{ .rl = .{ .coerced_ty = addrspace_ty } }, fn_proto.ast.addrspace_expr);
+ const addrspace_ty = try addrspace_gz.addBuiltinValue(addrspace_expr, .address_space);
+ const inst = try expr(&addrspace_gz, &addrspace_gz.base, .{ .rl = .{ .coerced_ty = addrspace_ty } }, addrspace_expr);
_ = try addrspace_gz.addBreakWithSrcNode(.break_inline, decl_inst, inst, decl_node);
}
@@ -4233,7 +4190,7 @@ fn fnDecl(
if (!is_extern) {
// We include a function *value*, not a type.
astgen.restoreSourceCursor(saved_cursor);
- try astgen.fnDeclInner(&value_gz, &value_gz.base, saved_cursor, decl_inst, decl_node, body_node, fn_proto);
+ try astgen.fnDeclInner(&value_gz, &value_gz.base, saved_cursor, decl_inst, decl_node, body_node.unwrap().?, fn_proto);
}
// *Now* we can incorporate the full source code into the hasher.
@@ -4272,18 +4229,19 @@ fn fnDeclInner(
fn_proto: Ast.full.FnProto,
) InnerError!void {
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
const is_noinline = blk: {
const maybe_noinline_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_noinline_token] == .keyword_noinline;
+ break :blk tree.tokenTag(maybe_noinline_token) == .keyword_noinline;
};
const has_inline_keyword = blk: {
const maybe_inline_token = fn_proto.extern_export_inline_token orelse break :blk false;
- break :blk token_tags[maybe_inline_token] == .keyword_inline;
+ break :blk tree.tokenTag(maybe_inline_token) == .keyword_inline;
};
- const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
- const is_inferred_error = token_tags[maybe_bang] == .bang;
+
+ const return_type = fn_proto.ast.return_type.unwrap().?;
+ const maybe_bang = tree.firstToken(return_type) - 1;
+ const is_inferred_error = tree.tokenTag(maybe_bang) == .bang;
// Note that the capacity here may not be sufficient, as this does not include `anytype` parameters.
var param_insts: std.ArrayListUnmanaged(Zir.Inst.Index) = try .initCapacity(astgen.arena, fn_proto.ast.params.len);
@@ -4297,7 +4255,7 @@ fn fnDeclInner(
var param_type_i: usize = 0;
var it = fn_proto.iterate(tree);
while (it.next()) |param| : (param_type_i += 1) {
- const is_comptime = if (param.comptime_noalias) |token| switch (token_tags[token]) {
+ const is_comptime = if (param.comptime_noalias) |token| switch (tree.tokenTag(token)) {
.keyword_noalias => is_comptime: {
noalias_bits |= @as(u32, 1) << (std.math.cast(u5, param_type_i) orelse
return astgen.failTok(token, "this compiler implementation only supports 'noalias' on the first 32 parameters", .{}));
@@ -4308,7 +4266,7 @@ fn fnDeclInner(
} else false;
const is_anytype = if (param.anytype_ellipsis3) |token| blk: {
- switch (token_tags[token]) {
+ switch (tree.tokenTag(token)) {
.keyword_anytype => break :blk true,
.ellipsis3 => break :is_var_args true,
else => unreachable,
@@ -4327,30 +4285,31 @@ fn fnDeclInner(
if (param.anytype_ellipsis3) |tok| {
return astgen.failTok(tok, "missing parameter name", .{});
} else {
+ const type_expr = param.type_expr.?;
ambiguous: {
- if (tree.nodes.items(.tag)[param.type_expr] != .identifier) break :ambiguous;
- const main_token = tree.nodes.items(.main_token)[param.type_expr];
+ if (tree.nodeTag(type_expr) != .identifier) break :ambiguous;
+ const main_token = tree.nodeMainToken(type_expr);
const identifier_str = tree.tokenSlice(main_token);
if (isPrimitive(identifier_str)) break :ambiguous;
return astgen.failNodeNotes(
- param.type_expr,
+ type_expr,
"missing parameter name or type",
.{},
&[_]u32{
try astgen.errNoteNode(
- param.type_expr,
+ type_expr,
"if this is a name, annotate its type '{s}: T'",
.{identifier_str},
),
try astgen.errNoteNode(
- param.type_expr,
+ type_expr,
"if this is a type, give it a name '<name>: {s}'",
.{identifier_str},
),
},
);
}
- return astgen.failNode(param.type_expr, "missing parameter name", .{});
+ return astgen.failNode(type_expr, "missing parameter name", .{});
}
};
@@ -4362,8 +4321,7 @@ fn fnDeclInner(
.param_anytype;
break :param try decl_gz.addStrTok(tag, param_name, name_token);
} else param: {
- const param_type_node = param.type_expr;
- assert(param_type_node != 0);
+ const param_type_node = param.type_expr.?;
any_param_used = false; // we will check this later
var param_gz = decl_gz.makeSubBlock(scope);
defer param_gz.unstack();
@@ -4372,8 +4330,7 @@ fn fnDeclInner(
_ = try param_gz.addBreakWithSrcNode(.break_inline, param_inst_expected, param_type, param_type_node);
const param_type_is_generic = any_param_used;
- const main_tokens = tree.nodes.items(.main_token);
- const name_token = param.name_token orelse main_tokens[param_type_node];
+ const name_token = param.name_token orelse tree.nodeMainToken(param_type_node);
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
            const param_inst = try decl_gz.addParam(&param_gz, param_insts.items, param_type_is_generic, tag, name_token, param_name);
assert(param_inst_expected == param_inst);
@@ -4409,7 +4366,7 @@ fn fnDeclInner(
// Parameters are in scope for the return type, so we use `params_scope` here.
// The calling convention will not have parameters in scope, so we'll just use `scope`.
// See #22263 for a proposal to solve the inconsistency here.
- const inst = try fullBodyExpr(&ret_gz, params_scope, coerced_type_ri, fn_proto.ast.return_type, .normal);
+ const inst = try fullBodyExpr(&ret_gz, params_scope, coerced_type_ri, fn_proto.ast.return_type.unwrap().?, .normal);
if (ret_gz.instructionsSlice().len == 0) {
// In this case we will send a len=0 body which can be encoded more efficiently.
break :inst inst;
@@ -4426,12 +4383,12 @@ fn fnDeclInner(
var cc_gz = decl_gz.makeSubBlock(scope);
defer cc_gz.unstack();
const cc_ref: Zir.Inst.Ref = blk: {
- if (fn_proto.ast.callconv_expr != 0) {
+ if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
const inst = try expr(
&cc_gz,
scope,
- .{ .rl = .{ .coerced_ty = try cc_gz.addBuiltinValue(fn_proto.ast.callconv_expr, .calling_convention) } },
- fn_proto.ast.callconv_expr,
+ .{ .rl = .{ .coerced_ty = try cc_gz.addBuiltinValue(callconv_expr, .calling_convention) } },
+ callconv_expr,
);
if (cc_gz.instructionsSlice().len == 0) {
// In this case we will send a len=0 body which can be encoded more efficiently.
@@ -4470,7 +4427,7 @@ fn fnDeclInner(
// Leave `astgen.src_hasher` unmodified; this will be used for hashing
// the *whole* function declaration, including its body.
var proto_hasher = astgen.src_hasher;
- const proto_node = tree.nodes.items(.data)[decl_node].lhs;
+ const proto_node = tree.nodeData(decl_node).node_and_node[0];
proto_hasher.update(tree.getNodeSource(proto_node));
var proto_hash: std.zig.SrcHash = undefined;
proto_hasher.final(&proto_hash);
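
Here the untyped `.lhs`/`.rhs` pair gives way to `tree.nodeData(node)` returning a per-tag payload union, which call sites either destructure or index. A rough sketch of that payload type; variant layouts beyond those visible in this diff are this sketch's assumption:

    const std = @import("std");
    const TokenIndex = u32;
    const Index = enum(u32) { root = 0, _ };
    const OptionalIndex = enum(u32) { root = 0, none = std.math.maxInt(u32), _ };
    const OptionalTokenIndex = enum(u32) { none = std.math.maxInt(u32), _ };

    // Rough shape only; the real union has one variant per node-tag layout,
    // and each caller must name the variant its node's tag actually stores.
    pub const Data = union {
        node: Index,
        opt_node: OptionalIndex,
        node_and_node: struct { Index, Index },
        node_and_opt_node: struct { Index, OptionalIndex },
        node_and_token: struct { Index, TokenIndex },
        opt_token_and_node: struct { OptionalTokenIndex, Index },
    };

    // Call sites destructure the tuple payloads:
    //   const lhs, const rhs = tree.nodeData(node).node_and_node;
    // or index them, as in the proto_node line above:
    //   tree.nodeData(decl_node).node_and_node[0]
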
@@ -4540,7 +4497,6 @@ fn globalVarDecl(
var_decl: Ast.full.VarDecl,
) InnerError!void {
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
@@ -4548,16 +4504,16 @@ fn globalVarDecl(
astgen.src_hasher.update(tree.getNodeSource(node));
astgen.src_hasher.update(std.mem.asBytes(&astgen.source_column));
- const is_mutable = token_tags[var_decl.ast.mut_token] == .keyword_var;
+ const is_mutable = tree.tokenTag(var_decl.ast.mut_token) == .keyword_var;
const name_token = var_decl.ast.mut_token + 1;
const is_pub = var_decl.visib_token != null;
const is_export = blk: {
const maybe_export_token = var_decl.extern_export_token orelse break :blk false;
- break :blk token_tags[maybe_export_token] == .keyword_export;
+ break :blk tree.tokenTag(maybe_export_token) == .keyword_export;
};
const is_extern = blk: {
const maybe_extern_token = var_decl.extern_export_token orelse break :blk false;
- break :blk token_tags[maybe_extern_token] == .keyword_extern;
+ break :blk tree.tokenTag(maybe_extern_token) == .keyword_extern;
};
const is_threadlocal = if (var_decl.threadlocal_token) |tok| blk: {
if (!is_mutable) {
@@ -4583,10 +4539,10 @@ fn globalVarDecl(
const decl_inst = try gz.makeDeclaration(node);
wip_members.nextDecl(decl_inst);
- if (var_decl.ast.init_node != 0) {
+ if (var_decl.ast.init_node.unwrap()) |init_node| {
if (is_extern) {
return astgen.failNode(
- var_decl.ast.init_node,
+ init_node,
"extern variables have no initializers",
.{},
);
@@ -4597,7 +4553,7 @@ fn globalVarDecl(
}
}
- if (is_extern and var_decl.ast.type_node == 0) {
+ if (is_extern and var_decl.ast.type_node == .none) {
return astgen.failNode(node, "unable to infer variable type", .{});
}
@@ -4614,45 +4570,45 @@ fn globalVarDecl(
};
defer type_gz.unstack();
- if (var_decl.ast.type_node != 0) {
- const type_inst = try expr(&type_gz, &type_gz.base, coerced_type_ri, var_decl.ast.type_node);
+ if (var_decl.ast.type_node.unwrap()) |type_node| {
+ const type_inst = try expr(&type_gz, &type_gz.base, coerced_type_ri, type_node);
_ = try type_gz.addBreakWithSrcNode(.break_inline, decl_inst, type_inst, node);
}
var align_gz = type_gz.makeSubBlock(scope);
defer align_gz.unstack();
- if (var_decl.ast.align_node != 0) {
- const align_inst = try expr(&align_gz, &align_gz.base, coerced_align_ri, var_decl.ast.align_node);
+ if (var_decl.ast.align_node.unwrap()) |align_node| {
+ const align_inst = try expr(&align_gz, &align_gz.base, coerced_align_ri, align_node);
_ = try align_gz.addBreakWithSrcNode(.break_inline, decl_inst, align_inst, node);
}
var linksection_gz = type_gz.makeSubBlock(scope);
defer linksection_gz.unstack();
- if (var_decl.ast.section_node != 0) {
- const linksection_inst = try expr(&linksection_gz, &linksection_gz.base, coerced_linksection_ri, var_decl.ast.section_node);
+ if (var_decl.ast.section_node.unwrap()) |section_node| {
+ const linksection_inst = try expr(&linksection_gz, &linksection_gz.base, coerced_linksection_ri, section_node);
_ = try linksection_gz.addBreakWithSrcNode(.break_inline, decl_inst, linksection_inst, node);
}
var addrspace_gz = type_gz.makeSubBlock(scope);
defer addrspace_gz.unstack();
- if (var_decl.ast.addrspace_node != 0) {
- const addrspace_ty = try addrspace_gz.addBuiltinValue(var_decl.ast.addrspace_node, .address_space);
- const addrspace_inst = try expr(&addrspace_gz, &addrspace_gz.base, .{ .rl = .{ .coerced_ty = addrspace_ty } }, var_decl.ast.addrspace_node);
+ if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ const addrspace_ty = try addrspace_gz.addBuiltinValue(addrspace_node, .address_space);
+ const addrspace_inst = try expr(&addrspace_gz, &addrspace_gz.base, .{ .rl = .{ .coerced_ty = addrspace_ty } }, addrspace_node);
_ = try addrspace_gz.addBreakWithSrcNode(.break_inline, decl_inst, addrspace_inst, node);
}
var init_gz = type_gz.makeSubBlock(scope);
defer init_gz.unstack();
- if (var_decl.ast.init_node != 0) {
+ if (var_decl.ast.init_node.unwrap()) |init_node| {
init_gz.anon_name_strategy = .parent;
- const init_ri: ResultInfo = if (var_decl.ast.type_node != 0) .{
+ const init_ri: ResultInfo = if (var_decl.ast.type_node != .none) .{
.rl = .{ .coerced_ty = decl_inst.toRef() },
} else .{ .rl = .none };
- const init_inst = try expr(&init_gz, &init_gz.base, init_ri, var_decl.ast.init_node);
+ const init_inst = try expr(&init_gz, &init_gz.base, init_ri, init_node);
_ = try init_gz.addBreakWithSrcNode(.break_inline, decl_inst, init_inst, node);
}
@@ -4686,8 +4642,7 @@ fn comptimeDecl(
node: Ast.Node.Index,
) InnerError!void {
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const body_node = node_datas[node].lhs;
+ const body_node = tree.nodeData(node).node;
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
@@ -4750,7 +4705,6 @@ fn usingnamespaceDecl(
node: Ast.Node.Index,
) InnerError!void {
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
@@ -4758,13 +4712,9 @@ fn usingnamespaceDecl(
astgen.src_hasher.update(tree.getNodeSource(node));
astgen.src_hasher.update(std.mem.asBytes(&astgen.source_column));
- const type_expr = node_datas[node].lhs;
- const is_pub = blk: {
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const main_token = main_tokens[node];
- break :blk (main_token > 0 and token_tags[main_token - 1] == .keyword_pub);
- };
+ const type_expr = tree.nodeData(node).node;
+ const is_pub = tree.isTokenPrecededByTags(tree.nodeMainToken(node), &.{.keyword_pub});
+
// Up top so the ZIR instruction index marks the start range of this
// top-level declaration.
const decl_inst = try gz.makeDeclaration(node);
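
The open-coded "is the previous token `pub`" test becomes `tree.isTokenPrecededByTags`. Its body is not part of this diff; a plausible implementation consistent with the call sites:

    const std = @import("std");
    const Ast = std.zig.Ast;
    const Token = std.zig.Token;

    // Plausible body for the helper (assumed, not shown in this diff): the
    // token tags before `ti` must end with `expected_token_tags`. An empty
    // prefix never matches, which subsumes the old `main_token > 0` check.
    fn isTokenPrecededByTags(
        tree: *const Ast,
        ti: Ast.TokenIndex,
        expected_token_tags: []const Token.Tag,
    ) bool {
        return std.mem.endsWith(
            Token.Tag,
            tree.tokens.items(.tag)[0..ti],
            expected_token_tags,
        );
    }
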
@@ -4818,8 +4768,7 @@ fn testDecl(
node: Ast.Node.Index,
) InnerError!void {
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const body_node = node_datas[node].rhs;
+ _, const body_node = tree.nodeData(node).opt_token_and_node;
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
@@ -4851,12 +4800,10 @@ fn testDecl(
const decl_column = astgen.source_column;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const test_token = main_tokens[node];
+ const test_token = tree.nodeMainToken(node);
const test_name_token = test_token + 1;
- const test_name: Zir.NullTerminatedString = switch (token_tags[test_name_token]) {
+ const test_name: Zir.NullTerminatedString = switch (tree.tokenTag(test_name_token)) {
else => .empty,
.string_literal => name: {
const name = try astgen.strLitAsString(test_name_token);
@@ -4888,7 +4835,7 @@ fn testDecl(
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
if (local_val.name == name_str_index) {
- local_val.used = test_name_token;
+ local_val.used = .fromToken(test_name_token);
return astgen.failTokNotes(test_name_token, "cannot test a {s}", .{
@tagName(local_val.id_cat),
}, &[_]u32{
@@ -4902,7 +4849,7 @@ fn testDecl(
.local_ptr => {
const local_ptr = s.cast(Scope.LocalPtr).?;
if (local_ptr.name == name_str_index) {
- local_ptr.used = test_name_token;
+ local_ptr.used = .fromToken(test_name_token);
return astgen.failTokNotes(test_name_token, "cannot test a {s}", .{
@tagName(local_ptr.id_cat),
}, &[_]u32{
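
The `used`/`discarded` fields on locals move from a `TokenIndex` that used 0 as "unset" to an optional token type, written through the decl-literal call `.fromToken(...)` and compared against `.none` (see the `uses_err` check later in switchExprErrUnion). A sketch of the assumed type:

    const std = @import("std");
    const TokenIndex = u32;

    // Assumed shape of the optional token type behind `.fromToken(...)`.
    pub const OptionalTokenIndex = enum(u32) {
        none = std.math.maxInt(u32),
        _,

        pub fn fromToken(t: TokenIndex) OptionalTokenIndex {
            return @enumFromInt(t);
        }
    };

    test "token 0 no longer doubles as 'unset'" {
        var used: OptionalTokenIndex = .none;
        used = .fromToken(0);
        try std.testing.expect(used != .none);
    }
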
@@ -5013,7 +4960,7 @@ fn testDecl(
.src_line = decl_block.decl_line,
.src_column = decl_column,
- .kind = switch (token_tags[test_name_token]) {
+ .kind = switch (tree.tokenTag(test_name_token)) {
.string_literal => .@"test",
.identifier => .decltest,
else => .unnamed_test,
@@ -5037,7 +4984,7 @@ fn structDeclInner(
node: Ast.Node.Index,
container_decl: Ast.full.ContainerDecl,
layout: std.builtin.Type.ContainerLayout,
- backing_int_node: Ast.Node.Index,
+ backing_int_node: Ast.Node.OptionalIndex,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const gpa = astgen.gpa;
@@ -5049,7 +4996,7 @@ fn structDeclInner(
if (container_field.ast.tuple_like) break member_node;
} else break :is_tuple;
- if (node == 0) {
+ if (node == .root) {
return astgen.failNode(tuple_field_node, "file cannot be a tuple", .{});
} else {
return tupleDecl(gz, scope, node, container_decl, layout, backing_int_node);
@@ -5058,7 +5005,7 @@ fn structDeclInner(
const decl_inst = try gz.reserveInstructionIndex();
- if (container_decl.ast.members.len == 0 and backing_int_node == 0) {
+ if (container_decl.ast.members.len == 0 and backing_int_node == .none) {
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
@@ -5105,11 +5052,11 @@ fn structDeclInner(
var backing_int_body_len: usize = 0;
const backing_int_ref: Zir.Inst.Ref = blk: {
- if (backing_int_node != 0) {
+ if (backing_int_node.unwrap()) |arg| {
if (layout != .@"packed") {
- return astgen.failNode(backing_int_node, "non-packed struct does not support backing integer type", .{});
+ return astgen.failNode(arg, "non-packed struct does not support backing integer type", .{});
} else {
- const backing_int_ref = try typeExpr(&block_scope, &namespace.base, backing_int_node);
+ const backing_int_ref = try typeExpr(&block_scope, &namespace.base, arg);
if (!block_scope.isEmpty()) {
if (!block_scope.endsWithNoReturn()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, backing_int_ref);
@@ -5154,8 +5101,8 @@ fn structDeclInner(
defer astgen.src_hasher = old_hasher;
astgen.src_hasher = std.zig.SrcHasher.init(.{});
astgen.src_hasher.update(@tagName(layout));
- if (backing_int_node != 0) {
- astgen.src_hasher.update(tree.getNodeSource(backing_int_node));
+ if (backing_int_node.unwrap()) |arg| {
+ astgen.src_hasher.update(tree.getNodeSource(arg));
}
var known_non_opv = false;
@@ -5172,18 +5119,18 @@ fn structDeclInner(
astgen.src_hasher.update(tree.getNodeSource(member_node));
const field_name = try astgen.identAsString(member.ast.main_token);
- member.convertToNonTupleLike(astgen.tree.nodes);
+ member.convertToNonTupleLike(astgen.tree);
assert(!member.ast.tuple_like);
wip_members.appendToField(@intFromEnum(field_name));
- if (member.ast.type_expr == 0) {
+ const type_expr = member.ast.type_expr.unwrap() orelse {
return astgen.failTok(member.ast.main_token, "struct field missing type", .{});
- }
+ };
- const field_type = try typeExpr(&block_scope, &namespace.base, member.ast.type_expr);
+ const field_type = try typeExpr(&block_scope, &namespace.base, type_expr);
const have_type_body = !block_scope.isEmpty();
- const have_align = member.ast.align_expr != 0;
- const have_value = member.ast.value_expr != 0;
+ const have_align = member.ast.align_expr != .none;
+ const have_value = member.ast.value_expr != .none;
const is_comptime = member.comptime_token != null;
if (is_comptime) {
@@ -5193,9 +5140,9 @@ fn structDeclInner(
}
} else {
known_non_opv = known_non_opv or
- nodeImpliesMoreThanOnePossibleValue(tree, member.ast.type_expr);
+ nodeImpliesMoreThanOnePossibleValue(tree, type_expr);
known_comptime_only = known_comptime_only or
- nodeImpliesComptimeOnly(tree, member.ast.type_expr);
+ nodeImpliesComptimeOnly(tree, type_expr);
}
wip_members.nextField(bits_per_field, .{ have_align, have_value, is_comptime, have_type_body });
@@ -5213,12 +5160,12 @@ fn structDeclInner(
wip_members.appendToField(@intFromEnum(field_type));
}
- if (have_align) {
+ if (member.ast.align_expr.unwrap()) |align_expr| {
if (layout == .@"packed") {
- return astgen.failNode(member.ast.align_expr, "unable to override alignment of packed struct fields", .{});
+ return astgen.failNode(align_expr, "unable to override alignment of packed struct fields", .{});
}
any_aligned_fields = true;
- const align_ref = try expr(&block_scope, &namespace.base, coerced_align_ri, member.ast.align_expr);
+ const align_ref = try expr(&block_scope, &namespace.base, coerced_align_ri, align_expr);
if (!block_scope.endsWithNoReturn()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, align_ref);
}
@@ -5230,14 +5177,14 @@ fn structDeclInner(
block_scope.instructions.items.len = block_scope.instructions_top;
}
- if (have_value) {
+ if (member.ast.value_expr.unwrap()) |value_expr| {
any_default_inits = true;
            // The decl_inst is used here so that we can easily reconstruct a mapping
            // between it and the field type when the field inits are analyzed.
const ri: ResultInfo = .{ .rl = if (field_type == .none) .none else .{ .coerced_ty = decl_inst.toRef() } };
- const default_inst = try expr(&block_scope, &namespace.base, ri, member.ast.value_expr);
+ const default_inst = try expr(&block_scope, &namespace.base, ri, value_expr);
if (!block_scope.endsWithNoReturn()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, default_inst);
}
@@ -5300,21 +5247,19 @@ fn tupleDecl(
node: Ast.Node.Index,
container_decl: Ast.full.ContainerDecl,
layout: std.builtin.Type.ContainerLayout,
- backing_int_node: Ast.Node.Index,
+ backing_int_node: Ast.Node.OptionalIndex,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
-
switch (layout) {
.auto => {},
.@"extern", .@"packed" => return astgen.failNode(node, "{s} tuples are not supported", .{@tagName(layout)}),
}
- if (backing_int_node != 0) {
- return astgen.failNode(backing_int_node, "tuple does not support backing integer type", .{});
+ if (backing_int_node.unwrap()) |arg| {
+ return astgen.failNode(arg, "tuple does not support backing integer type", .{});
}
// We will use the scratch buffer, starting here, for the field data:
@@ -5329,7 +5274,7 @@ fn tupleDecl(
for (container_decl.ast.members) |member_node| {
const field = tree.fullContainerField(member_node) orelse {
- const tuple_member = for (container_decl.ast.members) |maybe_tuple| switch (node_tags[maybe_tuple]) {
+ const tuple_member = for (container_decl.ast.members) |maybe_tuple| switch (tree.nodeTag(maybe_tuple)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -5348,23 +5293,23 @@ fn tupleDecl(
return astgen.failTok(field.ast.main_token, "tuple field has a name", .{});
}
- if (field.ast.align_expr != 0) {
+ if (field.ast.align_expr != .none) {
return astgen.failTok(field.ast.main_token, "tuple field has alignment", .{});
}
- if (field.ast.value_expr != 0 and field.comptime_token == null) {
+ if (field.ast.value_expr != .none and field.comptime_token == null) {
return astgen.failTok(field.ast.main_token, "non-comptime tuple field has default initialization value", .{});
}
- if (field.ast.value_expr == 0 and field.comptime_token != null) {
+ if (field.ast.value_expr == .none and field.comptime_token != null) {
return astgen.failTok(field.comptime_token.?, "comptime field without default initialization value", .{});
}
- const field_type_ref = try typeExpr(gz, scope, field.ast.type_expr);
+ const field_type_ref = try typeExpr(gz, scope, field.ast.type_expr.unwrap().?);
astgen.scratch.appendAssumeCapacity(@intFromEnum(field_type_ref));
- if (field.ast.value_expr != 0) {
- const field_init_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_type_ref } }, field.ast.value_expr, .tuple_field_default_value);
+ if (field.ast.value_expr.unwrap()) |value_expr| {
+ const field_init_ref = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_type_ref } }, value_expr, .tuple_field_default_value);
astgen.scratch.appendAssumeCapacity(@intFromEnum(field_init_ref));
} else {
astgen.scratch.appendAssumeCapacity(@intFromEnum(Zir.Inst.Ref.none));
@@ -5399,7 +5344,7 @@ fn unionDeclInner(
node: Ast.Node.Index,
members: []const Ast.Node.Index,
layout: std.builtin.Type.ContainerLayout,
- arg_node: Ast.Node.Index,
+ opt_arg_node: Ast.Node.OptionalIndex,
auto_enum_tok: ?Ast.TokenIndex,
) InnerError!Zir.Inst.Ref {
const decl_inst = try gz.reserveInstructionIndex();
@@ -5434,15 +5379,15 @@ fn unionDeclInner(
const decl_count = try astgen.scanContainer(&namespace, members, .@"union");
const field_count: u32 = @intCast(members.len - decl_count);
- if (layout != .auto and (auto_enum_tok != null or arg_node != 0)) {
- if (arg_node != 0) {
+ if (layout != .auto and (auto_enum_tok != null or opt_arg_node != .none)) {
+ if (opt_arg_node.unwrap()) |arg_node| {
return astgen.failNode(arg_node, "{s} union does not support enum tag type", .{@tagName(layout)});
} else {
return astgen.failTok(auto_enum_tok.?, "{s} union does not support enum tag type", .{@tagName(layout)});
}
}
- const arg_inst: Zir.Inst.Ref = if (arg_node != 0)
+ const arg_inst: Zir.Inst.Ref = if (opt_arg_node.unwrap()) |arg_node|
try typeExpr(&block_scope, &namespace.base, arg_node)
else
.none;
@@ -5458,7 +5403,7 @@ fn unionDeclInner(
astgen.src_hasher = std.zig.SrcHasher.init(.{});
astgen.src_hasher.update(@tagName(layout));
astgen.src_hasher.update(&.{@intFromBool(auto_enum_tok != null)});
- if (arg_node != 0) {
+ if (opt_arg_node.unwrap()) |arg_node| {
astgen.src_hasher.update(astgen.tree.getNodeSource(arg_node));
}
@@ -5468,7 +5413,7 @@ fn unionDeclInner(
.field => |field| field,
};
astgen.src_hasher.update(astgen.tree.getNodeSource(member_node));
- member.convertToNonTupleLike(astgen.tree.nodes);
+ member.convertToNonTupleLike(astgen.tree);
if (member.ast.tuple_like) {
return astgen.failTok(member.ast.main_token, "union field missing name", .{});
}
@@ -5479,24 +5424,24 @@ fn unionDeclInner(
const field_name = try astgen.identAsString(member.ast.main_token);
wip_members.appendToField(@intFromEnum(field_name));
- const have_type = member.ast.type_expr != 0;
- const have_align = member.ast.align_expr != 0;
- const have_value = member.ast.value_expr != 0;
+ const have_type = member.ast.type_expr != .none;
+ const have_align = member.ast.align_expr != .none;
+ const have_value = member.ast.value_expr != .none;
const unused = false;
wip_members.nextField(bits_per_field, .{ have_type, have_align, have_value, unused });
- if (have_type) {
- const field_type = try typeExpr(&block_scope, &namespace.base, member.ast.type_expr);
+ if (member.ast.type_expr.unwrap()) |type_expr| {
+ const field_type = try typeExpr(&block_scope, &namespace.base, type_expr);
wip_members.appendToField(@intFromEnum(field_type));
} else if (arg_inst == .none and auto_enum_tok == null) {
return astgen.failNode(member_node, "union field missing type", .{});
}
- if (have_align) {
- const align_inst = try expr(&block_scope, &block_scope.base, coerced_align_ri, member.ast.align_expr);
+ if (member.ast.align_expr.unwrap()) |align_expr| {
+ const align_inst = try expr(&block_scope, &block_scope.base, coerced_align_ri, align_expr);
wip_members.appendToField(@intFromEnum(align_inst));
any_aligned_fields = true;
}
- if (have_value) {
+ if (member.ast.value_expr.unwrap()) |value_expr| {
if (arg_inst == .none) {
return astgen.failNodeNotes(
node,
@@ -5504,7 +5449,7 @@ fn unionDeclInner(
.{},
&[_]u32{
try astgen.errNoteNode(
- member.ast.value_expr,
+ value_expr,
"tag value specified here",
.{},
),
@@ -5518,14 +5463,14 @@ fn unionDeclInner(
.{},
&[_]u32{
try astgen.errNoteNode(
- member.ast.value_expr,
+ value_expr,
"tag value specified here",
.{},
),
},
);
}
- const tag_value = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
+ const tag_value = try expr(&block_scope, &block_scope.base, .{ .rl = .{ .ty = arg_inst } }, value_expr);
wip_members.appendToField(@intFromEnum(tag_value));
}
}
@@ -5577,7 +5522,6 @@ fn containerDecl(
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
const prev_fn_block = astgen.fn_block;
astgen.fn_block = null;
@@ -5586,9 +5530,9 @@ fn containerDecl(
// We must not create any types until Sema. Here the goal is only to generate
// ZIR for all the field types, alignments, and default value expressions.
- switch (token_tags[container_decl.ast.main_token]) {
+ switch (tree.tokenTag(container_decl.ast.main_token)) {
.keyword_struct => {
- const layout: std.builtin.Type.ContainerLayout = if (container_decl.layout_token) |t| switch (token_tags[t]) {
+ const layout: std.builtin.Type.ContainerLayout = if (container_decl.layout_token) |t| switch (tree.tokenTag(t)) {
.keyword_packed => .@"packed",
.keyword_extern => .@"extern",
else => unreachable,
@@ -5598,7 +5542,7 @@ fn containerDecl(
return rvalue(gz, ri, result, node);
},
.keyword_union => {
- const layout: std.builtin.Type.ContainerLayout = if (container_decl.layout_token) |t| switch (token_tags[t]) {
+ const layout: std.builtin.Type.ContainerLayout = if (container_decl.layout_token) |t| switch (tree.tokenTag(t)) {
.keyword_packed => .@"packed",
.keyword_extern => .@"extern",
else => unreachable,
@@ -5616,23 +5560,23 @@ fn containerDecl(
var values: usize = 0;
var total_fields: usize = 0;
var decls: usize = 0;
- var nonexhaustive_node: Ast.Node.Index = 0;
+ var opt_nonexhaustive_node: Ast.Node.OptionalIndex = .none;
var nonfinal_nonexhaustive = false;
for (container_decl.ast.members) |member_node| {
var member = tree.fullContainerField(member_node) orelse {
decls += 1;
continue;
};
- member.convertToNonTupleLike(astgen.tree.nodes);
+ member.convertToNonTupleLike(astgen.tree);
if (member.ast.tuple_like) {
return astgen.failTok(member.ast.main_token, "enum field missing name", .{});
}
if (member.comptime_token) |comptime_token| {
return astgen.failTok(comptime_token, "enum fields cannot be marked comptime", .{});
}
- if (member.ast.type_expr != 0) {
+ if (member.ast.type_expr.unwrap()) |type_expr| {
return astgen.failNodeNotes(
- member.ast.type_expr,
+ type_expr,
"enum fields do not have types",
.{},
&[_]u32{
@@ -5644,13 +5588,13 @@ fn containerDecl(
},
);
}
- if (member.ast.align_expr != 0) {
- return astgen.failNode(member.ast.align_expr, "enum fields cannot be aligned", .{});
+ if (member.ast.align_expr.unwrap()) |align_expr| {
+ return astgen.failNode(align_expr, "enum fields cannot be aligned", .{});
}
const name_token = member.ast.main_token;
if (mem.eql(u8, tree.tokenSlice(name_token), "_")) {
- if (nonexhaustive_node != 0) {
+ if (opt_nonexhaustive_node.unwrap()) |nonexhaustive_node| {
return astgen.failNodeNotes(
member_node,
"redundant non-exhaustive enum mark",
@@ -5664,40 +5608,41 @@ fn containerDecl(
},
);
}
- nonexhaustive_node = member_node;
- if (member.ast.value_expr != 0) {
- return astgen.failNode(member.ast.value_expr, "'_' is used to mark an enum as non-exhaustive and cannot be assigned a value", .{});
+ opt_nonexhaustive_node = member_node.toOptional();
+ if (member.ast.value_expr.unwrap()) |value_expr| {
+ return astgen.failNode(value_expr, "'_' is used to mark an enum as non-exhaustive and cannot be assigned a value", .{});
}
continue;
- } else if (nonexhaustive_node != 0) {
+ } else if (opt_nonexhaustive_node != .none) {
nonfinal_nonexhaustive = true;
}
total_fields += 1;
- if (member.ast.value_expr != 0) {
- if (container_decl.ast.arg == 0) {
- return astgen.failNode(member.ast.value_expr, "value assigned to enum tag with inferred tag type", .{});
+ if (member.ast.value_expr.unwrap()) |value_expr| {
+ if (container_decl.ast.arg == .none) {
+ return astgen.failNode(value_expr, "value assigned to enum tag with inferred tag type", .{});
}
values += 1;
}
}
if (nonfinal_nonexhaustive) {
- return astgen.failNode(nonexhaustive_node, "'_' field of non-exhaustive enum must be last", .{});
+ return astgen.failNode(opt_nonexhaustive_node.unwrap().?, "'_' field of non-exhaustive enum must be last", .{});
}
break :blk .{
.total_fields = total_fields,
.values = values,
.decls = decls,
- .nonexhaustive_node = nonexhaustive_node,
+ .nonexhaustive_node = opt_nonexhaustive_node,
};
};
- if (counts.nonexhaustive_node != 0 and container_decl.ast.arg == 0) {
+ if (counts.nonexhaustive_node != .none and container_decl.ast.arg == .none) {
+ const nonexhaustive_node = counts.nonexhaustive_node.unwrap().?;
return astgen.failNodeNotes(
node,
"non-exhaustive enum missing integer tag type",
.{},
&[_]u32{
try astgen.errNoteNode(
- counts.nonexhaustive_node,
+ nonexhaustive_node,
"marked non-exhaustive here",
.{},
),
@@ -5706,7 +5651,7 @@ fn containerDecl(
}
// In this case we must generate ZIR code for the tag values, similar to
// how structs are handled above.
- const nonexhaustive = counts.nonexhaustive_node != 0;
+ const nonexhaustive = counts.nonexhaustive_node != .none;
const decl_inst = try gz.reserveInstructionIndex();
@@ -5736,8 +5681,8 @@ fn containerDecl(
_ = try astgen.scanContainer(&namespace, container_decl.ast.members, .@"enum");
namespace.base.tag = .namespace;
- const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg != 0)
- try comptimeExpr(&block_scope, &namespace.base, coerced_type_ri, container_decl.ast.arg, .type)
+ const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg.unwrap()) |arg|
+ try comptimeExpr(&block_scope, &namespace.base, coerced_type_ri, arg, .type)
else
.none;
@@ -5749,31 +5694,31 @@ fn containerDecl(
const old_hasher = astgen.src_hasher;
defer astgen.src_hasher = old_hasher;
astgen.src_hasher = std.zig.SrcHasher.init(.{});
- if (container_decl.ast.arg != 0) {
- astgen.src_hasher.update(tree.getNodeSource(container_decl.ast.arg));
+ if (container_decl.ast.arg.unwrap()) |arg| {
+ astgen.src_hasher.update(tree.getNodeSource(arg));
}
astgen.src_hasher.update(&.{@intFromBool(nonexhaustive)});
for (container_decl.ast.members) |member_node| {
- if (member_node == counts.nonexhaustive_node)
+ if (member_node.toOptional() == counts.nonexhaustive_node)
continue;
astgen.src_hasher.update(tree.getNodeSource(member_node));
var member = switch (try containerMember(&block_scope, &namespace.base, &wip_members, member_node)) {
.decl => continue,
.field => |field| field,
};
- member.convertToNonTupleLike(astgen.tree.nodes);
+ member.convertToNonTupleLike(astgen.tree);
assert(member.comptime_token == null);
- assert(member.ast.type_expr == 0);
- assert(member.ast.align_expr == 0);
+ assert(member.ast.type_expr == .none);
+ assert(member.ast.align_expr == .none);
const field_name = try astgen.identAsString(member.ast.main_token);
wip_members.appendToField(@intFromEnum(field_name));
- const have_value = member.ast.value_expr != 0;
+ const have_value = member.ast.value_expr != .none;
wip_members.nextField(bits_per_field, .{have_value});
- if (have_value) {
+ if (member.ast.value_expr.unwrap()) |value_expr| {
if (arg_inst == .none) {
return astgen.failNodeNotes(
node,
@@ -5781,14 +5726,14 @@ fn containerDecl(
.{},
&[_]u32{
try astgen.errNoteNode(
- member.ast.value_expr,
+ value_expr,
"tag value specified here",
.{},
),
},
);
}
- const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
+ const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, value_expr);
wip_members.appendToField(@intFromEnum(tag_value_inst));
}
}
@@ -5828,7 +5773,7 @@ fn containerDecl(
return rvalue(gz, ri, decl_inst.toRef(), node);
},
.keyword_opaque => {
- assert(container_decl.ast.arg == 0);
+ assert(container_decl.ast.arg == .none);
const decl_inst = try gz.reserveInstructionIndex();
@@ -5899,9 +5844,7 @@ fn containerMember(
) InnerError!ContainerMemberResult {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- switch (node_tags[member_node]) {
+ switch (tree.nodeTag(member_node)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -5915,7 +5858,11 @@ fn containerMember(
=> {
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, member_node).?;
- const body = if (node_tags[member_node] == .fn_decl) node_datas[member_node].rhs else 0;
+
+ const body: Ast.Node.OptionalIndex = if (tree.nodeTag(member_node) == .fn_decl)
+ tree.nodeData(member_node).node_and_node[1].toOptional()
+ else
+ .none;
const prev_decl_index = wip_members.decl_index;
astgen.fnDecl(gz, scope, wip_members, member_node, body, full) catch |err| switch (err) {
@@ -5986,12 +5933,7 @@ fn containerMember(
.@"usingnamespace",
.empty,
member_node,
- is_pub: {
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
- const main_token = main_tokens[member_node];
- break :is_pub main_token > 0 and token_tags[main_token - 1] == .keyword_pub;
- },
+ tree.isTokenPrecededByTags(tree.nodeMainToken(member_node), &.{.keyword_pub}),
);
},
};
@@ -6025,8 +5967,6 @@ fn errorSetDecl(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zi
const astgen = gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
const payload_index = try reserveExtra(astgen, @typeInfo(Zir.Inst.ErrorSetDecl).@"struct".fields.len);
var fields_len: usize = 0;
@@ -6034,10 +5974,10 @@ fn errorSetDecl(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zi
var idents: std.AutoHashMapUnmanaged(Zir.NullTerminatedString, Ast.TokenIndex) = .empty;
defer idents.deinit(gpa);
- const error_token = main_tokens[node];
+ const error_token = tree.nodeMainToken(node);
var tok_i = error_token + 2;
while (true) : (tok_i += 1) {
- switch (token_tags[tok_i]) {
+ switch (tree.tokenTag(tok_i)) {
.doc_comment, .comma => {},
.identifier => {
const str_index = try astgen.identAsString(tok_i);
@@ -6089,10 +6029,10 @@ fn tryExpr(
return astgen.failNode(node, "'try' outside function scope", .{});
};
- if (parent_gz.any_defer_node != 0) {
+ if (parent_gz.any_defer_node.unwrap()) |any_defer_node| {
return astgen.failNodeNotes(node, "'try' not allowed inside defer expression", .{}, &.{
try astgen.errNoteNode(
- parent_gz.any_defer_node,
+ any_defer_node,
"defer expression here",
.{},
),
@@ -6155,16 +6095,16 @@ fn orelseCatchExpr(
scope: *Scope,
ri: ResultInfo,
node: Ast.Node.Index,
- lhs: Ast.Node.Index,
cond_op: Zir.Inst.Tag,
unwrap_op: Zir.Inst.Tag,
unwrap_code_op: Zir.Inst.Tag,
- rhs: Ast.Node.Index,
payload_token: ?Ast.TokenIndex,
) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+
const need_rl = astgen.nodes_need_rl.contains(node);
const block_ri: ResultInfo = if (need_rl) ri else .{
.rl = switch (ri.rl) {
@@ -6297,12 +6237,8 @@ fn addFieldAccess(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const node_datas = tree.nodes.items(.data);
- const object_node = node_datas[node].lhs;
- const dot_token = main_tokens[node];
- const field_ident = dot_token + 1;
+ const object_node, const field_ident = tree.nodeData(node).node_and_token;
const str_index = try astgen.identAsString(field_ident);
const lhs = try expr(gz, scope, lhs_ri, object_node);
@@ -6322,24 +6258,25 @@ fn arrayAccess(
node: Ast.Node.Index,
) InnerError!Zir.Inst.Ref {
const tree = gz.astgen.tree;
- const node_datas = tree.nodes.items(.data);
switch (ri.rl) {
.ref, .ref_coerced_ty => {
- const lhs = try expr(gz, scope, .{ .rl = .ref }, node_datas[node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
+ const lhs = try expr(gz, scope, .{ .rl = .ref }, lhs_node);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
- const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs);
+ const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, rhs_node);
try emitDbgStmt(gz, cursor);
return gz.addPlNode(.elem_ptr_node, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs });
},
else => {
- const lhs = try expr(gz, scope, .{ .rl = .none }, node_datas[node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
+ const lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node);
const cursor = maybeAdvanceSourceCursorToMainToken(gz, node);
- const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, node_datas[node].rhs);
+ const rhs = try expr(gz, scope, .{ .rl = .{ .coerced_ty = .usize_type } }, rhs_node);
try emitDbgStmt(gz, cursor);
return rvalue(gz, ri, try gz.addPlNode(.elem_val_node, node, Zir.Inst.Bin{ .lhs = lhs, .rhs = rhs }), node);
@@ -6356,22 +6293,22 @@ fn simpleBinOp(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
+
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
if (op_inst_tag == .cmp_neq or op_inst_tag == .cmp_eq) {
- const node_tags = tree.nodes.items(.tag);
const str = if (op_inst_tag == .cmp_eq) "==" else "!=";
- if (node_tags[node_datas[node].lhs] == .string_literal or
- node_tags[node_datas[node].rhs] == .string_literal)
+ if (tree.nodeTag(lhs_node) == .string_literal or
+ tree.nodeTag(rhs_node) == .string_literal)
return astgen.failNode(node, "cannot compare strings with {s}", .{str});
}
- const lhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].lhs, node);
+ const lhs = try reachableExpr(gz, scope, .{ .rl = .none }, lhs_node, node);
const cursor = switch (op_inst_tag) {
.add, .sub, .mul, .div, .mod_rem => maybeAdvanceSourceCursorToMainToken(gz, node),
else => undefined,
};
- const rhs = try reachableExpr(gz, scope, .{ .rl = .none }, node_datas[node].rhs, node);
+ const rhs = try reachableExpr(gz, scope, .{ .rl = .none }, rhs_node, node);
switch (op_inst_tag) {
.add, .sub, .mul, .div, .mod_rem => {
@@ -6405,16 +6342,16 @@ fn boolBinOp(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const lhs = try expr(gz, scope, coerced_bool_ri, node_datas[node].lhs);
+ const lhs_node, const rhs_node = tree.nodeData(node).node_and_node;
+ const lhs = try expr(gz, scope, coerced_bool_ri, lhs_node);
const bool_br = (try gz.addPlNodePayloadIndex(zir_tag, node, undefined)).toIndex().?;
var rhs_scope = gz.makeSubBlock(scope);
defer rhs_scope.unstack();
- const rhs = try fullBodyExpr(&rhs_scope, &rhs_scope.base, coerced_bool_ri, node_datas[node].rhs, .allow_branch_hint);
+ const rhs = try fullBodyExpr(&rhs_scope, &rhs_scope.base, coerced_bool_ri, rhs_node, .allow_branch_hint);
if (!gz.refIsNoReturn(rhs)) {
- _ = try rhs_scope.addBreakWithSrcNode(.break_inline, bool_br, rhs, node_datas[node].rhs);
+ _ = try rhs_scope.addBreakWithSrcNode(.break_inline, bool_br, rhs, rhs_node);
}
try rhs_scope.setBoolBrBody(bool_br, lhs);
@@ -6431,7 +6368,6 @@ fn ifExpr(
) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
const do_err_trace = astgen.fn_block != null and if_full.error_token != null;
@@ -6454,7 +6390,7 @@ fn ifExpr(
defer block_scope.unstack();
const payload_is_ref = if (if_full.payload_token) |payload_token|
- token_tags[payload_token] == .asterisk
+ tree.tokenTag(payload_token) == .asterisk
else
false;
@@ -6532,7 +6468,7 @@ fn ifExpr(
break :s &then_scope.base;
}
} else if (if_full.payload_token) |payload_token| {
- const ident_token = if (payload_is_ref) payload_token + 1 else payload_token;
+ const ident_token = payload_token + @intFromBool(payload_is_ref);
const tag: Zir.Inst.Tag = if (payload_is_ref)
.optional_payload_unsafe_ptr
else
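
The branch selecting the identifier token collapses into arithmetic: for `|*x|` the identifier sits one token past the asterisk, for `|x|` it is the payload token itself. A tiny self-contained check of the idiom:

    const std = @import("std");

    // @intFromBool yields 0 for false and 1 for true, so the offset needs
    // no branch.
    test "payload identifier offset" {
        const payload_token: u32 = 10;
        try std.testing.expectEqual(@as(u32, 10), payload_token + @intFromBool(false));
        try std.testing.expectEqual(@as(u32, 11), payload_token + @intFromBool(true));
    }
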
@@ -6574,8 +6510,7 @@ fn ifExpr(
if (do_err_trace and nodeMayAppendToErrorTrace(tree, if_full.ast.cond_expr))
_ = try else_scope.addSaveErrRetIndex(.always);
- const else_node = if_full.ast.else_expr;
- if (else_node != 0) {
+ if (if_full.ast.else_expr.unwrap()) |else_node| {
const sub_scope = s: {
if (if_full.error_token) |error_token| {
const tag: Zir.Inst.Tag = if (payload_is_ref)
@@ -6663,8 +6598,6 @@ fn whileExpr(
) InnerError!Zir.Inst.Ref {
const astgen = parent_gz.astgen;
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- const token_starts = tree.tokens.items(.start);
const need_rl = astgen.nodes_need_rl.contains(node);
const block_ri: ResultInfo = if (need_rl) ri else .{
@@ -6701,7 +6634,7 @@ fn whileExpr(
defer cond_scope.unstack();
const payload_is_ref = if (while_full.payload_token) |payload_token|
- token_tags[payload_token] == .asterisk
+ tree.tokenTag(payload_token) == .asterisk
else
false;
@@ -6787,7 +6720,6 @@ fn whileExpr(
break :s &then_scope.base;
}
} else if (while_full.payload_token) |payload_token| {
- const ident_token = if (payload_is_ref) payload_token + 1 else payload_token;
const tag: Zir.Inst.Tag = if (payload_is_ref)
.optional_payload_unsafe_ptr
else
@@ -6795,6 +6727,7 @@ fn whileExpr(
// will add this instruction to then_scope.instructions below
const payload_inst = try then_scope.makeUnNode(tag, cond.inst, while_full.ast.cond_expr);
opt_payload_inst = payload_inst.toOptional();
+ const ident_token = payload_token + @intFromBool(payload_is_ref);
const ident_name = try astgen.identAsString(ident_token);
const ident_bytes = tree.tokenSlice(ident_token);
if (mem.eql(u8, "_", ident_bytes)) {
@@ -6849,8 +6782,8 @@ fn whileExpr(
// are no jumps to it. This happens when the last statement of a while body is noreturn
// and there are no `continue` statements.
// Tracking issue: https://github.com/ziglang/zig/issues/9185
- if (while_full.ast.cont_expr != 0) {
- _ = try unusedResultExpr(&then_scope, then_sub_scope, while_full.ast.cont_expr);
+ if (while_full.ast.cont_expr.unwrap()) |cont_expr| {
+ _ = try unusedResultExpr(&then_scope, then_sub_scope, cont_expr);
}
continue_scope.instructions_top = continue_scope.instructions.items.len;
@@ -6862,7 +6795,7 @@ fn whileExpr(
try checkUsed(parent_gz, &then_scope.base, then_sub_scope);
const break_tag: Zir.Inst.Tag = if (is_inline) .break_inline else .@"break";
if (!continue_scope.endsWithNoReturn()) {
- astgen.advanceSourceCursor(token_starts[tree.lastToken(then_node)]);
+ astgen.advanceSourceCursor(tree.tokenStart(tree.lastToken(then_node)));
try emitDbgStmt(parent_gz, .{ astgen.source_line - parent_gz.decl_line, astgen.source_column });
_ = try parent_gz.add(.{
.tag = .extended,
@@ -6880,8 +6813,7 @@ fn whileExpr(
var else_scope = parent_gz.makeSubBlock(&cond_scope.base);
defer else_scope.unstack();
- const else_node = while_full.ast.else_expr;
- if (else_node != 0) {
+ if (while_full.ast.else_expr.unwrap()) |else_node| {
const sub_scope = s: {
if (while_full.error_token) |error_token| {
const tag: Zir.Inst.Tag = if (payload_is_ref)
@@ -6979,10 +6911,6 @@ fn forExpr(
try astgen.appendErrorTok(for_full.inline_token.?, "redundant inline keyword in comptime scope", .{});
}
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- const token_starts = tree.tokens.items(.start);
- const node_tags = tree.nodes.items(.tag);
- const node_data = tree.nodes.items(.data);
const gpa = astgen.gpa;
// For counters, this is the start value; for indexables, this is the base
@@ -7012,7 +6940,7 @@ fn forExpr(
{
var capture_token = for_full.payload_token;
for (for_full.ast.inputs, indexables, lens) |input, *indexable_ref, *len_refs| {
- const capture_is_ref = token_tags[capture_token] == .asterisk;
+ const capture_is_ref = tree.tokenTag(capture_token) == .asterisk;
const ident_tok = capture_token + @intFromBool(capture_is_ref);
const is_discard = mem.eql(u8, tree.tokenSlice(ident_tok), "_");
@@ -7023,16 +6951,15 @@ fn forExpr(
capture_token = ident_tok + 2;
try emitDbgNode(parent_gz, input);
- if (node_tags[input] == .for_range) {
+ if (tree.nodeTag(input) == .for_range) {
if (capture_is_ref) {
return astgen.failTok(ident_tok, "cannot capture reference to range", .{});
}
- const start_node = node_data[input].lhs;
+ const start_node, const end_node = tree.nodeData(input).node_and_opt_node;
const start_val = try expr(parent_gz, scope, .{ .rl = .{ .ty = .usize_type } }, start_node);
- const end_node = node_data[input].rhs;
- const end_val = if (end_node != 0)
- try expr(parent_gz, scope, .{ .rl = .{ .ty = .usize_type } }, node_data[input].rhs)
+ const end_val = if (end_node.unwrap()) |end|
+ try expr(parent_gz, scope, .{ .rl = .{ .ty = .usize_type } }, end)
else
.none;
@@ -7125,7 +7052,7 @@ fn forExpr(
var capture_token = for_full.payload_token;
var capture_sub_scope: *Scope = &then_scope.base;
for (for_full.ast.inputs, indexables, capture_scopes) |input, indexable_ref, *capture_scope| {
- const capture_is_ref = token_tags[capture_token] == .asterisk;
+ const capture_is_ref = tree.tokenTag(capture_token) == .asterisk;
const ident_tok = capture_token + @intFromBool(capture_is_ref);
const capture_name = tree.tokenSlice(ident_tok);
// Skip over the comma, and on to the next capture (or the ending pipe character).
@@ -7137,7 +7064,7 @@ fn forExpr(
try astgen.detectLocalShadowing(capture_sub_scope, name_str_index, ident_tok, capture_name, .capture);
const capture_inst = inst: {
- const is_counter = node_tags[input] == .for_range;
+ const is_counter = tree.nodeTag(input) == .for_range;
if (indexable_ref == .none) {
// Special case: the main index can be used directly.
@@ -7184,7 +7111,7 @@ fn forExpr(
try checkUsed(parent_gz, &then_scope.base, then_sub_scope);
- astgen.advanceSourceCursor(token_starts[tree.lastToken(then_node)]);
+ astgen.advanceSourceCursor(tree.tokenStart(tree.lastToken(then_node)));
try emitDbgStmt(parent_gz, .{ astgen.source_line - parent_gz.decl_line, astgen.source_column });
_ = try parent_gz.add(.{
.tag = .extended,
@@ -7201,8 +7128,7 @@ fn forExpr(
var else_scope = parent_gz.makeSubBlock(&cond_scope.base);
defer else_scope.unstack();
- const else_node = for_full.ast.else_expr;
- if (else_node != 0) {
+ if (for_full.ast.else_expr.unwrap()) |else_node| {
const sub_scope = &else_scope.base;
        // Remove the continue block and break block so that `continue` and `break`
        // control flow apply to outer loops, not this one.
@@ -7270,10 +7196,6 @@ fn switchExprErrUnion(
const astgen = parent_gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
const if_full = switch (node_ty) {
.@"catch" => undefined,
@@ -7282,23 +7204,19 @@ fn switchExprErrUnion(
const switch_node, const operand_node, const error_payload = switch (node_ty) {
.@"catch" => .{
- node_datas[catch_or_if_node].rhs,
- node_datas[catch_or_if_node].lhs,
- main_tokens[catch_or_if_node] + 2,
+ tree.nodeData(catch_or_if_node).node_and_node[1],
+ tree.nodeData(catch_or_if_node).node_and_node[0],
+ tree.nodeMainToken(catch_or_if_node) + 2,
},
.@"if" => .{
- if_full.ast.else_expr,
+ if_full.ast.else_expr.unwrap().?,
if_full.ast.cond_expr,
if_full.error_token.?,
},
};
- assert(node_tags[switch_node] == .@"switch" or node_tags[switch_node] == .switch_comma);
+ const switch_full = tree.fullSwitch(switch_node).?;
const do_err_trace = astgen.fn_block != null;
-
- const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
- const case_nodes = tree.extra_data[extra.start..extra.end];
-
const need_rl = astgen.nodes_need_rl.contains(catch_or_if_node);
const block_ri: ResultInfo = if (need_rl) ri else .{
.rl = switch (ri.rl) {
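
Instead of hand-decoding the switch's case list out of `extra_data` via `Ast.Node.SubRange`, the code now asks for a `full.Switch`. A usage sketch, under the assumption that `fullSwitch` mirrors the other `full*` helpers; the diff only confirms `tree.fullSwitch(...)` and the `ast.cases` field, and the helper name below is illustrative:

    const std = @import("std");
    const Ast = std.zig.Ast;

    // fullSwitch is assumed to return null for non-switch nodes, so `.?`
    // carries the tag assertion the old code performed explicitly before
    // decoding extra_data by hand.
    fn countSwitchCases(tree: *const Ast, switch_node: Ast.Node.Index) usize {
        const switch_full = tree.fullSwitch(switch_node).?;
        return switch_full.ast.cases.len;
    }
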
@@ -7310,7 +7228,7 @@ fn switchExprErrUnion(
};
const payload_is_ref = switch (node_ty) {
- .@"if" => if_full.payload_token != null and token_tags[if_full.payload_token.?] == .asterisk,
+ .@"if" => if_full.payload_token != null and tree.tokenTag(if_full.payload_token.?) == .asterisk,
.@"catch" => ri.rl == .ref or ri.rl == .ref_coerced_ty,
};
@@ -7322,9 +7240,9 @@ fn switchExprErrUnion(
var multi_cases_len: u32 = 0;
var inline_cases_len: u32 = 0;
var has_else = false;
- var else_node: Ast.Node.Index = 0;
+ var else_node: Ast.Node.OptionalIndex = .none;
var else_src: ?Ast.TokenIndex = null;
- for (case_nodes) |case_node| {
+ for (switch_full.ast.cases) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
if (case.ast.values.len == 0) {
@@ -7344,12 +7262,12 @@ fn switchExprErrUnion(
);
}
has_else = true;
- else_node = case_node;
+ else_node = case_node.toOptional();
else_src = case_src;
continue;
} else if (case.ast.values.len == 1 and
- node_tags[case.ast.values[0]] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"))
+ tree.nodeTag(case.ast.values[0]) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(case.ast.values[0])), "_"))
{
const case_src = case.ast.arrow_token - 1;
return astgen.failTokNotes(
@@ -7367,11 +7285,11 @@ fn switchExprErrUnion(
}
for (case.ast.values) |val| {
- if (node_tags[val] == .string_literal)
+ if (tree.nodeTag(val) == .string_literal)
return astgen.failNode(val, "cannot switch on strings", .{});
}
- if (case.ast.values.len == 1 and node_tags[case.ast.values[0]] != .switch_range) {
+ if (case.ast.values.len == 1 and tree.nodeTag(case.ast.values[0]) != .switch_range) {
scalar_cases_len += 1;
} else {
multi_cases_len += 1;
@@ -7564,11 +7482,11 @@ fn switchExprErrUnion(
var multi_case_index: u32 = 0;
var scalar_case_index: u32 = 0;
var any_uses_err_capture = false;
- for (case_nodes) |case_node| {
+ for (switch_full.ast.cases) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
const is_multi_case = case.ast.values.len > 1 or
- (case.ast.values.len == 1 and node_tags[case.ast.values[0]] == .switch_range);
+ (case.ast.values.len == 1 and tree.nodeTag(case.ast.values[0]) == .switch_range);
var dbg_var_name: Zir.NullTerminatedString = .empty;
var dbg_var_inst: Zir.Inst.Ref = undefined;
@@ -7586,7 +7504,7 @@ fn switchExprErrUnion(
};
const capture_token = case.payload_token orelse break :blk &err_scope.base;
- if (token_tags[capture_token] != .identifier) {
+ if (tree.tokenTag(capture_token) != .identifier) {
return astgen.failTok(capture_token + 1, "error set cannot be captured by reference", .{});
}
@@ -7622,7 +7540,7 @@ fn switchExprErrUnion(
// items
var items_len: u32 = 0;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] == .switch_range) continue;
+ if (tree.nodeTag(item_node) == .switch_range) continue;
items_len += 1;
const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node, .switch_item);
@@ -7632,11 +7550,12 @@ fn switchExprErrUnion(
// ranges
var ranges_len: u32 = 0;
for (case.ast.values) |range| {
- if (node_tags[range] != .switch_range) continue;
+ if (tree.nodeTag(range) != .switch_range) continue;
ranges_len += 1;
- const first = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].lhs, .switch_item);
- const last = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].rhs, .switch_item);
+ const first_node, const last_node = tree.nodeData(range).node_and_node;
+ const first = try comptimeExpr(parent_gz, scope, item_ri, first_node, .switch_item);
+ const last = try comptimeExpr(parent_gz, scope, item_ri, last_node, .switch_item);
try payloads.appendSlice(gpa, &[_]u32{
@intFromEnum(first), @intFromEnum(last),
});
@@ -7645,7 +7564,7 @@ fn switchExprErrUnion(
payloads.items[header_index] = items_len;
payloads.items[header_index + 1] = ranges_len;
break :blk header_index + 2;
- } else if (case_node == else_node) blk: {
+ } else if (case_node.toOptional() == else_node) blk: {
payloads.items[case_table_start + 1] = header_index;
try payloads.resize(gpa, header_index + 1); // body_len
break :blk header_index;
@@ -7675,7 +7594,7 @@ fn switchExprErrUnion(
const case_result = try fullBodyExpr(&case_scope, sub_scope, block_scope.break_result_info, target_expr_node, .allow_branch_hint);
// check capture_scope, not err_scope to avoid false positive unused error capture
try checkUsed(parent_gz, &case_scope.base, err_scope.parent);
- const uses_err = err_scope.used != 0 or err_scope.discarded != 0;
+ const uses_err = err_scope.used != .none or err_scope.discarded != .none;
if (uses_err) {
try case_scope.addDbgVar(.dbg_var_val, err_name, err_inst.toRef());
any_uses_err_capture = true;
@@ -7775,10 +7694,6 @@ fn switchExpr(
const astgen = parent_gz.astgen;
const gpa = astgen.gpa;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
const operand_node = switch_full.ast.condition;
const case_nodes = switch_full.ast.cases;
@@ -7810,17 +7725,17 @@ fn switchExpr(
var multi_cases_len: u32 = 0;
var inline_cases_len: u32 = 0;
var special_prong: Zir.SpecialProng = .none;
- var special_node: Ast.Node.Index = 0;
+ var special_node: Ast.Node.OptionalIndex = .none;
var else_src: ?Ast.TokenIndex = null;
var underscore_src: ?Ast.TokenIndex = null;
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
if (case.payload_token) |payload_token| {
- const ident = if (token_tags[payload_token] == .asterisk) blk: {
+ const ident = if (tree.tokenTag(payload_token) == .asterisk) blk: {
any_payload_is_ref = true;
break :blk payload_token + 1;
} else payload_token;
- if (token_tags[ident + 1] == .comma) {
+ if (tree.tokenTag(ident + 1) == .comma) {
any_has_tag_capture = true;
}
@@ -7868,13 +7783,13 @@ fn switchExpr(
},
);
}
- special_node = case_node;
+ special_node = case_node.toOptional();
special_prong = .@"else";
else_src = case_src;
continue;
} else if (case.ast.values.len == 1 and
- node_tags[case.ast.values[0]] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"))
+ tree.nodeTag(case.ast.values[0]) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(case.ast.values[0])), "_"))
{
const case_src = case.ast.arrow_token - 1;
if (underscore_src) |src| {
@@ -7912,18 +7827,18 @@ fn switchExpr(
if (case.inline_token != null) {
return astgen.failTok(case_src, "cannot inline '_' prong", .{});
}
- special_node = case_node;
+ special_node = case_node.toOptional();
special_prong = .under;
underscore_src = case_src;
continue;
}
for (case.ast.values) |val| {
- if (node_tags[val] == .string_literal)
+ if (tree.nodeTag(val) == .string_literal)
return astgen.failNode(val, "cannot switch on strings", .{});
}
- if (case.ast.values.len == 1 and node_tags[case.ast.values[0]] != .switch_range) {
+ if (case.ast.values.len == 1 and tree.nodeTag(case.ast.values[0]) != .switch_range) {
scalar_cases_len += 1;
} else {
multi_cases_len += 1;
@@ -8012,7 +7927,7 @@ fn switchExpr(
const case = tree.fullSwitchCase(case_node).?;
const is_multi_case = case.ast.values.len > 1 or
- (case.ast.values.len == 1 and node_tags[case.ast.values[0]] == .switch_range);
+ (case.ast.values.len == 1 and tree.nodeTag(case.ast.values[0]) == .switch_range);
var dbg_var_name: Zir.NullTerminatedString = .empty;
var dbg_var_inst: Zir.Inst.Ref = undefined;
@@ -8026,18 +7941,15 @@ fn switchExpr(
const sub_scope = blk: {
const payload_token = case.payload_token orelse break :blk &case_scope.base;
- const ident = if (token_tags[payload_token] == .asterisk)
- payload_token + 1
- else
- payload_token;
+ const capture_is_ref = tree.tokenTag(payload_token) == .asterisk;
+ const ident = payload_token + @intFromBool(capture_is_ref);
- const is_ptr = ident != payload_token;
- capture = if (is_ptr) .by_ref else .by_val;
+ capture = if (capture_is_ref) .by_ref else .by_val;
const ident_slice = tree.tokenSlice(ident);
var payload_sub_scope: *Scope = undefined;
if (mem.eql(u8, ident_slice, "_")) {
- if (is_ptr) {
+ if (capture_is_ref) {
return astgen.failTok(payload_token, "pointer modifier invalid on discard", .{});
}
payload_sub_scope = &case_scope.base;
@@ -8057,7 +7969,7 @@ fn switchExpr(
payload_sub_scope = &capture_val_scope.base;
}
- const tag_token = if (token_tags[ident + 1] == .comma)
+ const tag_token = if (tree.tokenTag(ident + 1) == .comma)
ident + 2
else
break :blk payload_sub_scope;
@@ -8095,7 +8007,7 @@ fn switchExpr(
// items
var items_len: u32 = 0;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] == .switch_range) continue;
+ if (tree.nodeTag(item_node) == .switch_range) continue;
items_len += 1;
const item_inst = try comptimeExpr(parent_gz, scope, item_ri, item_node, .switch_item);
@@ -8105,11 +8017,12 @@ fn switchExpr(
// ranges
var ranges_len: u32 = 0;
for (case.ast.values) |range| {
- if (node_tags[range] != .switch_range) continue;
+ if (tree.nodeTag(range) != .switch_range) continue;
ranges_len += 1;
- const first = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].lhs, .switch_item);
- const last = try comptimeExpr(parent_gz, scope, item_ri, node_datas[range].rhs, .switch_item);
+ const first_node, const last_node = tree.nodeData(range).node_and_node;
+ const first = try comptimeExpr(parent_gz, scope, item_ri, first_node, .switch_item);
+ const last = try comptimeExpr(parent_gz, scope, item_ri, last_node, .switch_item);
try payloads.appendSlice(gpa, &[_]u32{
@intFromEnum(first), @intFromEnum(last),
});
@@ -8118,7 +8031,7 @@ fn switchExpr(
payloads.items[header_index] = items_len;
payloads.items[header_index + 1] = ranges_len;
break :blk header_index + 2;
- } else if (case_node == special_node) blk: {
+ } else if (case_node.toOptional() == special_node) blk: {
payloads.items[case_table_start] = header_index;
try payloads.resize(gpa, header_index + 1); // body_len
break :blk header_index;
@@ -8231,17 +8144,15 @@ fn switchExpr(
fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
if (astgen.fn_block == null) {
return astgen.failNode(node, "'return' outside function scope", .{});
}
- if (gz.any_defer_node != 0) {
+ if (gz.any_defer_node.unwrap()) |any_defer_node| {
return astgen.failNodeNotes(node, "cannot return from defer expression", .{}, &.{
try astgen.errNoteNode(
- gz.any_defer_node,
+ any_defer_node,
"defer expression here",
.{},
),
@@ -8259,8 +8170,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
const defer_outer = &astgen.fn_block.?.base;
- const operand_node = node_datas[node].lhs;
- if (operand_node == 0) {
+ const operand_node = tree.nodeData(node).opt_node.unwrap() orelse {
// Returning a void value; skip error defers.
try genDefers(gz, defer_outer, scope, .normal_only);
@@ -8269,12 +8179,12 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
_ = try gz.addUnNode(.ret_node, .void_value, node);
return Zir.Inst.Ref.unreachable_value;
- }
+ };
- if (node_tags[operand_node] == .error_value) {
+ if (tree.nodeTag(operand_node) == .error_value) {
// Hot path for `return error.Foo`. This bypasses result location logic as well as logic
// for detecting whether to add something to the function's inferred error set.
- const ident_token = node_datas[operand_node].rhs;
+ const ident_token = tree.nodeData(operand_node).opt_token_and_opt_token[1].unwrap().?;
const err_name_str_index = try astgen.identAsString(ident_token);
const defer_counts = countDefers(defer_outer, scope);
if (!defer_counts.need_err_code) {
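
Here the old sentinel convention (`operand_node == 0` meaning no operand) becomes an explicit optional that must be unwrapped. A sketch of such an optional-index enum, assuming the real type reserves an out-of-band value for absence, since index 0 is a legitimate node (the root):

const std = @import("std");

const Index = enum(u32) { root = 0, _ };

const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(oi: OptionalIndex) ?Index {
        if (oi == .none) return null;
        const i: Index = @enumFromInt(@intFromEnum(oi));
        return i;
    }

    fn wrap(i: Index) OptionalIndex {
        return @enumFromInt(@intFromEnum(i));
    }
};

test "absence is no longer spelled 0" {
    try std.testing.expect(OptionalIndex.none.unwrap() == null);
    try std.testing.expect(OptionalIndex.wrap(.root).unwrap().? == .root);
}
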
@@ -8405,9 +8315,8 @@ fn identifier(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const ident_token = main_tokens[ident];
+ const ident_token = tree.nodeMainToken(ident);
const ident_name_raw = tree.tokenSlice(ident_token);
if (mem.eql(u8, ident_name_raw, "_")) {
return astgen.failNode(ident, "'_' used as an identifier without @\"_\" syntax", .{});
@@ -8509,9 +8418,9 @@ fn localVarRef(
// Locals cannot shadow anything, so we do not need to look for ambiguous
// references in this case.
if (ri.rl == .discard and ri.ctx == .assignment) {
- local_val.discarded = ident_token;
+ local_val.discarded = .fromToken(ident_token);
} else {
- local_val.used = ident_token;
+ local_val.used = .fromToken(ident_token);
}
if (local_val.is_used_or_discarded) |ptr| ptr.* = true;
@@ -8533,9 +8442,9 @@ fn localVarRef(
const local_ptr = s.cast(Scope.LocalPtr).?;
if (local_ptr.name == name_str_index) {
if (ri.rl == .discard and ri.ctx == .assignment) {
- local_ptr.discarded = ident_token;
+ local_ptr.discarded = .fromToken(ident_token);
} else {
- local_ptr.used = ident_token;
+ local_ptr.used = .fromToken(ident_token);
}
// Can't close over a runtime variable
@@ -8748,8 +8657,7 @@ fn stringLiteral(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const str_lit_token = main_tokens[node];
+ const str_lit_token = tree.nodeMainToken(node);
const str = try astgen.strLitAsString(str_lit_token);
const result = try gz.add(.{
.tag = .str,
@@ -8781,8 +8689,7 @@ fn multilineStringLiteral(
fn charLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const main_token = main_tokens[node];
+ const main_token = tree.nodeMainToken(node);
const slice = tree.tokenSlice(main_token);
switch (std.zig.parseCharLiteral(slice)) {
@@ -8799,8 +8706,7 @@ const Sign = enum { negative, positive };
fn numberLiteral(gz: *GenZir, ri: ResultInfo, node: Ast.Node.Index, source_node: Ast.Node.Index, sign: Sign) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const num_token = main_tokens[node];
+ const num_token = tree.nodeMainToken(node);
const bytes = tree.tokenSlice(num_token);
const result: Zir.Inst.Ref = switch (std.zig.parseNumberLiteral(bytes)) {
@@ -8918,16 +8824,12 @@ fn asmExpr(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const token_tags = tree.tokens.items(.tag);
const TagAndTmpl = struct { tag: Zir.Inst.Extended, tmpl: Zir.NullTerminatedString };
- const tag_and_tmpl: TagAndTmpl = switch (node_tags[full.ast.template]) {
+ const tag_and_tmpl: TagAndTmpl = switch (tree.nodeTag(full.ast.template)) {
.string_literal => .{
.tag = .@"asm",
- .tmpl = (try astgen.strLitAsString(main_tokens[full.ast.template])).index,
+ .tmpl = (try astgen.strLitAsString(tree.nodeMainToken(full.ast.template))).index,
},
.multiline_string_literal => .{
.tag = .@"asm",
@@ -8962,17 +8864,17 @@ fn asmExpr(
var output_type_bits: u32 = 0;
for (full.outputs, 0..) |output_node, i| {
- const symbolic_name = main_tokens[output_node];
+ const symbolic_name = tree.nodeMainToken(output_node);
const name = try astgen.identAsString(symbolic_name);
const constraint_token = symbolic_name + 2;
const constraint = (try astgen.strLitAsString(constraint_token)).index;
- const has_arrow = token_tags[symbolic_name + 4] == .arrow;
+ const has_arrow = tree.tokenTag(symbolic_name + 4) == .arrow;
if (has_arrow) {
if (output_type_bits != 0) {
return astgen.failNode(output_node, "inline assembly allows up to one output value", .{});
}
output_type_bits |= @as(u32, 1) << @intCast(i);
- const out_type_node = node_datas[output_node].lhs;
+ const out_type_node = tree.nodeData(output_node).opt_node_and_token[0].unwrap().?;
const out_type_inst = try typeExpr(gz, scope, out_type_node);
outputs[i] = .{
.name = name,
@@ -8999,11 +8901,11 @@ fn asmExpr(
const inputs = inputs_buffer[0..full.inputs.len];
for (full.inputs, 0..) |input_node, i| {
- const symbolic_name = main_tokens[input_node];
+ const symbolic_name = tree.nodeMainToken(input_node);
const name = try astgen.identAsString(symbolic_name);
const constraint_token = symbolic_name + 2;
const constraint = (try astgen.strLitAsString(constraint_token)).index;
- const operand = try expr(gz, scope, .{ .rl = .none }, node_datas[input_node].lhs);
+ const operand = try expr(gz, scope, .{ .rl = .none }, tree.nodeData(input_node).node_and_token[0]);
inputs[i] = .{
.name = name,
.constraint = constraint,
@@ -9024,10 +8926,10 @@ fn asmExpr(
clobbers_buffer[clobber_i] = @intFromEnum((try astgen.strLitAsString(tok_i)).index);
clobber_i += 1;
tok_i += 1;
- switch (token_tags[tok_i]) {
+ switch (tree.tokenTag(tok_i)) {
.r_paren => break :clobbers,
.comma => {
- if (token_tags[tok_i + 1] == .r_paren) {
+ if (tree.tokenTag(tok_i + 1) == .r_paren) {
break :clobbers;
} else {
continue;
@@ -9119,9 +9021,6 @@ fn ptrCast(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
const FlagsInt = @typeInfo(Zir.Inst.FullPtrCastFlags).@"struct".backing_integer.?;
var flags: Zir.Inst.FullPtrCastFlags = .{};
@@ -9130,11 +9029,11 @@ fn ptrCast(
// to handle `builtin_call_two`.
var node = root_node;
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.builtin_call_two, .builtin_call_two_comma => {},
.grouped_expression => {
// Handle the chaining even with redundant parentheses
- node = node_datas[node].lhs;
+ node = tree.nodeData(node).node_and_token[0];
continue;
},
else => break,
@@ -9144,7 +9043,9 @@ fn ptrCast(
const args = tree.builtinCallParams(&buf, node).?;
std.debug.assert(args.len <= 2);
- const builtin_token = main_tokens[node];
+ if (args.len == 0) break; // no arguments: cannot be a cast builtin
+
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name) orelse break;
if (args.len == 1) {
@@ -9344,9 +9245,8 @@ fn builtinCall(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const builtin_token = main_tokens[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
// We handle the different builtins manually because they have different semantics depending
@@ -9387,14 +9287,13 @@ fn builtinCall(
return rvalue(gz, ri, .void_value, node);
},
.import => {
- const node_tags = tree.nodes.items(.tag);
const operand_node = params[0];
- if (node_tags[operand_node] != .string_literal) {
+ if (tree.nodeTag(operand_node) != .string_literal) {
// Spec reference: https://github.com/ziglang/zig/issues/2206
return astgen.failNode(operand_node, "@import operand must be a string literal", .{});
}
- const str_lit_token = main_tokens[operand_node];
+ const str_lit_token = tree.nodeMainToken(operand_node);
const str = try astgen.strLitAsString(str_lit_token);
const str_slice = astgen.string_bytes.items[@intFromEnum(str.index)..][0..str.len];
if (mem.indexOfScalar(u8, str_slice, 0) != null) {
@@ -9505,8 +9404,7 @@ fn builtinCall(
std.mem.asBytes(&astgen.source_column),
);
- const token_starts = tree.tokens.items(.start);
- const node_start = token_starts[tree.firstToken(node)];
+ const node_start = tree.tokenStart(tree.firstToken(node));
astgen.advanceSourceCursor(node_start);
const result = try gz.addExtendedPayload(.builtin_src, Zir.Inst.Src{
.node = gz.nodeIndexToRelative(node),
@@ -9786,7 +9684,7 @@ fn builtinCall(
.callee = callee,
.args = args,
.flags = .{
- .is_nosuspend = gz.nosuspend_node != 0,
+ .is_nosuspend = gz.nosuspend_node != .none,
.ensure_result_used = false,
},
});
@@ -10011,13 +9909,11 @@ fn negation(
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
// Check for float literal as the sub-expression because we want to preserve
// its negativity rather than having it go through comptime subtraction.
- const operand_node = node_datas[node].lhs;
- if (node_tags[operand_node] == .number_literal) {
+ const operand_node = tree.nodeData(node).node;
+ if (tree.nodeTag(operand_node) == .number_literal) {
return numberLiteral(gz, ri, operand_node, node, .negative);
}
@@ -10133,7 +10029,7 @@ fn shiftOp(
) InnerError!Zir.Inst.Ref {
const lhs = try expr(gz, scope, .{ .rl = .none }, lhs_node);
- const cursor = switch (gz.astgen.tree.nodes.items(.tag)[node]) {
+ const cursor = switch (gz.astgen.tree.nodeTag(node)) {
.shl, .shr => maybeAdvanceSourceCursorToMainToken(gz, node),
else => undefined,
};
@@ -10141,7 +10037,7 @@ fn shiftOp(
const log2_int_type = try gz.addUnNode(.typeof_log2_int_type, lhs, lhs_node);
const rhs = try expr(gz, scope, .{ .rl = .{ .ty = log2_int_type }, .ctx = .shift_op }, rhs_node);
- switch (gz.astgen.tree.nodes.items(.tag)[node]) {
+ switch (gz.astgen.tree.nodeTag(node)) {
.shl, .shr => try emitDbgStmt(gz, cursor),
else => undefined,
}
@@ -10217,14 +10113,14 @@ fn callExpr(
if (call.async_token != null) {
break :blk .async_kw;
}
- if (gz.nosuspend_node != 0) {
+ if (gz.nosuspend_node != .none) {
break :blk .no_async;
}
break :blk .auto;
};
{
- astgen.advanceSourceCursor(astgen.tree.tokens.items(.start)[call.ast.lparen]);
+ astgen.advanceSourceCursor(astgen.tree.tokenStart(call.ast.lparen));
const line = astgen.source_line - gz.decl_line;
const column = astgen.source_column;
// Sema expects a dbg_stmt immediately before call,
@@ -10235,7 +10131,6 @@ fn callExpr(
.direct => |obj| assert(obj != .none),
.field => |field| assert(field.obj_ptr != .none),
}
- assert(node != 0);
const call_index: Zir.Inst.Index = @enumFromInt(astgen.instructions.len);
const call_inst = call_index.toRef();
@@ -10346,14 +10241,10 @@ fn calleeExpr(
const astgen = gz.astgen;
const tree = astgen.tree;
- const tag = tree.nodes.items(.tag)[node];
+ const tag = tree.nodeTag(node);
switch (tag) {
.field_access => {
- const main_tokens = tree.nodes.items(.main_token);
- const node_datas = tree.nodes.items(.data);
- const object_node = node_datas[node].lhs;
- const dot_token = main_tokens[node];
- const field_ident = dot_token + 1;
+ const object_node, const field_ident = tree.nodeData(node).node_and_token;
const str_index = try astgen.identAsString(field_ident);
// Capture the object by reference so we can promote it to an
// address in Sema if needed.
@@ -10378,7 +10269,7 @@ fn calleeExpr(
// Decl literal call syntax, e.g.
// `const foo: T = .init();`
// Look up `init` in `T`, but don't try and coerce it.
- const str_index = try astgen.identAsString(tree.nodes.items(.main_token)[node]);
+ const str_index = try astgen.identAsString(tree.nodeMainToken(node));
const callee = try gz.addPlNode(.decl_literal_no_coerce, node, Zir.Inst.Field{
.lhs = res_ty,
.field_name_start = str_index,
@@ -10450,12 +10341,9 @@ comptime {
}
fn nodeIsTriviallyZero(tree: *const Ast, node: Ast.Node.Index) bool {
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
-
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.number_literal => {
- const ident = main_tokens[node];
+ const ident = tree.nodeMainToken(node);
return switch (std.zig.parseNumberLiteral(tree.tokenSlice(ident))) {
.int => |number| switch (number) {
0 => true,
@@ -10469,12 +10357,9 @@ fn nodeIsTriviallyZero(tree: *const Ast, node: Ast.Node.Index) bool {
}
fn nodeMayAppendToErrorTrace(tree: *const Ast, start_node: Ast.Node.Index) bool {
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
-
var node = start_node;
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
// These don't have the opportunity to call any runtime functions.
.error_value,
.identifier,
@@ -10482,11 +10367,12 @@ fn nodeMayAppendToErrorTrace(tree: *const Ast, start_node: Ast.Node.Index) bool
=> return false,
// Forward the question to the LHS sub-expression.
- .grouped_expression,
.@"try",
.@"nosuspend",
+ => node = tree.nodeData(node).node,
+ .grouped_expression,
.unwrap_optional,
- => node = node_datas[node].lhs,
+ => node = tree.nodeData(node).node_and_token[0],
// Anything that does not eval to an error is guaranteed to pop any
// additions to the error trace, so it effectively does not append.
@@ -10496,14 +10382,9 @@ fn nodeMayAppendToErrorTrace(tree: *const Ast, start_node: Ast.Node.Index) bool
}
fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.EvalToError {
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
-
var node = start_node;
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root,
.@"usingnamespace",
.test_decl,
@@ -10666,13 +10547,14 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
=> return .never,
// Forward the question to the LHS sub-expression.
- .grouped_expression,
.@"try",
.@"await",
.@"comptime",
.@"nosuspend",
+ => node = tree.nodeData(node).node,
+ .grouped_expression,
.unwrap_optional,
- => node = node_datas[node].lhs,
+ => node = tree.nodeData(node).node_and_token[0],
// LHS sub-expression may still be an error under the outer optional or error union
.@"catch",
@@ -10684,8 +10566,8 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
.block,
.block_semicolon,
=> {
- const lbrace = main_tokens[node];
- if (token_tags[lbrace - 1] == .colon) {
+ const lbrace = tree.nodeMainToken(node);
+ if (tree.tokenTag(lbrace - 1) == .colon) {
// Labeled blocks may need a memory location to forward
// to their break statements.
return .maybe;
@@ -10699,7 +10581,7 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
.builtin_call_two,
.builtin_call_two_comma,
=> {
- const builtin_token = main_tokens[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
// If the builtin is an invalid name, we don't cause an error here; instead
// let it pass, and the error will be "invalid builtin function" later.
@@ -10713,12 +10595,9 @@ fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) BuiltinFn.Ev
/// Returns `true` if it is known the type expression has more than one possible value;
/// `false` otherwise.
fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.Index) bool {
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
-
var node = start_node;
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root,
.@"usingnamespace",
.test_decl,
@@ -10881,13 +10760,14 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
=> return false,
// Forward the question to the LHS sub-expression.
- .grouped_expression,
.@"try",
.@"await",
.@"comptime",
.@"nosuspend",
+ => node = tree.nodeData(node).node,
+ .grouped_expression,
.unwrap_optional,
- => node = node_datas[node].lhs,
+ => node = tree.nodeData(node).node_and_token[0],
.ptr_type_aligned,
.ptr_type_sentinel,
@@ -10899,8 +10779,7 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
=> return true,
.identifier => {
- const main_tokens = tree.nodes.items(.main_token);
- const ident_bytes = tree.tokenSlice(main_tokens[node]);
+ const ident_bytes = tree.tokenSlice(tree.nodeMainToken(node));
if (primitive_instrs.get(ident_bytes)) |primitive| switch (primitive) {
.anyerror_type,
.anyframe_type,
@@ -10960,12 +10839,9 @@ fn nodeImpliesMoreThanOnePossibleValue(tree: *const Ast, start_node: Ast.Node.In
/// Returns `true` if it is known the expression is a type that cannot be used at runtime;
/// `false` otherwise.
fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
-
var node = start_node;
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root,
.@"usingnamespace",
.test_decl,
@@ -11137,17 +11013,17 @@ fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
=> return true,
// Forward the question to the LHS sub-expression.
- .grouped_expression,
.@"try",
.@"await",
.@"comptime",
.@"nosuspend",
+ => node = tree.nodeData(node).node,
+ .grouped_expression,
.unwrap_optional,
- => node = node_datas[node].lhs,
+ => node = tree.nodeData(node).node_and_token[0],
.identifier => {
- const main_tokens = tree.nodes.items(.main_token);
- const ident_bytes = tree.tokenSlice(main_tokens[node]);
+ const ident_bytes = tree.tokenSlice(tree.nodeMainToken(node));
if (primitive_instrs.get(ident_bytes)) |primitive| switch (primitive) {
.anyerror_type,
.anyframe_type,
@@ -11206,8 +11082,7 @@ fn nodeImpliesComptimeOnly(tree: *const Ast, start_node: Ast.Node.Index) bool {
/// Returns `true` if the node uses `gz.anon_name_strategy`.
fn nodeUsesAnonNameStrategy(tree: *const Ast, node: Ast.Node.Index) bool {
- const node_tags = tree.nodes.items(.tag);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.container_decl,
.container_decl_trailing,
.container_decl_two,
@@ -11222,7 +11097,7 @@ fn nodeUsesAnonNameStrategy(tree: *const Ast, node: Ast.Node.Index) bool {
.tagged_union_enum_tag_trailing,
=> return true,
.builtin_call_two, .builtin_call_two_comma, .builtin_call, .builtin_call_comma => {
- const builtin_token = tree.nodes.items(.main_token)[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
return std.mem.eql(u8, builtin_name, "@Type");
},
@@ -11455,8 +11330,7 @@ fn rvalueInner(
/// See also `appendIdentStr` and `parseStrLit`.
fn identifierTokenString(astgen: *AstGen, token: Ast.TokenIndex) InnerError![]const u8 {
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- assert(token_tags[token] == .identifier);
+ assert(tree.tokenTag(token) == .identifier);
const ident_name = tree.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
return ident_name;
@@ -11482,8 +11356,7 @@ fn appendIdentStr(
buf: *ArrayListUnmanaged(u8),
) InnerError!void {
const tree = astgen.tree;
- const token_tags = tree.tokens.items(.tag);
- assert(token_tags[token] == .identifier);
+ assert(tree.tokenTag(token) == .identifier);
const ident_name = tree.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
return buf.appendSlice(astgen.gpa, ident_name);
@@ -11572,8 +11445,8 @@ fn appendErrorNodeNotes(
} else 0;
try astgen.compile_errors.append(astgen.gpa, .{
.msg = msg,
- .node = node,
- .token = 0,
+ .node = node.toOptional(),
+ .token = .none,
.byte_offset = 0,
.notes = notes_index,
});
@@ -11664,8 +11537,8 @@ fn appendErrorTokNotesOff(
} else 0;
try astgen.compile_errors.append(gpa, .{
.msg = msg,
- .node = 0,
- .token = token,
+ .node = .none,
+ .token = .fromToken(token),
.byte_offset = byte_offset,
.notes = notes_index,
});
@@ -11693,8 +11566,8 @@ fn errNoteTokOff(
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
- .node = 0,
- .token = token,
+ .node = .none,
+ .token = .fromToken(token),
.byte_offset = byte_offset,
.notes = 0,
});
@@ -11712,8 +11585,8 @@ fn errNoteNode(
try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
return astgen.addExtra(Zir.Inst.CompileErrors.Item{
.msg = msg,
- .node = node,
- .token = 0,
+ .node = node.toOptional(),
+ .token = .none,
.byte_offset = 0,
.notes = 0,
});
@@ -11779,10 +11652,8 @@ fn strLitAsString(astgen: *AstGen, str_lit_token: Ast.TokenIndex) !IndexSlice {
fn strLitNodeAsString(astgen: *AstGen, node: Ast.Node.Index) !IndexSlice {
const tree = astgen.tree;
- const node_datas = tree.nodes.items(.data);
- const start = node_datas[node].lhs;
- const end = node_datas[node].rhs;
+ const start, const end = tree.nodeData(node).token_and_token;
const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
@@ -11877,11 +11748,11 @@ const Scope = struct {
/// Source location of the corresponding variable declaration.
token_src: Ast.TokenIndex,
/// Track the first identifier where it is referenced.
- /// 0 means never referenced.
- used: Ast.TokenIndex = 0,
+ /// .none means never referenced.
+ used: Ast.OptionalTokenIndex = .none,
/// Track the identifier where it is discarded, like this `_ = foo;`.
- /// 0 means never discarded.
- discarded: Ast.TokenIndex = 0,
+ /// .none means never discarded.
+ discarded: Ast.OptionalTokenIndex = .none,
is_used_or_discarded: ?*bool = null,
/// String table index.
name: Zir.NullTerminatedString,
@@ -11901,11 +11772,11 @@ const Scope = struct {
/// Source location of the corresponding variable declaration.
token_src: Ast.TokenIndex,
/// Track the first identifier where it is referenced.
- /// 0 means never referenced.
- used: Ast.TokenIndex = 0,
+ /// .none means never referenced.
+ used: Ast.OptionalTokenIndex = .none,
/// Track the identifier where it is discarded, like this `_ = foo;`.
- /// 0 means never discarded.
- discarded: Ast.TokenIndex = 0,
+ /// .none means never discarded.
+ discarded: Ast.OptionalTokenIndex = .none,
/// Whether this value is used as an lvalue after initialization.
/// If not, we know it can be `const`, so will emit a compile error if it is `var`.
used_as_lvalue: bool = false,
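
The `used`/`discarded` fields move from `0` sentinels to `Ast.OptionalTokenIndex`. A hypothetical model of the bookkeeping, showing why this matters: token 0 is a real token (the first token of the file) and can now be recorded as a use site:

const std = @import("std");

const OptionalTokenIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn fromToken(token: u32) OptionalTokenIndex {
        return @enumFromInt(token);
    }
};

// Hypothetical local-variable bookkeeping in the new style.
const LocalVal = struct {
    used: OptionalTokenIndex = .none,
    discarded: OptionalTokenIndex = .none,
};

test "token 0 can be recorded as a use site" {
    var local: LocalVal = .{};
    local.used = .fromToken(0);
    try std.testing.expect(local.used != .none);
    try std.testing.expect(local.discarded == .none);
}
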
@@ -12000,12 +11871,12 @@ const GenZir = struct {
break_result_info: AstGen.ResultInfo = undefined,
continue_result_info: AstGen.ResultInfo = undefined,
- suspend_node: Ast.Node.Index = 0,
- nosuspend_node: Ast.Node.Index = 0,
+ suspend_node: Ast.Node.OptionalIndex = .none,
+ nosuspend_node: Ast.Node.OptionalIndex = .none,
/// Set if this GenZir is a defer.
- cur_defer_node: Ast.Node.Index = 0,
+ cur_defer_node: Ast.Node.OptionalIndex = .none,
// Set if this GenZir is a defer or it is inside a defer.
- any_defer_node: Ast.Node.Index = 0,
+ any_defer_node: Ast.Node.OptionalIndex = .none,
const unstacked_top = std.math.maxInt(usize);
/// Call unstack before adding any new instructions to containing GenZir.
@@ -12086,12 +11957,12 @@ const GenZir = struct {
return false;
}
- fn nodeIndexToRelative(gz: GenZir, node_index: Ast.Node.Index) i32 {
- return @as(i32, @bitCast(node_index)) - @as(i32, @bitCast(gz.decl_node_index));
+ fn nodeIndexToRelative(gz: GenZir, node_index: Ast.Node.Index) Ast.Node.Offset {
+ return gz.decl_node_index.toOffset(node_index);
}
- fn tokenIndexToRelative(gz: GenZir, token: Ast.TokenIndex) u32 {
- return token - gz.srcToken();
+ fn tokenIndexToRelative(gz: GenZir, token: Ast.TokenIndex) Ast.TokenOffset {
+ return .init(gz.srcToken(), token);
}
fn srcToken(gz: GenZir) Ast.TokenIndex {
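
`nodeIndexToRelative` now returns a typed `Ast.Node.Offset` instead of a hand-rolled `i32` bitcast. A sketch of what such a signed-offset type could look like (names assumed, not taken from the real Ast):

const std = @import("std");

const Index = enum(u32) { _ };

// The subtraction that used to be an inline @bitCast dance gets one
// named, typed home.
const Offset = enum(i32) {
    _,

    fn init(base: Index, destination: Index) Offset {
        const b: i64 = @intFromEnum(base);
        const d: i64 = @intFromEnum(destination);
        return @enumFromInt(d - b);
    }
};

test "relative offsets may be negative" {
    const decl: Index = @enumFromInt(100);
    const node: Index = @enumFromInt(97);
    try std.testing.expectEqual(@as(i32, -3), @intFromEnum(Offset.init(decl, node)));
}
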
@@ -12244,7 +12115,7 @@ const GenZir = struct {
proto_hash: std.zig.SrcHash,
},
) !Zir.Inst.Ref {
- assert(args.src_node != 0);
+ assert(args.src_node != .root);
const astgen = gz.astgen;
const gpa = astgen.gpa;
const ret_ref = if (args.ret_ref == .void_type) .none else args.ret_ref;
@@ -12276,13 +12147,13 @@ const GenZir = struct {
var src_locs_and_hash_buffer: [7]u32 = undefined;
const src_locs_and_hash: []const u32 = if (args.body_gz != null) src_locs_and_hash: {
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const token_starts = tree.tokens.items(.start);
const fn_decl = args.src_node;
- assert(node_tags[fn_decl] == .fn_decl or node_tags[fn_decl] == .test_decl);
- const block = node_datas[fn_decl].rhs;
- const rbrace_start = token_starts[tree.lastToken(block)];
+ const block = switch (tree.nodeTag(fn_decl)) {
+ .fn_decl => tree.nodeData(fn_decl).node_and_node[1],
+ .test_decl => tree.nodeData(fn_decl).opt_token_and_node[1],
+ else => unreachable,
+ };
+ const rbrace_start = tree.tokenStart(tree.lastToken(block));
astgen.advanceSourceCursor(rbrace_start);
const rbrace_line: u32 = @intCast(astgen.source_line - gz.decl_line);
const rbrace_column: u32 = @intCast(astgen.source_column);
@@ -12689,7 +12560,7 @@ const GenZir = struct {
.data = .{ .extended = .{
.opcode = opcode,
.small = small,
- .operand = @bitCast(gz.nodeIndexToRelative(src_node)),
+ .operand = @bitCast(@intFromEnum(gz.nodeIndexToRelative(src_node))),
} },
});
gz.instructions.appendAssumeCapacity(new_index);
@@ -12878,9 +12749,9 @@ const GenZir = struct {
.operand = operand,
.payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Break{
.operand_src_node = if (operand_src_node) |src_node|
- gz.nodeIndexToRelative(src_node)
+ gz.nodeIndexToRelative(src_node).toOptional()
else
- Zir.Inst.Break.no_src_node,
+ .none,
.block_inst = block_inst,
}),
} },
@@ -12969,7 +12840,7 @@ const GenZir = struct {
.data = .{ .extended = .{
.opcode = opcode,
.small = undefined,
- .operand = @bitCast(gz.nodeIndexToRelative(src_node)),
+ .operand = @bitCast(@intFromEnum(gz.nodeIndexToRelative(src_node))),
} },
});
}
@@ -13149,8 +13020,8 @@ const GenZir = struct {
const astgen = gz.astgen;
const gpa = astgen.gpa;
- // Node 0 is valid for the root `struct_decl` of a file!
- assert(args.src_node != 0 or gz.parent.tag == .top);
+ // Node .root is valid for the root `struct_decl` of a file!
+ assert(args.src_node != .root or gz.parent.tag == .top);
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
@@ -13210,7 +13081,7 @@ const GenZir = struct {
const astgen = gz.astgen;
const gpa = astgen.gpa;
- assert(args.src_node != 0);
+ assert(args.src_node != .root);
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
@@ -13272,7 +13143,7 @@ const GenZir = struct {
const astgen = gz.astgen;
const gpa = astgen.gpa;
- assert(args.src_node != 0);
+ assert(args.src_node != .root);
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
@@ -13327,7 +13198,7 @@ const GenZir = struct {
const astgen = gz.astgen;
const gpa = astgen.gpa;
- assert(args.src_node != 0);
+ assert(args.src_node != .root);
try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).@"struct".fields.len + 2);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.OpaqueDecl{
@@ -13521,9 +13392,7 @@ fn maybeAdvanceSourceCursorToMainToken(gz: *GenZir, node: Ast.Node.Index) LineCo
if (gz.is_comptime) return .{ gz.astgen.source_line - gz.decl_line, gz.astgen.source_column };
const tree = gz.astgen.tree;
- const token_starts = tree.tokens.items(.start);
- const main_tokens = tree.nodes.items(.main_token);
- const node_start = token_starts[main_tokens[node]];
+ const node_start = tree.tokenStart(tree.nodeMainToken(node));
gz.astgen.advanceSourceCursor(node_start);
return .{ gz.astgen.source_line - gz.decl_line, gz.astgen.source_column };
@@ -13532,8 +13401,7 @@ fn maybeAdvanceSourceCursorToMainToken(gz: *GenZir, node: Ast.Node.Index) LineCo
/// Advances the source cursor to the beginning of `node`.
fn advanceSourceCursorToNode(astgen: *AstGen, node: Ast.Node.Index) void {
const tree = astgen.tree;
- const token_starts = tree.tokens.items(.start);
- const node_start = token_starts[tree.firstToken(node)];
+ const node_start = tree.tokenStart(tree.firstToken(node));
astgen.advanceSourceCursor(node_start);
}
@@ -13588,9 +13456,6 @@ fn scanContainer(
) !u32 {
const gpa = astgen.gpa;
const tree = astgen.tree;
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const token_tags = tree.tokens.items(.tag);
var any_invalid_declarations = false;
@@ -13620,7 +13485,7 @@ fn scanContainer(
var decl_count: u32 = 0;
for (members) |member_node| {
const Kind = enum { decl, field };
- const kind: Kind, const name_token = switch (node_tags[member_node]) {
+ const kind: Kind, const name_token = switch (tree.nodeTag(member_node)) {
.container_field_init,
.container_field_align,
.container_field,
@@ -13628,7 +13493,7 @@ fn scanContainer(
var full = tree.fullContainerField(member_node).?;
switch (container_kind) {
.@"struct", .@"opaque" => {},
- .@"union", .@"enum" => full.convertToNonTupleLike(astgen.tree.nodes),
+ .@"union", .@"enum" => full.convertToNonTupleLike(astgen.tree),
}
if (full.ast.tuple_like) continue;
break :blk .{ .field, full.ast.main_token };
@@ -13640,7 +13505,7 @@ fn scanContainer(
.aligned_var_decl,
=> blk: {
decl_count += 1;
- break :blk .{ .decl, main_tokens[member_node] + 1 };
+ break :blk .{ .decl, tree.nodeMainToken(member_node) + 1 };
},
.fn_proto_simple,
@@ -13650,8 +13515,8 @@ fn scanContainer(
.fn_decl,
=> blk: {
decl_count += 1;
- const ident = main_tokens[member_node] + 1;
- if (token_tags[ident] != .identifier) {
+ const ident = tree.nodeMainToken(member_node) + 1;
+ if (tree.tokenTag(ident) != .identifier) {
try astgen.appendErrorNode(member_node, "missing function name", .{});
any_invalid_declarations = true;
continue;
@@ -13668,12 +13533,12 @@ fn scanContainer(
decl_count += 1;
// We don't want shadowing detection here, and test names work a bit differently, so
// we must do the redeclaration detection ourselves.
- const test_name_token = main_tokens[member_node] + 1;
+ const test_name_token = tree.nodeMainToken(member_node) + 1;
const new_ent: NameEntry = .{
.tok = test_name_token,
.next = null,
};
- switch (token_tags[test_name_token]) {
+ switch (tree.tokenTag(test_name_token)) {
else => {}, // unnamed test
.string_literal => {
const name = try astgen.strLitAsString(test_name_token);
@@ -14275,3 +14140,7 @@ fn fetchRemoveRefEntries(astgen: *AstGen, param_insts: []const Zir.Inst.Index) !
}
return refs.items;
}
+
+test {
+ _ = &generate;
+}
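
The trailing `test { _ = &generate; }` block is a standard Zig idiom: referencing a declaration from a test forces it through semantic analysis under `zig test`, even when nothing else in the module calls it. A self-contained demonstration with a hypothetical helper:

const std = @import("std");

// Hypothetical function standing in for `generate`; nothing in this file
// calls it directly.
fn generate(input: u32) u32 {
    return input * 2;
}

// Taking the function's address references it, so `zig test` analyzes its
// body and surfaces compile errors an unreferenced function would hide.
test {
    _ = &generate;
}
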
lib/std/zig/AstRlAnnotate.zig
@@ -92,27 +92,26 @@ fn containerDecl(
full: Ast.full.ContainerDecl,
) !void {
const tree = astrl.tree;
- const token_tags = tree.tokens.items(.tag);
- switch (token_tags[full.ast.main_token]) {
+ switch (tree.tokenTag(full.ast.main_token)) {
.keyword_struct => {
- if (full.ast.arg != 0) {
- _ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
+ if (full.ast.arg.unwrap()) |arg| {
+ _ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
}
},
.keyword_union => {
- if (full.ast.arg != 0) {
- _ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
+ if (full.ast.arg.unwrap()) |arg| {
+ _ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
}
},
.keyword_enum => {
- if (full.ast.arg != 0) {
- _ = try astrl.expr(full.ast.arg, block, ResultInfo.type_only);
+ if (full.ast.arg.unwrap()) |arg| {
+ _ = try astrl.expr(arg, block, ResultInfo.type_only);
}
for (full.ast.members) |member_node| {
_ = try astrl.expr(member_node, block, ResultInfo.none);
@@ -130,10 +129,7 @@ fn containerDecl(
/// Returns true if `rl` provides a result pointer and the expression consumes it.
fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultInfo) Allocator.Error!bool {
const tree = astrl.tree;
- const token_tags = tree.tokens.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root,
.switch_case_one,
.switch_case_inline_one,
@@ -145,8 +141,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.asm_input,
=> unreachable,
- .@"errdefer", .@"defer" => {
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ .@"errdefer" => {
+ _ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
+ return false;
+ },
+ .@"defer" => {
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
@@ -155,21 +155,22 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.container_field,
=> {
const full = tree.fullContainerField(node).?;
- _ = try astrl.expr(full.ast.type_expr, block, ResultInfo.type_only);
- if (full.ast.align_expr != 0) {
- _ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
+ const type_expr = full.ast.type_expr.unwrap().?;
+ _ = try astrl.expr(type_expr, block, ResultInfo.type_only);
+ if (full.ast.align_expr.unwrap()) |align_expr| {
+ _ = try astrl.expr(align_expr, block, ResultInfo.type_only);
}
- if (full.ast.value_expr != 0) {
- _ = try astrl.expr(full.ast.value_expr, block, ResultInfo.type_only);
+ if (full.ast.value_expr.unwrap()) |value_expr| {
+ _ = try astrl.expr(value_expr, block, ResultInfo.type_only);
}
return false;
},
.@"usingnamespace" => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
.test_decl => {
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).opt_token_and_node[1], block, ResultInfo.none);
return false;
},
.global_var_decl,
@@ -178,17 +179,17 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.aligned_var_decl,
=> {
const full = tree.fullVarDecl(node).?;
- const init_ri = if (full.ast.type_node != 0) init_ri: {
- _ = try astrl.expr(full.ast.type_node, block, ResultInfo.type_only);
+ const init_ri = if (full.ast.type_node.unwrap()) |type_node| init_ri: {
+ _ = try astrl.expr(type_node, block, ResultInfo.type_only);
break :init_ri ResultInfo.typed_ptr;
} else ResultInfo.inferred_ptr;
- if (full.ast.init_node == 0) {
+ const init_node = full.ast.init_node.unwrap() orelse {
// No init node, so we're done.
return false;
- }
- switch (token_tags[full.ast.mut_token]) {
+ };
+ switch (tree.tokenTag(full.ast.mut_token)) {
.keyword_const => {
- const init_consumes_rl = try astrl.expr(full.ast.init_node, block, init_ri);
+ const init_consumes_rl = try astrl.expr(init_node, block, init_ri);
if (init_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
@@ -197,7 +198,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.keyword_var => {
// We'll create an alloc either way, so don't care if the
// result pointer is consumed.
- _ = try astrl.expr(full.ast.init_node, block, init_ri);
+ _ = try astrl.expr(init_node, block, init_ri);
return false;
},
else => unreachable,
@@ -213,8 +214,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
return false;
},
.assign => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.typed_ptr);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.typed_ptr);
return false;
},
.assign_shl,
@@ -235,13 +237,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.assign_mul_wrap,
.assign_mul_sat,
=> {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
.shl, .shr => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.add,
@@ -267,33 +271,38 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.less_or_equal,
.array_cat,
=> {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
+
.array_mult => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.error_union, .merge_error_sets => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.none);
return false;
},
.bool_and,
.bool_or,
=> {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.type_only);
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.bool_not => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
.bit_not, .negation, .negation_wrap => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
@@ -338,7 +347,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
for (full.ast.params) |param_node| {
_ = try astrl.expr(param_node, block, ResultInfo.type_only);
}
- return switch (node_tags[node]) {
+ return switch (tree.nodeTag(node)) {
.call_one,
.call_one_comma,
.call,
@@ -354,8 +363,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.@"return" => {
- if (node_datas[node].lhs != 0) {
- const ret_val_consumes_rl = try astrl.expr(node_datas[node].lhs, block, ResultInfo.typed_ptr);
+ if (tree.nodeData(node).opt_node.unwrap()) |lhs| {
+ const ret_val_consumes_rl = try astrl.expr(lhs, block, ResultInfo.typed_ptr);
if (ret_val_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
@@ -364,7 +373,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.field_access => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ const lhs, _ = tree.nodeData(node).node_and_token;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
return false;
},
@@ -376,15 +386,15 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
_ = try astrl.expr(full.ast.cond_expr, block, ResultInfo.type_only); // bool
}
- if (full.ast.else_expr == 0) {
- _ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
- return false;
- } else {
+ if (full.ast.else_expr.unwrap()) |else_expr| {
const then_uses_rl = try astrl.expr(full.ast.then_expr, block, ri);
- const else_uses_rl = try astrl.expr(full.ast.else_expr, block, ri);
+ const else_uses_rl = try astrl.expr(else_expr, block, ri);
const uses_rl = then_uses_rl or else_uses_rl;
if (uses_rl) try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
return uses_rl;
+ } else {
+ _ = try astrl.expr(full.ast.then_expr, block, ResultInfo.none);
+ return false;
}
},
@@ -405,12 +415,12 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.ri = ri,
.consumes_res_ptr = false,
};
- if (full.ast.cont_expr != 0) {
- _ = try astrl.expr(full.ast.cont_expr, &new_block, ResultInfo.none);
+ if (full.ast.cont_expr.unwrap()) |cont_expr| {
+ _ = try astrl.expr(cont_expr, &new_block, ResultInfo.none);
}
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
- const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
- break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
+ const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
+ break :else_rl try astrl.expr(else_expr, block, ri);
} else false;
if (new_block.consumes_res_ptr or else_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
@@ -426,10 +436,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
break :label try astrl.identString(label_token);
} else null;
for (full.ast.inputs) |input| {
- if (node_tags[input] == .for_range) {
- _ = try astrl.expr(node_datas[input].lhs, block, ResultInfo.type_only);
- if (node_datas[input].rhs != 0) {
- _ = try astrl.expr(node_datas[input].rhs, block, ResultInfo.type_only);
+ if (tree.nodeTag(input) == .for_range) {
+ const lhs, const opt_rhs = tree.nodeData(input).node_and_opt_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.type_only);
+ if (opt_rhs.unwrap()) |rhs| {
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
}
} else {
_ = try astrl.expr(input, block, ResultInfo.none);
@@ -443,8 +454,8 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.consumes_res_ptr = false,
};
_ = try astrl.expr(full.ast.then_expr, &new_block, ResultInfo.none);
- const else_consumes_rl = if (full.ast.else_expr != 0) else_rl: {
- break :else_rl try astrl.expr(full.ast.else_expr, block, ri);
+ const else_consumes_rl = if (full.ast.else_expr.unwrap()) |else_expr| else_rl: {
+ break :else_rl try astrl.expr(else_expr, block, ri);
} else false;
if (new_block.consumes_res_ptr or else_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
@@ -455,45 +466,49 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.slice_open => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const sliced, const start = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(sliced, block, ResultInfo.none);
+ _ = try astrl.expr(start, block, ResultInfo.type_only);
return false;
},
.slice => {
- const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ const sliced, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Ast.Node.Slice);
+ _ = try astrl.expr(sliced, block, ResultInfo.none);
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
_ = try astrl.expr(extra.end, block, ResultInfo.type_only);
return false;
},
.slice_sentinel => {
- const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ const sliced, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Ast.Node.SliceSentinel);
+ _ = try astrl.expr(sliced, block, ResultInfo.none);
_ = try astrl.expr(extra.start, block, ResultInfo.type_only);
- if (extra.end != 0) {
- _ = try astrl.expr(extra.end, block, ResultInfo.type_only);
+ if (extra.end.unwrap()) |end| {
+ _ = try astrl.expr(end, block, ResultInfo.type_only);
}
_ = try astrl.expr(extra.sentinel, block, ResultInfo.none);
return false;
},
.deref => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.address_of => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.optional_type => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.type_only);
return false;
},
- .grouped_expression,
.@"try",
.@"await",
.@"nosuspend",
+ => return astrl.expr(tree.nodeData(node).node, block, ri),
+ .grouped_expression,
.unwrap_optional,
- => return astrl.expr(node_datas[node].lhs, block, ri),
+ => return astrl.expr(tree.nodeData(node).node_and_token[0], block, ri),
.block_two,
.block_two_semicolon,
@@ -505,12 +520,14 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
return astrl.blockExpr(block, ri, node, statements);
},
.anyframe_type => {
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ _, const child_type = tree.nodeData(node).token_and_node;
+ _ = try astrl.expr(child_type, block, ResultInfo.type_only);
return false;
},
.@"catch", .@"orelse" => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- const rhs_consumes_rl = try astrl.expr(node_datas[node].rhs, block, ri);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ const rhs_consumes_rl = try astrl.expr(rhs, block, ri);
if (rhs_consumes_rl) {
try astrl.nodes_need_rl.putNoClobber(astrl.gpa, node, {});
}
@@ -524,19 +541,19 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
=> {
const full = tree.fullPtrType(node).?;
_ = try astrl.expr(full.ast.child_type, block, ResultInfo.type_only);
- if (full.ast.sentinel != 0) {
- _ = try astrl.expr(full.ast.sentinel, block, ResultInfo.type_only);
+ if (full.ast.sentinel.unwrap()) |sentinel| {
+ _ = try astrl.expr(sentinel, block, ResultInfo.type_only);
}
- if (full.ast.addrspace_node != 0) {
- _ = try astrl.expr(full.ast.addrspace_node, block, ResultInfo.type_only);
+ if (full.ast.addrspace_node.unwrap()) |addrspace_node| {
+ _ = try astrl.expr(addrspace_node, block, ResultInfo.type_only);
}
- if (full.ast.align_node != 0) {
- _ = try astrl.expr(full.ast.align_node, block, ResultInfo.type_only);
+ if (full.ast.align_node.unwrap()) |align_node| {
+ _ = try astrl.expr(align_node, block, ResultInfo.type_only);
}
- if (full.ast.bit_range_start != 0) {
- assert(full.ast.bit_range_end != 0);
- _ = try astrl.expr(full.ast.bit_range_start, block, ResultInfo.type_only);
- _ = try astrl.expr(full.ast.bit_range_end, block, ResultInfo.type_only);
+ if (full.ast.bit_range_start.unwrap()) |bit_range_start| {
+ const bit_range_end = full.ast.bit_range_end.unwrap().?;
+ _ = try astrl.expr(bit_range_start, block, ResultInfo.type_only);
+ _ = try astrl.expr(bit_range_end, block, ResultInfo.type_only);
}
return false;
},
@@ -560,63 +577,66 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
},
.@"break" => {
- if (node_datas[node].rhs == 0) {
+ const opt_label, const opt_rhs = tree.nodeData(node).opt_token_and_opt_node;
+ const rhs = opt_rhs.unwrap() orelse {
// Breaks with void are not interesting
return false;
- }
+ };
var opt_cur_block = block;
- if (node_datas[node].lhs == 0) {
- // No label - we're breaking from a loop.
+ if (opt_label.unwrap()) |label_token| {
+ const break_label = try astrl.identString(label_token);
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
- if (cur_block.is_loop) break;
+ const block_label = cur_block.label orelse continue;
+ if (std.mem.eql(u8, block_label, break_label)) break;
}
} else {
- const break_label = try astrl.identString(node_datas[node].lhs);
+ // No label - we're breaking from a loop.
while (opt_cur_block) |cur_block| : (opt_cur_block = cur_block.parent) {
- const block_label = cur_block.label orelse continue;
- if (std.mem.eql(u8, block_label, break_label)) break;
+ if (cur_block.is_loop) break;
}
}
if (opt_cur_block) |target_block| {
- const consumes_break_rl = try astrl.expr(node_datas[node].rhs, block, target_block.ri);
+ const consumes_break_rl = try astrl.expr(rhs, block, target_block.ri);
if (consumes_break_rl) target_block.consumes_res_ptr = true;
} else {
// No corresponding scope to break from - AstGen will emit an error.
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.none);
}
return false;
},
.array_type => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.type_only);
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.array_type_sentinel => {
- const extra = tree.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.type_only);
+ const len_expr, const extra_index = tree.nodeData(node).node_and_extra;
+ const extra = tree.extraData(extra_index, Ast.Node.ArrayTypeSentinel);
+ _ = try astrl.expr(len_expr, block, ResultInfo.type_only);
_ = try astrl.expr(extra.elem_type, block, ResultInfo.type_only);
_ = try astrl.expr(extra.sentinel, block, ResultInfo.type_only);
return false;
},
.array_access => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[node].rhs, block, ResultInfo.type_only);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.type_only);
return false;
},
.@"comptime" => {
// AstGen will emit an error if the scope is already comptime, so we can assume it is
// not. This means the result location is not forwarded.
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.@"switch", .switch_comma => {
- const operand_node = node_datas[node].lhs;
- const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SubRange);
- const case_nodes = tree.extra_data[extra.start..extra.end];
+ const operand_node, const extra_index = tree.nodeData(node).node_and_extra;
+ const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
_ = try astrl.expr(operand_node, block, ResultInfo.none);
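
The switch lowering above replaces a hand-sliced `tree.extra_data[extra.start..extra.end]` with `tree.extraDataSlice`, which produces a typed view over the flat `u32` extra array. A sketch of how such a helper can work, using hypothetical index types:

const std = @import("std");

const NodeIndex = enum(u32) { _ };
const ExtraIndex = enum(u32) { _ };
const SubRange = struct { start: ExtraIndex, end: ExtraIndex };

// Extra data stays a flat array of u32 words; the typed view is a
// same-size pointer cast over a sub-range.
fn extraDataSlice(extra_data: []const u32, range: SubRange, comptime T: type) []const T {
    return @ptrCast(extra_data[@intFromEnum(range.start)..@intFromEnum(range.end)]);
}

test "typed case-node view over raw words" {
    const words = [_]u32{ 5, 6, 7 };
    const case_nodes = extraDataSlice(&words, .{
        .start = @enumFromInt(1),
        .end = @enumFromInt(3),
    }, NodeIndex);
    try std.testing.expectEqual(@as(usize, 2), case_nodes.len);
    try std.testing.expectEqual(@as(u32, 6), @intFromEnum(case_nodes[0]));
}
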
@@ -624,9 +644,10 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] == .switch_range) {
- _ = try astrl.expr(node_datas[item_node].lhs, block, ResultInfo.none);
- _ = try astrl.expr(node_datas[item_node].rhs, block, ResultInfo.none);
+ if (tree.nodeTag(item_node) == .switch_range) {
+ const lhs, const rhs = tree.nodeData(item_node).node_and_node;
+ _ = try astrl.expr(lhs, block, ResultInfo.none);
+ _ = try astrl.expr(rhs, block, ResultInfo.none);
} else {
_ = try astrl.expr(item_node, block, ResultInfo.none);
}
@@ -641,11 +662,11 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
return any_prong_consumed_rl;
},
.@"suspend" => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
.@"resume" => {
- _ = try astrl.expr(node_datas[node].lhs, block, ResultInfo.none);
+ _ = try astrl.expr(tree.nodeData(node).node, block, ResultInfo.none);
return false;
},
@@ -661,9 +682,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullArrayInit(&buf, node).?;
- if (full.ast.type_expr != 0) {
+ if (full.ast.type_expr.unwrap()) |type_expr| {
// Explicitly typed init does not participate in RLS
- _ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
+ _ = try astrl.expr(type_expr, block, ResultInfo.none);
for (full.ast.elements) |elem_init| {
_ = try astrl.expr(elem_init, block, ResultInfo.type_only);
}
@@ -698,9 +719,9 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullStructInit(&buf, node).?;
- if (full.ast.type_expr != 0) {
+ if (full.ast.type_expr.unwrap()) |type_expr| {
// Explicitly typed init does not participate in RLS
- _ = try astrl.expr(full.ast.type_expr, block, ResultInfo.none);
+ _ = try astrl.expr(type_expr, block, ResultInfo.none);
for (full.ast.fields) |field_init| {
_ = try astrl.expr(field_init, block, ResultInfo.type_only);
}
@@ -728,33 +749,35 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
.fn_proto_one,
.fn_proto,
.fn_decl,
- => {
+ => |tag| {
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- const body_node = if (node_tags[node] == .fn_decl) node_datas[node].rhs else 0;
+ const body_node = if (tag == .fn_decl) tree.nodeData(node).node_and_node[1].toOptional() else .none;
{
var it = full.iterate(tree);
while (it.next()) |param| {
if (param.anytype_ellipsis3 == null) {
- _ = try astrl.expr(param.type_expr, block, ResultInfo.type_only);
+ const type_expr = param.type_expr.?;
+ _ = try astrl.expr(type_expr, block, ResultInfo.type_only);
}
}
}
- if (full.ast.align_expr != 0) {
- _ = try astrl.expr(full.ast.align_expr, block, ResultInfo.type_only);
+ if (full.ast.align_expr.unwrap()) |align_expr| {
+ _ = try astrl.expr(align_expr, block, ResultInfo.type_only);
}
- if (full.ast.addrspace_expr != 0) {
- _ = try astrl.expr(full.ast.addrspace_expr, block, ResultInfo.type_only);
+ if (full.ast.addrspace_expr.unwrap()) |addrspace_expr| {
+ _ = try astrl.expr(addrspace_expr, block, ResultInfo.type_only);
}
- if (full.ast.section_expr != 0) {
- _ = try astrl.expr(full.ast.section_expr, block, ResultInfo.type_only);
+ if (full.ast.section_expr.unwrap()) |section_expr| {
+ _ = try astrl.expr(section_expr, block, ResultInfo.type_only);
}
- if (full.ast.callconv_expr != 0) {
- _ = try astrl.expr(full.ast.callconv_expr, block, ResultInfo.type_only);
+ if (full.ast.callconv_expr.unwrap()) |callconv_expr| {
+ _ = try astrl.expr(callconv_expr, block, ResultInfo.type_only);
}
- _ = try astrl.expr(full.ast.return_type, block, ResultInfo.type_only);
- if (body_node != 0) {
- _ = try astrl.expr(body_node, block, ResultInfo.none);
+ const return_type = full.ast.return_type.unwrap().?;
+ _ = try astrl.expr(return_type, block, ResultInfo.type_only);
+ if (body_node.unwrap()) |body| {
+ _ = try astrl.expr(body, block, ResultInfo.none);
}
return false;
},
@@ -763,8 +786,7 @@ fn expr(astrl: *AstRlAnnotate, node: Ast.Node.Index, block: ?*Block, ri: ResultI
fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
const tree = astrl.tree;
- const token_tags = tree.tokens.items(.tag);
- assert(token_tags[token] == .identifier);
+ assert(tree.tokenTag(token) == .identifier);
const ident_name = tree.tokenSlice(token);
if (!std.mem.startsWith(u8, ident_name, "@")) {
return ident_name;
@@ -777,13 +799,9 @@ fn identString(astrl: *AstRlAnnotate, token: Ast.TokenIndex) ![]const u8 {
fn blockExpr(astrl: *AstRlAnnotate, parent_block: ?*Block, ri: ResultInfo, node: Ast.Node.Index, statements: []const Ast.Node.Index) !bool {
const tree = astrl.tree;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const lbrace = main_tokens[node];
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ const lbrace = tree.nodeMainToken(node);
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
// Labeled block
var new_block: Block = .{
.parent = parent_block,
@@ -812,8 +830,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
_ = ri; // Currently, no builtin consumes its result location.
const tree = astrl.tree;
- const main_tokens = tree.nodes.items(.main_token);
- const builtin_token = main_tokens[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name) orelse return false;
if (info.param_count) |expected| {
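
The labeled-block check in `blockExpr` above collapses two tag comparisons into `tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })`. A model of that helper, assuming it simply matches the tag sequence ending just before the given token:

const std = @import("std");

const TokenTag = enum { identifier, colon, l_brace, eof };

// Does the tag sequence immediately before `token` end with `expected`,
// in order?
fn isTokenPrecededByTags(tags: []const TokenTag, token: usize, expected: []const TokenTag) bool {
    if (token < expected.len) return false;
    return std.mem.eql(TokenTag, tags[token - expected.len .. token], expected);
}

test "labeled block detection" {
    // Token stream for `blk: {`.
    const tags = [_]TokenTag{ .identifier, .colon, .l_brace, .eof };
    try std.testing.expect(isTokenPrecededByTags(&tags, 2, &.{ .identifier, .colon }));
    try std.testing.expect(!isTokenPrecededByTags(&tags, 3, &.{ .identifier, .colon }));
}
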
lib/std/zig/ErrorBundle.zig
@@ -481,13 +481,13 @@ pub const Wip = struct {
const item = zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
extra_index = item.end;
const err_span = blk: {
- if (item.data.node != 0) {
- break :blk tree.nodeToSpan(item.data.node);
- }
- const token_starts = tree.tokens.items(.start);
- const start = token_starts[item.data.token] + item.data.byte_offset;
- const end = start + @as(u32, @intCast(tree.tokenSlice(item.data.token).len)) - item.data.byte_offset;
- break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
+ if (item.data.node.unwrap()) |node| {
+ break :blk tree.nodeToSpan(node);
+ } else if (item.data.token.unwrap()) |token| {
+ const start = tree.tokenStart(token) + item.data.byte_offset;
+ const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
+ break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
+ } else unreachable;
};
const err_loc = std.zig.findLineColumn(source, err_span.main);
@@ -516,13 +516,13 @@ pub const Wip = struct {
const note_item = zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
const msg = zir.nullTerminatedString(note_item.data.msg);
const span = blk: {
- if (note_item.data.node != 0) {
- break :blk tree.nodeToSpan(note_item.data.node);
- }
- const token_starts = tree.tokens.items(.start);
- const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
- const end = start + @as(u32, @intCast(tree.tokenSlice(note_item.data.token).len)) - item.data.byte_offset;
- break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
+ if (note_item.data.node.unwrap()) |node| {
+ break :blk tree.nodeToSpan(node);
+ } else if (note_item.data.token.unwrap()) |token| {
+ const start = tree.tokenStart(token) + note_item.data.byte_offset;
+ const end = start + @as(u32, @intCast(tree.tokenSlice(token).len)) - item.data.byte_offset;
+ break :blk std.zig.Ast.Span{ .start = start, .end = end, .main = start };
+ } else unreachable;
};
const loc = std.zig.findLineColumn(source, span.main);
@@ -560,13 +560,14 @@ pub const Wip = struct {
for (zoir.compile_errors) |err| {
const err_span: std.zig.Ast.Span = span: {
- if (err.token == std.zig.Zoir.CompileError.invalid_token) {
- break :span tree.nodeToSpan(err.node_or_offset);
+ if (err.token.unwrap()) |token| {
+ const token_start = tree.tokenStart(token);
+ const start = token_start + err.node_or_offset;
+ const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
+ break :span .{ .start = start, .end = end, .main = start };
+ } else {
+ break :span tree.nodeToSpan(@enumFromInt(err.node_or_offset));
}
- const token_start = tree.tokens.items(.start)[err.token];
- const start = token_start + err.node_or_offset;
- const end = token_start + @as(u32, @intCast(tree.tokenSlice(err.token).len));
- break :span .{ .start = start, .end = end, .main = start };
};
const err_loc = std.zig.findLineColumn(source, err_span.main);
@@ -588,13 +589,14 @@ pub const Wip = struct {
for (notes_start.., err.first_note.., 0..err.note_count) |eb_note_idx, zoir_note_idx, _| {
const note = zoir.error_notes[zoir_note_idx];
const note_span: std.zig.Ast.Span = span: {
- if (note.token == std.zig.Zoir.CompileError.invalid_token) {
- break :span tree.nodeToSpan(note.node_or_offset);
+ if (note.token.unwrap()) |token| {
+ const token_start = tree.tokenStart(token);
+ const start = token_start + note.node_or_offset;
+ const end = token_start + @as(u32, @intCast(tree.tokenSlice(token).len));
+ break :span .{ .start = start, .end = end, .main = start };
+ } else {
+ break :span tree.nodeToSpan(@enumFromInt(note.node_or_offset));
}
- const token_start = tree.tokens.items(.start)[note.token];
- const start = token_start + note.node_or_offset;
- const end = token_start + @as(u32, @intCast(tree.tokenSlice(note.token).len));
- break :span .{ .start = start, .end = end, .main = start };
};
const note_loc = std.zig.findLineColumn(source, note_span.main);
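
Here `node_or_offset` carries either of two meanings, disambiguated by whether `token` is present: a byte offset into the token, or (reinterpreted through `@enumFromInt`) a node index. A reduced model of that dual-use field, with hypothetical types rather than the actual `std.zig.Zoir` layout:

const std = @import("std");

const NodeIndex = enum(u32) { _ };

const OptionalTokenIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(t: OptionalTokenIndex) ?u32 {
        return if (t == .none) null else @intFromEnum(t);
    }
};

const CompileError = struct {
    token: OptionalTokenIndex,
    // Byte offset into `token` when `token` is set; otherwise the
    // raw integer value of a NodeIndex.
    node_or_offset: u32,

    fn nodeIndex(e: CompileError) ?NodeIndex {
        if (e.token != .none) return null;
        const n: NodeIndex = @enumFromInt(e.node_or_offset);
        return n;
    }
};
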
lib/std/zig/Parse.zig
@@ -4,52 +4,71 @@ pub const Error = error{ParseError} || Allocator.Error;
gpa: Allocator,
source: []const u8,
-token_tags: []const Token.Tag,
-token_starts: []const Ast.ByteOffset,
+tokens: Ast.TokenList.Slice,
tok_i: TokenIndex,
errors: std.ArrayListUnmanaged(AstError),
nodes: Ast.NodeList,
-extra_data: std.ArrayListUnmanaged(Node.Index),
+extra_data: std.ArrayListUnmanaged(u32),
scratch: std.ArrayListUnmanaged(Node.Index),
+fn tokenTag(p: *const Parse, token_index: TokenIndex) Token.Tag {
+ return p.tokens.items(.tag)[token_index];
+}
+
+fn tokenStart(p: *const Parse, token_index: TokenIndex) Ast.ByteOffset {
+ return p.tokens.items(.start)[token_index];
+}
+
+fn nodeTag(p: *const Parse, node: Node.Index) Node.Tag {
+ return p.nodes.items(.tag)[@intFromEnum(node)];
+}
+
+fn nodeMainToken(p: *const Parse, node: Node.Index) TokenIndex {
+ return p.nodes.items(.main_token)[@intFromEnum(node)];
+}
+
+fn nodeData(p: *const Parse, node: Node.Index) Node.Data {
+ return p.nodes.items(.data)[@intFromEnum(node)];
+}
+
const SmallSpan = union(enum) {
- zero_or_one: Node.Index,
+ zero_or_one: Node.OptionalIndex,
multi: Node.SubRange,
};
const Members = struct {
len: usize,
- lhs: Node.Index,
- rhs: Node.Index,
+ /// Must be either `.opt_node_and_opt_node` if `len <= 2` or `.extra_range` otherwise.
+ data: Node.Data,
trailing: bool,
fn toSpan(self: Members, p: *Parse) !Node.SubRange {
- if (self.len <= 2) {
- const nodes = [2]Node.Index{ self.lhs, self.rhs };
- return p.listToSpan(nodes[0..self.len]);
- } else {
- return Node.SubRange{ .start = self.lhs, .end = self.rhs };
- }
+ return switch (self.len) {
+ 0 => p.listToSpan(&.{}),
+ 1 => p.listToSpan(&.{self.data.opt_node_and_opt_node[0].unwrap().?}),
+ 2 => p.listToSpan(&.{ self.data.opt_node_and_opt_node[0].unwrap().?, self.data.opt_node_and_opt_node[1].unwrap().? }),
+ else => self.data.extra_range,
+ };
}
};
-fn listToSpan(p: *Parse, list: []const Node.Index) !Node.SubRange {
- try p.extra_data.appendSlice(p.gpa, list);
- return Node.SubRange{
- .start = @as(Node.Index, @intCast(p.extra_data.items.len - list.len)),
- .end = @as(Node.Index, @intCast(p.extra_data.items.len)),
+fn listToSpan(p: *Parse, list: []const Node.Index) Allocator.Error!Node.SubRange {
+ try p.extra_data.appendSlice(p.gpa, @ptrCast(list));
+ return .{
+ .start = @enumFromInt(p.extra_data.items.len - list.len),
+ .end = @enumFromInt(p.extra_data.items.len),
};
}
fn addNode(p: *Parse, elem: Ast.Node) Allocator.Error!Node.Index {
- const result = @as(Node.Index, @intCast(p.nodes.len));
+ const result: Node.Index = @enumFromInt(p.nodes.len);
try p.nodes.append(p.gpa, elem);
return result;
}
fn setNode(p: *Parse, i: usize, elem: Ast.Node) Node.Index {
p.nodes.set(i, elem);
- return @as(Node.Index, @intCast(i));
+ return @enumFromInt(i);
}
fn reserveNode(p: *Parse, tag: Ast.Node.Tag) !usize {
@@ -69,13 +88,22 @@ fn unreserveNode(p: *Parse, node_index: usize) void {
}
}
-fn addExtra(p: *Parse, extra: anytype) Allocator.Error!Node.Index {
+fn addExtra(p: *Parse, extra: anytype) Allocator.Error!ExtraIndex {
const fields = std.meta.fields(@TypeOf(extra));
try p.extra_data.ensureUnusedCapacity(p.gpa, fields.len);
- const result = @as(u32, @intCast(p.extra_data.items.len));
+ const result: ExtraIndex = @enumFromInt(p.extra_data.items.len);
inline for (fields) |field| {
- comptime assert(field.type == Node.Index);
- p.extra_data.appendAssumeCapacity(@field(extra, field.name));
+ const data: u32 = switch (field.type) {
+ Node.Index,
+ Node.OptionalIndex,
+ OptionalTokenIndex,
+ ExtraIndex,
+ => @intFromEnum(@field(extra, field.name)),
+ TokenIndex,
+ => @field(extra, field.name),
+ else => @compileError("unexpected field type"),
+ };
+ p.extra_data.appendAssumeCapacity(data);
}
return result;
}
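
With `extra_data` now plain `u32` words, `addExtra` flattens a typed record field by field through `@intFromEnum`, and `listToSpan` can `@ptrCast` a `[]const Node.Index` directly to `[]const u32` because the index enums are `enum(u32)`. The flattening step in miniature, using a toy record type:

const std = @import("std");

const NodeIndex = enum(u32) { _ };

// Toy counterpart of `Node.If`: a record of index enums that is
// stored as consecutive u32 words in extra_data.
const If = struct { then_expr: NodeIndex, else_expr: NodeIndex };

fn addExtra(gpa: std.mem.Allocator, extra_data: *std.ArrayListUnmanaged(u32), extra: anytype) !u32 {
    const result: u32 = @intCast(extra_data.items.len);
    inline for (std.meta.fields(@TypeOf(extra))) |field| {
        try extra_data.append(gpa, @intFromEnum(@field(extra, field.name)));
    }
    return result;
}

test "record round-trips as raw words" {
    var extra_data: std.ArrayListUnmanaged(u32) = .empty;
    defer extra_data.deinit(std.testing.allocator);
    const start = try addExtra(std.testing.allocator, &extra_data, If{
        .then_expr = @enumFromInt(4),
        .else_expr = @enumFromInt(7),
    });
    try std.testing.expectEqualSlices(u32, &.{ 4, 7 }, extra_data.items[start..]);
}
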
@@ -170,13 +198,10 @@ pub fn parseRoot(p: *Parse) !void {
});
const root_members = try p.parseContainerMembers();
const root_decls = try root_members.toSpan(p);
- if (p.token_tags[p.tok_i] != .eof) {
+ if (p.tokenTag(p.tok_i) != .eof) {
try p.warnExpected(.eof);
}
- p.nodes.items(.data)[0] = .{
- .lhs = root_decls.start,
- .rhs = root_decls.end,
- };
+ p.nodes.items(.data)[0] = .{ .extra_range = root_decls };
}
/// Parse in ZON mode. Subset of the language.
@@ -196,13 +221,10 @@ pub fn parseZon(p: *Parse) !void {
},
else => |e| return e,
};
- if (p.token_tags[p.tok_i] != .eof) {
+ if (p.tokenTag(p.tok_i) != .eof) {
try p.warnExpected(.eof);
}
- p.nodes.items(.data)[0] = .{
- .lhs = node_index,
- .rhs = undefined,
- };
+ p.nodes.items(.data)[0] = .{ .node = node_index };
}
/// ContainerMembers <- ContainerDeclaration* (ContainerField COMMA)* (ContainerField / ContainerDeclaration*)
@@ -235,13 +257,13 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
while (true) {
const doc_comment = try p.eatDocComments();
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_test => {
if (doc_comment) |some| {
try p.warnMsg(.{ .tag = .test_doc_comment, .token = some });
}
- const test_decl_node = try p.expectTestDeclRecoverable();
- if (test_decl_node != 0) {
+ const maybe_test_decl_node = try p.expectTestDeclRecoverable();
+ if (maybe_test_decl_node) |test_decl_node| {
if (field_state == .seen) {
field_state = .{ .end = test_decl_node };
}
@@ -249,27 +271,24 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
}
trailing = false;
},
- .keyword_comptime => switch (p.token_tags[p.tok_i + 1]) {
+ .keyword_comptime => switch (p.tokenTag(p.tok_i + 1)) {
.l_brace => {
if (doc_comment) |some| {
try p.warnMsg(.{ .tag = .comptime_doc_comment, .token = some });
}
const comptime_token = p.nextToken();
- const block = p.parseBlock() catch |err| switch (err) {
+ const opt_block = p.parseBlock() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => blk: {
p.findNextContainerMember();
- break :blk null_node;
+ break :blk null;
},
};
- if (block != 0) {
+ if (opt_block) |block| {
const comptime_node = try p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
- .data = .{
- .lhs = block,
- .rhs = undefined,
- },
+ .data = .{ .node = block },
});
if (field_state == .seen) {
field_state = .{ .end = comptime_node };
@@ -294,7 +313,7 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
- .token = p.nodes.items(.main_token)[node],
+ .token = p.nodeMainToken(node),
});
try p.warnMsg(.{
.tag = .previous_field,
@@ -311,7 +330,7 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
},
}
try p.scratch.append(p.gpa, container_field);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => {
p.tok_i += 1;
trailing = true;
@@ -331,24 +350,24 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
},
.keyword_pub => {
p.tok_i += 1;
- const top_level_decl = try p.expectTopLevelDeclRecoverable();
- if (top_level_decl != 0) {
+ const opt_top_level_decl = try p.expectTopLevelDeclRecoverable();
+ if (opt_top_level_decl) |top_level_decl| {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try p.scratch.append(p.gpa, top_level_decl);
}
- trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+ trailing = p.tokenTag(p.tok_i - 1) == .semicolon;
},
.keyword_usingnamespace => {
- const node = try p.expectUsingNamespaceRecoverable();
- if (node != 0) {
+ const opt_node = try p.expectUsingNamespaceRecoverable();
+ if (opt_node) |node| {
if (field_state == .seen) {
field_state = .{ .end = node };
}
try p.scratch.append(p.gpa, node);
}
- trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+ trailing = p.tokenTag(p.tok_i - 1) == .semicolon;
},
.keyword_const,
.keyword_var,
@@ -359,14 +378,14 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
.keyword_noinline,
.keyword_fn,
=> {
- const top_level_decl = try p.expectTopLevelDeclRecoverable();
- if (top_level_decl != 0) {
+ const opt_top_level_decl = try p.expectTopLevelDeclRecoverable();
+ if (opt_top_level_decl) |top_level_decl| {
if (field_state == .seen) {
field_state = .{ .end = top_level_decl };
}
try p.scratch.append(p.gpa, top_level_decl);
}
- trailing = p.token_tags[p.tok_i - 1] == .semicolon;
+ trailing = p.tokenTag(p.tok_i - 1) == .semicolon;
},
.eof, .r_brace => {
if (doc_comment) |tok| {
@@ -399,7 +418,7 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
.end => |node| {
try p.warnMsg(.{
.tag = .decl_between_fields,
- .token = p.nodes.items(.main_token)[node],
+ .token = p.nodeMainToken(node),
});
try p.warnMsg(.{
.tag = .previous_field,
@@ -416,7 +435,7 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
},
}
try p.scratch.append(p.gpa, container_field);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => {
p.tok_i += 1;
trailing = true;
@@ -431,7 +450,7 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
// There is not allowed to be a decl after a field with no comma.
// Report error but recover parser.
try p.warn(.expected_comma_after_field);
- if (p.token_tags[p.tok_i] == .semicolon and p.token_tags[identifier] == .identifier) {
+ if (p.tokenTag(p.tok_i) == .semicolon and p.tokenTag(identifier) == .identifier) {
try p.warnMsg(.{
.tag = .var_const_decl,
.is_note = true,
@@ -445,34 +464,21 @@ fn parseContainerMembers(p: *Parse) Allocator.Error!Members {
}
const items = p.scratch.items[scratch_top..];
- switch (items.len) {
- 0 => return Members{
- .len = 0,
- .lhs = 0,
- .rhs = 0,
+ if (items.len <= 2) {
+ return Members{
+ .len = items.len,
+ .data = .{ .opt_node_and_opt_node = .{
+ if (items.len >= 1) items[0].toOptional() else .none,
+ if (items.len >= 2) items[1].toOptional() else .none,
+ } },
.trailing = trailing,
- },
- 1 => return Members{
- .len = 1,
- .lhs = items[0],
- .rhs = 0,
- .trailing = trailing,
- },
- 2 => return Members{
- .len = 2,
- .lhs = items[0],
- .rhs = items[1],
+ };
+ } else {
+ return Members{
+ .len = items.len,
+ .data = .{ .extra_range = try p.listToSpan(items) },
.trailing = trailing,
- },
- else => {
- const span = try p.listToSpan(items);
- return Members{
- .len = items.len,
- .lhs = span.start,
- .rhs = span.end,
- .trailing = trailing,
- };
- },
+ };
}
}
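
`Members` now stores up to two nodes inline as a pair of optionals and spills longer lists into `extra_data`, replacing the old four-way switch on `lhs`/`rhs`. The encoding decision in isolation, with stand-in types rather than the real `Node.Data` union:

const std = @import("std");

const Range = struct { start: u32, end: u32 };

// Stand-in for `Node.Data`: `.pair` mirrors `.opt_node_and_opt_node`,
// `.range` mirrors `.extra_range`.
const Data = union(enum) {
    pair: [2]?u32,
    range: Range,
};

fn encodeMembers(gpa: std.mem.Allocator, extra_data: *std.ArrayListUnmanaged(u32), items: []const u32) !Data {
    if (items.len <= 2) return .{ .pair = .{
        if (items.len >= 1) items[0] else null,
        if (items.len >= 2) items[1] else null,
    } };
    const start: u32 = @intCast(extra_data.items.len);
    try extra_data.appendSlice(gpa, items);
    return .{ .range = .{ .start = start, .end = @intCast(extra_data.items.len) } };
}
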
@@ -481,7 +487,7 @@ fn findNextContainerMember(p: *Parse) void {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
- switch (p.token_tags[tok]) {
+ switch (p.tokenTag(tok)) {
// Any of these can start a new top level declaration.
.keyword_test,
.keyword_comptime,
@@ -502,7 +508,7 @@ fn findNextContainerMember(p: *Parse) void {
}
},
.identifier => {
- if (p.token_tags[tok + 1] == .comma and level == 0) {
+ if (p.tokenTag(tok + 1) == .comma and level == 0) {
p.tok_i -= 1;
return;
}
@@ -539,7 +545,7 @@ fn findNextStmt(p: *Parse) void {
var level: u32 = 0;
while (true) {
const tok = p.nextToken();
- switch (p.token_tags[tok]) {
+ switch (p.tokenTag(tok)) {
.l_brace => level += 1,
.r_brace => {
if (level == 0) {
@@ -563,44 +569,45 @@ fn findNextStmt(p: *Parse) void {
}
/// TestDecl <- KEYWORD_test (STRINGLITERALSINGLE / IDENTIFIER)? Block
-fn expectTestDecl(p: *Parse) !Node.Index {
+fn expectTestDecl(p: *Parse) Error!Node.Index {
const test_token = p.assertToken(.keyword_test);
- const name_token = switch (p.token_tags[p.tok_i]) {
- .string_literal, .identifier => p.nextToken(),
- else => null,
+ const name_token: OptionalTokenIndex = switch (p.tokenTag(p.tok_i)) {
+ .string_literal, .identifier => .fromToken(p.nextToken()),
+ else => .none,
};
- const block_node = try p.parseBlock();
- if (block_node == 0) return p.fail(.expected_block);
+ const block_node = try p.parseBlock() orelse return p.fail(.expected_block);
return p.addNode(.{
.tag = .test_decl,
.main_token = test_token,
- .data = .{
- .lhs = name_token orelse 0,
- .rhs = block_node,
- },
+ .data = .{ .opt_token_and_node = .{
+ name_token,
+ block_node,
+ } },
});
}
-fn expectTestDeclRecoverable(p: *Parse) error{OutOfMemory}!Node.Index {
- return p.expectTestDecl() catch |err| switch (err) {
+fn expectTestDeclRecoverable(p: *Parse) error{OutOfMemory}!?Node.Index {
+ if (p.expectTestDecl()) |node| {
+ return node;
+ } else |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
- return null_node;
+ return null;
},
- };
+ }
}
/// Decl
/// <- (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE? / KEYWORD_inline / KEYWORD_noinline)? FnProto (SEMICOLON / Block)
/// / (KEYWORD_export / KEYWORD_extern STRINGLITERALSINGLE?)? KEYWORD_threadlocal? VarDecl
/// / KEYWORD_usingnamespace Expr SEMICOLON
-fn expectTopLevelDecl(p: *Parse) !Node.Index {
+fn expectTopLevelDecl(p: *Parse) !?Node.Index {
const extern_export_inline_token = p.nextToken();
var is_extern: bool = false;
var expect_fn: bool = false;
var expect_var_or_fn: bool = false;
- switch (p.token_tags[extern_export_inline_token]) {
+ switch (p.tokenTag(extern_export_inline_token)) {
.keyword_extern => {
_ = p.eatToken(.string_literal);
is_extern = true;
@@ -610,9 +617,9 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
.keyword_inline, .keyword_noinline => expect_fn = true,
else => p.tok_i -= 1,
}
- const fn_proto = try p.parseFnProto();
- if (fn_proto != 0) {
- switch (p.token_tags[p.tok_i]) {
+ const opt_fn_proto = try p.parseFnProto();
+ if (opt_fn_proto) |fn_proto| {
+ switch (p.tokenTag(p.tok_i)) {
.semicolon => {
p.tok_i += 1;
return fn_proto;
@@ -620,20 +627,19 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
.l_brace => {
if (is_extern) {
try p.warnMsg(.{ .tag = .extern_fn_body, .token = extern_export_inline_token });
- return null_node;
+ return null;
}
const fn_decl_index = try p.reserveNode(.fn_decl);
errdefer p.unreserveNode(fn_decl_index);
const body_block = try p.parseBlock();
- assert(body_block != 0);
return p.setNode(fn_decl_index, .{
.tag = .fn_decl,
- .main_token = p.nodes.items(.main_token)[fn_proto],
- .data = .{
- .lhs = fn_proto,
- .rhs = body_block,
- },
+ .main_token = p.nodeMainToken(fn_proto),
+ .data = .{ .node_and_node = .{
+ fn_proto,
+ body_block.?,
+ } },
});
},
else => {
@@ -641,7 +647,7 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
// a missing '}' we can assume this function was
// supposed to end here.
try p.warn(.expected_semi_or_lbrace);
- return null_node;
+ return null;
},
}
}
@@ -651,28 +657,25 @@ fn expectTopLevelDecl(p: *Parse) !Node.Index {
}
const thread_local_token = p.eatToken(.keyword_threadlocal);
- const var_decl = try p.parseGlobalVarDecl();
- if (var_decl != 0) {
- return var_decl;
- }
+ if (try p.parseGlobalVarDecl()) |var_decl| return var_decl;
if (thread_local_token != null) {
return p.fail(.expected_var_decl);
}
if (expect_var_or_fn) {
return p.fail(.expected_var_decl_or_fn);
}
- if (p.token_tags[p.tok_i] != .keyword_usingnamespace) {
+ if (p.tokenTag(p.tok_i) != .keyword_usingnamespace) {
return p.fail(.expected_pub_item);
}
- return p.expectUsingNamespace();
+ return try p.expectUsingNamespace();
}
-fn expectTopLevelDeclRecoverable(p: *Parse) error{OutOfMemory}!Node.Index {
+fn expectTopLevelDeclRecoverable(p: *Parse) error{OutOfMemory}!?Node.Index {
return p.expectTopLevelDecl() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
- return null_node;
+ return null;
},
};
}
@@ -684,26 +687,23 @@ fn expectUsingNamespace(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .@"usingnamespace",
.main_token = usingnamespace_token,
- .data = .{
- .lhs = expr,
- .rhs = undefined,
- },
+ .data = .{ .node = expr },
});
}
-fn expectUsingNamespaceRecoverable(p: *Parse) error{OutOfMemory}!Node.Index {
+fn expectUsingNamespaceRecoverable(p: *Parse) error{OutOfMemory}!?Node.Index {
return p.expectUsingNamespace() catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextContainerMember();
- return null_node;
+ return null;
},
};
}
/// FnProto <- KEYWORD_fn IDENTIFIER? LPAREN ParamDeclList RPAREN ByteAlign? AddrSpace? LinkSection? CallConv? EXCLAMATIONMARK? TypeExpr
-fn parseFnProto(p: *Parse) !Node.Index {
- const fn_token = p.eatToken(.keyword_fn) orelse return null_node;
+fn parseFnProto(p: *Parse) !?Node.Index {
+ const fn_token = p.eatToken(.keyword_fn) orelse return null;
// We want the fn proto node to be before its children in the array.
const fn_proto_index = try p.reserveNode(.fn_proto);
@@ -718,33 +718,33 @@ fn parseFnProto(p: *Parse) !Node.Index {
_ = p.eatToken(.bang);
const return_type_expr = try p.parseTypeExpr();
- if (return_type_expr == 0) {
+ if (return_type_expr == null) {
// most likely the user forgot to specify the return type.
// Mark return type as invalid and try to continue.
try p.warn(.expected_return_type);
}
- if (align_expr == 0 and section_expr == 0 and callconv_expr == 0 and addrspace_expr == 0) {
+ if (align_expr == null and section_expr == null and callconv_expr == null and addrspace_expr == null) {
switch (params) {
.zero_or_one => |param| return p.setNode(fn_proto_index, .{
.tag = .fn_proto_simple,
.main_token = fn_token,
- .data = .{
- .lhs = param,
- .rhs = return_type_expr,
- },
+ .data = .{ .opt_node_and_opt_node = .{
+ param,
+ .fromOptional(return_type_expr),
+ } },
}),
.multi => |span| {
return p.setNode(fn_proto_index, .{
.tag = .fn_proto_multi,
.main_token = fn_token,
- .data = .{
- .lhs = try p.addExtra(Node.SubRange{
+ .data = .{ .extra_and_opt_node = .{
+ try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
- .rhs = return_type_expr,
- },
+ .fromOptional(return_type_expr),
+ } },
});
},
}
@@ -753,109 +753,124 @@ fn parseFnProto(p: *Parse) !Node.Index {
.zero_or_one => |param| return p.setNode(fn_proto_index, .{
.tag = .fn_proto_one,
.main_token = fn_token,
- .data = .{
- .lhs = try p.addExtra(Node.FnProtoOne{
+ .data = .{ .extra_and_opt_node = .{
+ try p.addExtra(Node.FnProtoOne{
.param = param,
- .align_expr = align_expr,
- .addrspace_expr = addrspace_expr,
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
+ .align_expr = .fromOptional(align_expr),
+ .addrspace_expr = .fromOptional(addrspace_expr),
+ .section_expr = .fromOptional(section_expr),
+ .callconv_expr = .fromOptional(callconv_expr),
}),
- .rhs = return_type_expr,
- },
+ .fromOptional(return_type_expr),
+ } },
}),
.multi => |span| {
return p.setNode(fn_proto_index, .{
.tag = .fn_proto,
.main_token = fn_token,
- .data = .{
- .lhs = try p.addExtra(Node.FnProto{
+ .data = .{ .extra_and_opt_node = .{
+ try p.addExtra(Node.FnProto{
.params_start = span.start,
.params_end = span.end,
- .align_expr = align_expr,
- .addrspace_expr = addrspace_expr,
- .section_expr = section_expr,
- .callconv_expr = callconv_expr,
+ .align_expr = .fromOptional(align_expr),
+ .addrspace_expr = .fromOptional(addrspace_expr),
+ .section_expr = .fromOptional(section_expr),
+ .callconv_expr = .fromOptional(callconv_expr),
}),
- .rhs = return_type_expr,
- },
+ .fromOptional(return_type_expr),
+ } },
});
},
}
}
+fn setVarDeclInitExpr(p: *Parse, var_decl: Node.Index, init_expr: Node.OptionalIndex) void {
+ const init_expr_result = switch (p.nodeTag(var_decl)) {
+ .simple_var_decl => &p.nodes.items(.data)[@intFromEnum(var_decl)].opt_node_and_opt_node[1],
+ .aligned_var_decl => &p.nodes.items(.data)[@intFromEnum(var_decl)].node_and_opt_node[1],
+ .local_var_decl, .global_var_decl => &p.nodes.items(.data)[@intFromEnum(var_decl)].extra_and_opt_node[1],
+ else => unreachable,
+ };
+ init_expr_result.* = init_expr;
+}
+
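
`setVarDeclInitExpr` patches the init expression into an already-added node: which `Node.Data` union field is active depends on the node tag, so the helper selects the matching field and writes through a pointer into the data column. The same maneuver on toy types:

const std = @import("std");

const Node = struct {
    tag: Tag,
    data: Data,

    const Tag = enum { simple_var_decl, aligned_var_decl };
    const Data = union {
        opt_node_and_opt_node: [2]?u32,
        node_and_opt_node: struct { u32, ?u32 },
    };
};

fn setVarDeclInitExpr(nodes: *std.MultiArrayList(Node), var_decl: usize, init_expr: ?u32) void {
    // Pick the union field that is active for this tag, then write
    // the init slot in place.
    const init_slot = switch (nodes.items(.tag)[var_decl]) {
        .simple_var_decl => &nodes.items(.data)[var_decl].opt_node_and_opt_node[1],
        .aligned_var_decl => &nodes.items(.data)[var_decl].node_and_opt_node[1],
    };
    init_slot.* = init_expr;
}
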
/// VarDeclProto <- (KEYWORD_const / KEYWORD_var) IDENTIFIER (COLON TypeExpr)? ByteAlign? AddrSpace? LinkSection?
-/// Returns a `*_var_decl` node with its rhs (init expression) initialized to 0.
-fn parseVarDeclProto(p: *Parse) !Node.Index {
+/// Returns a `*_var_decl` node with its rhs (init expression) initialized to .none.
+fn parseVarDeclProto(p: *Parse) !?Node.Index {
const mut_token = p.eatToken(.keyword_const) orelse
p.eatToken(.keyword_var) orelse
- return null_node;
+ return null;
_ = try p.expectToken(.identifier);
- const type_node: Node.Index = if (p.eatToken(.colon) == null) 0 else try p.expectTypeExpr();
- const align_node = try p.parseByteAlign();
- const addrspace_node = try p.parseAddrSpace();
- const section_node = try p.parseLinkSection();
-
- if (section_node == 0 and addrspace_node == 0) {
- if (align_node == 0) {
- return p.addNode(.{
+ const opt_type_node = if (p.eatToken(.colon) == null) null else try p.expectTypeExpr();
+ const opt_align_node = try p.parseByteAlign();
+ const opt_addrspace_node = try p.parseAddrSpace();
+ const opt_section_node = try p.parseLinkSection();
+
+ if (opt_section_node == null and opt_addrspace_node == null) {
+ const align_node = opt_align_node orelse {
+ return try p.addNode(.{
.tag = .simple_var_decl,
.main_token = mut_token,
.data = .{
- .lhs = type_node,
- .rhs = 0,
+ .opt_node_and_opt_node = .{
+ .fromOptional(opt_type_node),
+ .none, // set later with `setVarDeclInitExpr`
+ },
},
});
- }
+ };
- if (type_node == 0) {
- return p.addNode(.{
+ const type_node = opt_type_node orelse {
+ return try p.addNode(.{
.tag = .aligned_var_decl,
.main_token = mut_token,
.data = .{
- .lhs = align_node,
- .rhs = 0,
+ .node_and_opt_node = .{
+ align_node,
+ .none, // set later with `setVarDeclInitExpr`
+ },
},
});
- }
+ };
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .local_var_decl,
.main_token = mut_token,
.data = .{
- .lhs = try p.addExtra(Node.LocalVarDecl{
- .type_node = type_node,
- .align_node = align_node,
- }),
- .rhs = 0,
+ .extra_and_opt_node = .{
+ try p.addExtra(Node.LocalVarDecl{
+ .type_node = type_node,
+ .align_node = align_node,
+ }),
+ .none, // set later with `setVarDeclInitExpr`
+ },
},
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .global_var_decl,
.main_token = mut_token,
.data = .{
- .lhs = try p.addExtra(Node.GlobalVarDecl{
- .type_node = type_node,
- .align_node = align_node,
- .addrspace_node = addrspace_node,
- .section_node = section_node,
- }),
- .rhs = 0,
+ .extra_and_opt_node = .{
+ try p.addExtra(Node.GlobalVarDecl{
+ .type_node = .fromOptional(opt_type_node),
+ .align_node = .fromOptional(opt_align_node),
+ .addrspace_node = .fromOptional(opt_addrspace_node),
+ .section_node = .fromOptional(opt_section_node),
+ }),
+ .none, // set later with `setVarDeclInitExpr`
+ },
},
});
}
}
/// GlobalVarDecl <- VarDeclProto (EQUAL Expr?) SEMICOLON
-fn parseGlobalVarDecl(p: *Parse) !Node.Index {
- const var_decl = try p.parseVarDeclProto();
- if (var_decl == 0) {
- return null_node;
- }
+fn parseGlobalVarDecl(p: *Parse) !?Node.Index {
+ const var_decl = try p.parseVarDeclProto() orelse return null;
- const init_node: Node.Index = switch (p.token_tags[p.tok_i]) {
+ const init_node: ?Node.Index = switch (p.tokenTag(p.tok_i)) {
.equal_equal => blk: {
try p.warn(.wrong_equal_var_decl);
p.tok_i += 1;
@@ -865,10 +880,10 @@ fn parseGlobalVarDecl(p: *Parse) !Node.Index {
p.tok_i += 1;
break :blk try p.expectExpr();
},
- else => 0,
+ else => null,
};
- p.nodes.items(.data)[var_decl].rhs = init_node;
+ p.setVarDeclInitExpr(var_decl, .fromOptional(init_node));
try p.expectSemicolon(.expected_semi_after_decl, false);
return var_decl;
@@ -878,40 +893,39 @@ fn parseGlobalVarDecl(p: *Parse) !Node.Index {
fn expectContainerField(p: *Parse) !Node.Index {
_ = p.eatToken(.keyword_comptime);
const main_token = p.tok_i;
- if (p.token_tags[p.tok_i] == .identifier and p.token_tags[p.tok_i + 1] == .colon) p.tok_i += 2;
+ _ = p.eatTokens(&.{ .identifier, .colon });
const type_expr = try p.expectTypeExpr();
const align_expr = try p.parseByteAlign();
- const value_expr: Node.Index = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
+ const value_expr = if (p.eatToken(.equal) == null) null else try p.expectExpr();
- if (align_expr == 0) {
+ if (align_expr == null) {
return p.addNode(.{
.tag = .container_field_init,
.main_token = main_token,
- .data = .{
- .lhs = type_expr,
- .rhs = value_expr,
- },
+ .data = .{ .node_and_opt_node = .{
+ type_expr,
+ .fromOptional(value_expr),
+ } },
});
- } else if (value_expr == 0) {
+ } else if (value_expr == null) {
return p.addNode(.{
.tag = .container_field_align,
.main_token = main_token,
- .data = .{
- .lhs = type_expr,
- .rhs = align_expr,
- },
+ .data = .{ .node_and_node = .{
+ type_expr,
+ align_expr.?,
+ } },
});
} else {
return p.addNode(.{
.tag = .container_field,
.main_token = main_token,
- .data = .{
- .lhs = type_expr,
- .rhs = try p.addExtra(Node.ContainerField{
- .align_expr = align_expr,
- .value_expr = value_expr,
+ .data = .{ .node_and_extra = .{
+ type_expr, try p.addExtra(Node.ContainerField{
+ .align_expr = align_expr.?,
+ .value_expr = value_expr.?,
}),
- },
+ } },
});
}
}
@@ -927,15 +941,12 @@ fn expectContainerField(p: *Parse) !Node.Index {
/// / VarDeclExprStatement
fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
if (p.eatToken(.keyword_comptime)) |comptime_token| {
- const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) {
+ const opt_block_expr = try p.parseBlockExpr();
+ if (opt_block_expr) |block_expr| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
- .data = .{
- .lhs = block_expr,
- .rhs = undefined,
- },
+ .data = .{ .node = block_expr },
});
}
@@ -947,23 +958,17 @@ fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
return p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
- .data = .{
- .lhs = assign,
- .rhs = undefined,
- },
+ .data = .{ .node = assign },
});
}
}
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_nosuspend => {
return p.addNode(.{
.tag = .@"nosuspend",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectBlockExprStatement(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectBlockExprStatement() },
});
},
.keyword_suspend => {
@@ -972,27 +977,21 @@ fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
return p.addNode(.{
.tag = .@"suspend",
.main_token = token,
- .data = .{
- .lhs = block_expr,
- .rhs = undefined,
- },
+ .data = .{ .node = block_expr },
});
},
.keyword_defer => if (allow_defer_var) return p.addNode(.{
.tag = .@"defer",
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = try p.expectBlockExprStatement(),
- },
+ .data = .{ .node = try p.expectBlockExprStatement() },
}),
.keyword_errdefer => if (allow_defer_var) return p.addNode(.{
.tag = .@"errdefer",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.parsePayload(),
- .rhs = try p.expectBlockExprStatement(),
- },
+ .data = .{ .opt_token_and_node = .{
+ try p.parsePayload(),
+ try p.expectBlockExprStatement(),
+ } },
}),
.keyword_if => return p.expectIfStatement(),
.keyword_enum, .keyword_struct, .keyword_union => {
@@ -1002,18 +1001,14 @@ fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
return p.addNode(.{
.tag = .identifier,
.main_token = identifier,
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
});
}
},
else => {},
}
- const labeled_statement = try p.parseLabeledStatement();
- if (labeled_statement != 0) return labeled_statement;
+ if (try p.parseLabeledStatement()) |labeled_statement| return labeled_statement;
if (allow_defer_var) {
return p.expectVarDeclExprStatement(null);
@@ -1028,12 +1023,15 @@ fn expectStatement(p: *Parse, allow_defer_var: bool) Error!Node.Index {
/// <- BlockExpr
/// / VarDeclExprStatement
fn expectComptimeStatement(p: *Parse, comptime_token: TokenIndex) !Node.Index {
- const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) {
+ const maybe_block_expr = try p.parseBlockExpr();
+ if (maybe_block_expr) |block_expr| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = comptime_token,
- .data = .{ .lhs = block_expr, .rhs = undefined },
+ .data = .{ .node = block_expr },
});
}
return p.expectVarDeclExprStatement(comptime_token);
@@ -1047,12 +1045,11 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
defer p.scratch.shrinkRetainingCapacity(scratch_top);
while (true) {
- const var_decl_proto = try p.parseVarDeclProto();
- if (var_decl_proto != 0) {
- try p.scratch.append(p.gpa, var_decl_proto);
+ const opt_var_decl_proto = try p.parseVarDeclProto();
+ if (opt_var_decl_proto) |var_decl| {
+ try p.scratch.append(p.gpa, var_decl);
} else {
- const expr = try p.parseExpr();
- if (expr == 0) {
+ const expr = try p.parseExpr() orelse {
if (p.scratch.items.len == scratch_top) {
// We parsed nothing
return p.fail(.expected_statement);
@@ -1060,7 +1057,7 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
// We've had at least one LHS, but had a bad comma
return p.fail(.expected_expr_or_var_decl);
}
- }
+ };
try p.scratch.append(p.gpa, expr);
}
_ = p.eatToken(.comma) orelse break;
@@ -1079,7 +1076,7 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
return p.failExpected(.equal);
}
const lhs = p.scratch.items[scratch_top];
- switch (p.nodes.items(.tag)[lhs]) {
+ switch (p.nodeTag(lhs)) {
.global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
// Definitely a var decl, so allow recovering from ==
if (p.eatToken(.equal_equal)) |tok| {
@@ -1097,10 +1094,7 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
return p.addNode(.{
.tag = .@"comptime",
.main_token = t,
- .data = .{
- .lhs = expr,
- .rhs = undefined,
- },
+ .data = .{ .node = expr },
});
} else {
return expr;
@@ -1112,9 +1106,9 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
if (lhs_count == 1) {
const lhs = p.scratch.items[scratch_top];
- switch (p.nodes.items(.tag)[lhs]) {
- .global_var_decl, .local_var_decl, .simple_var_decl, .aligned_var_decl => {
- p.nodes.items(.data)[lhs].rhs = rhs;
+ switch (p.nodeTag(lhs)) {
+ .simple_var_decl, .aligned_var_decl, .local_var_decl, .global_var_decl => {
+ p.setVarDeclInitExpr(lhs, rhs.toOptional());
// Don't need to wrap in comptime
return lhs;
},
@@ -1123,16 +1117,16 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
const expr = try p.addNode(.{
.tag = .assign,
.main_token = equal_token,
- .data = .{ .lhs = lhs, .rhs = rhs },
+ .data = .{ .node_and_node = .{
+ lhs,
+ rhs,
+ } },
});
if (comptime_token) |t| {
return p.addNode(.{
.tag = .@"comptime",
.main_token = t,
- .data = .{
- .lhs = expr,
- .rhs = undefined,
- },
+ .data = .{ .node = expr },
});
} else {
return expr;
@@ -1141,32 +1135,32 @@ fn expectVarDeclExprStatement(p: *Parse, comptime_token: ?TokenIndex) !Node.Inde
// An actual destructure! No need for any `comptime` wrapper here.
- const extra_start = p.extra_data.items.len;
+ const extra_start: ExtraIndex = @enumFromInt(p.extra_data.items.len);
try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
- p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
+ p.extra_data.appendSliceAssumeCapacity(@ptrCast(p.scratch.items[scratch_top..]));
return p.addNode(.{
.tag = .assign_destructure,
.main_token = equal_token,
- .data = .{
- .lhs = @intCast(extra_start),
- .rhs = rhs,
- },
+ .data = .{ .extra_and_node = .{
+ extra_start,
+ rhs,
+ } },
});
}
/// If a parse error occurs, reports an error, but then finds the next statement
/// and returns that one instead. If a parse error occurs but there is no following
-/// statement, returns 0.
+/// statement, returns null.
-fn expectStatementRecoverable(p: *Parse) Error!Node.Index {
+fn expectStatementRecoverable(p: *Parse) Error!?Node.Index {
while (true) {
return p.expectStatement(true) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.ParseError => {
p.findNextStmt(); // Try to skip to the next statement.
- switch (p.token_tags[p.tok_i]) {
- .r_brace => return null_node,
+ switch (p.tokenTag(p.tok_i)) {
+ .r_brace => return null,
.eof => return error.ParseError,
else => continue,
}
@@ -1190,19 +1184,18 @@ fn expectIfStatement(p: *Parse) !Node.Index {
var else_required = false;
const then_expr = blk: {
const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) break :blk block_expr;
- const assign_expr = try p.parseAssignExpr();
- if (assign_expr == 0) {
+ if (block_expr) |block| break :blk block;
+ const assign_expr = try p.parseAssignExpr() orelse {
return p.fail(.expected_block_or_assignment);
- }
+ };
if (p.eatToken(.semicolon)) |_| {
return p.addNode(.{
.tag = .if_simple,
.main_token = if_token,
- .data = .{
- .lhs = condition,
- .rhs = assign_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ assign_expr,
+ } },
});
}
else_required = true;
@@ -1215,10 +1208,10 @@ fn expectIfStatement(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .if_simple,
.main_token = if_token,
- .data = .{
- .lhs = condition,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ then_expr,
+ } },
});
};
_ = try p.parsePayload();
@@ -1226,57 +1219,46 @@ fn expectIfStatement(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .@"if",
.main_token = if_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.If{
+ .data = .{ .node_and_extra = .{
+ condition, try p.addExtra(Node.If{
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
}
/// LabeledStatement <- BlockLabel? (Block / LoopStatement / SwitchExpr)
-fn parseLabeledStatement(p: *Parse) !Node.Index {
- const label_token = p.parseBlockLabel();
- const block = try p.parseBlock();
- if (block != 0) return block;
-
- const loop_stmt = try p.parseLoopStatement();
- if (loop_stmt != 0) return loop_stmt;
-
- const switch_expr = try p.parseSwitchExpr(label_token != 0);
- if (switch_expr != 0) return switch_expr;
-
- if (label_token != 0) {
- const after_colon = p.tok_i;
- const node = try p.parseTypeExpr();
- if (node != 0) {
- const a = try p.parseByteAlign();
- const b = try p.parseAddrSpace();
- const c = try p.parseLinkSection();
- const d = if (p.eatToken(.equal) == null) 0 else try p.expectExpr();
- if (a != 0 or b != 0 or c != 0 or d != 0) {
- return p.failMsg(.{ .tag = .expected_var_const, .token = label_token });
- }
+fn parseLabeledStatement(p: *Parse) !?Node.Index {
+ const opt_label_token = p.parseBlockLabel();
+
+ if (try p.parseBlock()) |block| return block;
+ if (try p.parseLoopStatement()) |loop_stmt| return loop_stmt;
+ if (try p.parseSwitchExpr(opt_label_token != null)) |switch_expr| return switch_expr;
+
+ const label_token = opt_label_token orelse return null;
+
+ const after_colon = p.tok_i;
+ if (try p.parseTypeExpr()) |_| {
+ const a = try p.parseByteAlign();
+ const b = try p.parseAddrSpace();
+ const c = try p.parseLinkSection();
+ const d = if (p.eatToken(.equal) == null) null else try p.expectExpr();
+ if (a != null or b != null or c != null or d != null) {
+ return p.failMsg(.{ .tag = .expected_var_const, .token = label_token });
}
- return p.failMsg(.{ .tag = .expected_labelable, .token = after_colon });
}
-
- return null_node;
+ return p.failMsg(.{ .tag = .expected_labelable, .token = after_colon });
}
/// LoopStatement <- KEYWORD_inline? (ForStatement / WhileStatement)
-fn parseLoopStatement(p: *Parse) !Node.Index {
+fn parseLoopStatement(p: *Parse) !?Node.Index {
const inline_token = p.eatToken(.keyword_inline);
- const for_statement = try p.parseForStatement();
- if (for_statement != 0) return for_statement;
+ if (try p.parseForStatement()) |for_statement| return for_statement;
+ if (try p.parseWhileStatement()) |while_statement| return while_statement;
- const while_statement = try p.parseWhileStatement();
- if (while_statement != 0) return while_statement;
-
- if (inline_token == null) return null_node;
+ if (inline_token == null) return null;
// If we've seen "inline", there should have been a "for" or "while"
return p.fail(.expected_inlinable);
@@ -1285,8 +1267,8 @@ fn parseLoopStatement(p: *Parse) !Node.Index {
/// ForStatement
/// <- ForPrefix BlockExpr ( KEYWORD_else Statement )?
/// / ForPrefix AssignExpr ( SEMICOLON / KEYWORD_else Statement )
-fn parseForStatement(p: *Parse) !Node.Index {
- const for_token = p.eatToken(.keyword_for) orelse return null_node;
+fn parseForStatement(p: *Parse) !?Node.Index {
+ const for_token = p.eatToken(.keyword_for) orelse return null;
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
@@ -1296,11 +1278,10 @@ fn parseForStatement(p: *Parse) !Node.Index {
var seen_semicolon = false;
const then_expr = blk: {
const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) break :blk block_expr;
- const assign_expr = try p.parseAssignExpr();
- if (assign_expr == 0) {
+ if (block_expr) |block| break :blk block;
+ const assign_expr = try p.parseAssignExpr() orelse {
return p.fail(.expected_block_or_assignment);
- }
+ };
if (p.eatToken(.semicolon)) |_| {
seen_semicolon = true;
break :blk assign_expr;
@@ -1316,28 +1297,25 @@ fn parseForStatement(p: *Parse) !Node.Index {
has_else = true;
} else if (inputs == 1) {
if (else_required) try p.warn(.expected_semi_or_else);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
- .data = .{
- .lhs = p.scratch.items[scratch_top],
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ p.scratch.items[scratch_top],
+ then_expr,
+ } },
});
} else {
if (else_required) try p.warn(.expected_semi_or_else);
try p.scratch.append(p.gpa, then_expr);
}
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"for",
.main_token = for_token,
- .data = .{
- .lhs = (try p.listToSpan(p.scratch.items[scratch_top..])).start,
- .rhs = @as(u32, @bitCast(Node.For{
- .inputs = @as(u31, @intCast(inputs)),
- .has_else = has_else,
- })),
- },
+ .data = .{ .@"for" = .{
+ (try p.listToSpan(p.scratch.items[scratch_top..])).start,
+ .{ .inputs = @intCast(inputs), .has_else = has_else },
+ } },
});
}
@@ -1346,8 +1324,8 @@ fn parseForStatement(p: *Parse) !Node.Index {
/// WhileStatement
/// <- WhilePrefix BlockExpr ( KEYWORD_else Payload? Statement )?
/// / WhilePrefix AssignExpr ( SEMICOLON / KEYWORD_else Payload? Statement )
-fn parseWhileStatement(p: *Parse) !Node.Index {
- const while_token = p.eatToken(.keyword_while) orelse return null_node;
+fn parseWhileStatement(p: *Parse) !?Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null;
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -1359,32 +1337,31 @@ fn parseWhileStatement(p: *Parse) !Node.Index {
var else_required = false;
const then_expr = blk: {
const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) break :blk block_expr;
- const assign_expr = try p.parseAssignExpr();
- if (assign_expr == 0) {
+ if (block_expr) |block| break :blk block;
+ const assign_expr = try p.parseAssignExpr() orelse {
return p.fail(.expected_block_or_assignment);
- }
+ };
if (p.eatToken(.semicolon)) |_| {
- if (cont_expr == 0) {
- return p.addNode(.{
+ if (cont_expr == null) {
+ return try p.addNode(.{
.tag = .while_simple,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = assign_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ assign_expr,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .while_cont,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.WhileCont{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr.?,
.then_expr = assign_expr,
}),
- },
+ } },
});
}
}
@@ -1395,84 +1372,77 @@ fn parseWhileStatement(p: *Parse) !Node.Index {
if (else_required) {
try p.warn(.expected_semi_or_else);
}
- if (cont_expr == 0) {
- return p.addNode(.{
+ if (cont_expr == null) {
+ return try p.addNode(.{
.tag = .while_simple,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ then_expr,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .while_cont,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.WhileCont{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr.?,
.then_expr = then_expr,
}),
- },
+ } },
});
}
};
_ = try p.parsePayload();
const else_expr = try p.expectStatement(false);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"while",
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.While{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition, try p.addExtra(Node.While{
+ .cont_expr = .fromOptional(cont_expr),
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
}
/// BlockExprStatement
/// <- BlockExpr
/// / AssignExpr SEMICOLON
-fn parseBlockExprStatement(p: *Parse) !Node.Index {
+fn parseBlockExprStatement(p: *Parse) !?Node.Index {
const block_expr = try p.parseBlockExpr();
- if (block_expr != 0) {
- return block_expr;
- }
+ if (block_expr) |expr| return expr;
const assign_expr = try p.parseAssignExpr();
- if (assign_expr != 0) {
+ if (assign_expr) |expr| {
try p.expectSemicolon(.expected_semi_after_stmt, true);
- return assign_expr;
+ return expr;
}
- return null_node;
+ return null;
}
fn expectBlockExprStatement(p: *Parse) !Node.Index {
- const node = try p.parseBlockExprStatement();
- if (node == 0) {
- return p.fail(.expected_block_or_expr);
- }
- return node;
+ return try p.parseBlockExprStatement() orelse return p.fail(.expected_block_or_expr);
}
/// BlockExpr <- BlockLabel? Block
-fn parseBlockExpr(p: *Parse) Error!Node.Index {
- switch (p.token_tags[p.tok_i]) {
+fn parseBlockExpr(p: *Parse) Error!?Node.Index {
+ switch (p.tokenTag(p.tok_i)) {
.identifier => {
- if (p.token_tags[p.tok_i + 1] == .colon and
- p.token_tags[p.tok_i + 2] == .l_brace)
+ if (p.tokenTag(p.tok_i + 1) == .colon and
+ p.tokenTag(p.tok_i + 2) == .l_brace)
{
p.tok_i += 2;
return p.parseBlock();
} else {
- return null_node;
+ return null;
}
},
.l_brace => return p.parseBlock(),
- else => return null_node,
+ else => return null,
}
}
@@ -1497,38 +1467,36 @@ fn parseBlockExpr(p: *Parse) Error!Node.Index {
/// / PLUSPERCENTEQUAL
/// / MINUSPERCENTEQUAL
/// / EQUAL
-fn parseAssignExpr(p: *Parse) !Node.Index {
- const expr = try p.parseExpr();
- if (expr == 0) return null_node;
- return p.finishAssignExpr(expr);
+fn parseAssignExpr(p: *Parse) !?Node.Index {
+ const expr = try p.parseExpr() orelse return null;
+ return try p.finishAssignExpr(expr);
}
/// SingleAssignExpr <- Expr (AssignOp Expr)?
-fn parseSingleAssignExpr(p: *Parse) !Node.Index {
- const lhs = try p.parseExpr();
- if (lhs == 0) return null_node;
- const tag = assignOpNode(p.token_tags[p.tok_i]) orelse return lhs;
- return p.addNode(.{
+fn parseSingleAssignExpr(p: *Parse) !?Node.Index {
+ const lhs = try p.parseExpr() orelse return null;
+ const tag = assignOpNode(p.tokenTag(p.tok_i)) orelse return lhs;
+ return try p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = try p.expectExpr(),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try p.expectExpr(),
+ } },
});
}
fn finishAssignExpr(p: *Parse, lhs: Node.Index) !Node.Index {
- const tok = p.token_tags[p.tok_i];
+ const tok = p.tokenTag(p.tok_i);
if (tok == .comma) return p.finishAssignDestructureExpr(lhs);
const tag = assignOpNode(tok) orelse return lhs;
return p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = try p.expectExpr(),
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ try p.expectExpr(),
+ } },
});
}
@@ -1574,48 +1542,35 @@ fn finishAssignDestructureExpr(p: *Parse, first_lhs: Node.Index) !Node.Index {
const lhs_count = p.scratch.items.len - scratch_top;
assert(lhs_count > 1); // we already had first_lhs, and must have at least one more lvalue
- const extra_start = p.extra_data.items.len;
+ const extra_start: ExtraIndex = @enumFromInt(p.extra_data.items.len);
try p.extra_data.ensureUnusedCapacity(p.gpa, lhs_count + 1);
p.extra_data.appendAssumeCapacity(@intCast(lhs_count));
- p.extra_data.appendSliceAssumeCapacity(p.scratch.items[scratch_top..]);
+ p.extra_data.appendSliceAssumeCapacity(@ptrCast(p.scratch.items[scratch_top..]));
return p.addNode(.{
.tag = .assign_destructure,
.main_token = equal_token,
- .data = .{
- .lhs = @intCast(extra_start),
- .rhs = rhs,
- },
+ .data = .{ .extra_and_node = .{
+ extra_start,
+ rhs,
+ } },
});
}
fn expectSingleAssignExpr(p: *Parse) !Node.Index {
- const expr = try p.parseSingleAssignExpr();
- if (expr == 0) {
- return p.fail(.expected_expr_or_assignment);
- }
- return expr;
+ return try p.parseSingleAssignExpr() orelse return p.fail(.expected_expr_or_assignment);
}
fn expectAssignExpr(p: *Parse) !Node.Index {
- const expr = try p.parseAssignExpr();
- if (expr == 0) {
- return p.fail(.expected_expr_or_assignment);
- }
- return expr;
+ return try p.parseAssignExpr() orelse return p.fail(.expected_expr_or_assignment);
}
-fn parseExpr(p: *Parse) Error!Node.Index {
+fn parseExpr(p: *Parse) Error!?Node.Index {
return p.parseExprPrecedence(0);
}
fn expectExpr(p: *Parse) Error!Node.Index {
- const node = try p.parseExpr();
- if (node == 0) {
- return p.fail(.expected_expr);
- } else {
- return node;
- }
+ return try p.parseExpr() orelse return p.fail(.expected_expr);
}
const Assoc = enum {
@@ -1671,17 +1626,14 @@ const operTable = std.enums.directEnumArrayDefault(Token.Tag, OperInfo, .{ .prec
.asterisk_pipe = .{ .prec = 70, .tag = .mul_sat },
});
-fn parseExprPrecedence(p: *Parse, min_prec: i32) Error!Node.Index {
+fn parseExprPrecedence(p: *Parse, min_prec: i32) Error!?Node.Index {
assert(min_prec >= 0);
- var node = try p.parsePrefixExpr();
- if (node == 0) {
- return null_node;
- }
+ var node = try p.parsePrefixExpr() orelse return null;
var banned_prec: i8 = -1;
while (true) {
- const tok_tag = p.token_tags[p.tok_i];
+ const tok_tag = p.tokenTag(p.tok_i);
const info = operTable[@as(usize, @intCast(@intFromEnum(tok_tag)))];
if (info.prec < min_prec) {
break;
@@ -1695,16 +1647,15 @@ fn parseExprPrecedence(p: *Parse, min_prec: i32) Error!Node.Index {
if (tok_tag == .keyword_catch) {
_ = try p.parsePayload();
}
- const rhs = try p.parseExprPrecedence(info.prec + 1);
- if (rhs == 0) {
+ const rhs = try p.parseExprPrecedence(info.prec + 1) orelse {
try p.warn(.expected_expr);
return node;
- }
+ };
{
const tok_len = tok_tag.lexeme().?.len;
- const char_before = p.source[p.token_starts[oper_token] - 1];
- const char_after = p.source[p.token_starts[oper_token] + tok_len];
+ const char_before = p.source[p.tokenStart(oper_token) - 1];
+ const char_after = p.source[p.tokenStart(oper_token) + tok_len];
if (tok_tag == .ampersand and char_after == '&') {
// without types we don't know if '&&' was intended as 'bitwise_and address_of', or a c-style logical_and
// The best the parser can do is recommend changing it to 'and' or ' & &'
@@ -1717,10 +1668,7 @@ fn parseExprPrecedence(p: *Parse, min_prec: i32) Error!Node.Index {
node = try p.addNode(.{
.tag = info.tag,
.main_token = oper_token,
- .data = .{
- .lhs = node,
- .rhs = rhs,
- },
+ .data = .{ .node_and_node = .{ node, rhs } },
});
if (info.assoc == Assoc.none) {
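
The loop above is standard precedence climbing: parse a prefix expression, then fold in binary operators whose precedence clears `min_prec`, recursing with `info.prec + 1` to make equal-precedence operators left-associative, and bailing out with the partial result when the right-hand side is missing. The control flow reduced to a toy evaluator over single digits, `+`, and `*` (illustrative only, no AST building):

const std = @import("std");

fn parseExpr(toks: []const u8, i: *usize, min_prec: u8) ?i64 {
    var lhs: i64 = blk: {
        if (i.* >= toks.len or toks[i.*] < '0' or toks[i.*] > '9') return null;
        defer i.* += 1;
        break :blk toks[i.*] - '0';
    };
    while (i.* < toks.len) {
        const prec: u8 = switch (toks[i.*]) {
            '+' => 1,
            '*' => 2,
            else => break,
        };
        if (prec < min_prec) break;
        const op = toks[i.*];
        i.* += 1;
        // prec + 1 forces the recursion to stop at operators of the
        // same level, yielding left associativity.
        const rhs = parseExpr(toks, i, prec + 1) orelse break;
        lhs = if (op == '+') lhs + rhs else lhs * rhs;
    }
    return lhs;
}

test "precedence climbing evaluates 1+2*3" {
    var i: usize = 0;
    try std.testing.expectEqual(@as(i64, 7), parseExpr("1+2*3", &i, 0).?);
}
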
@@ -1741,8 +1689,8 @@ fn parseExprPrecedence(p: *Parse, min_prec: i32) Error!Node.Index {
/// / AMPERSAND
/// / KEYWORD_try
/// / KEYWORD_await
-fn parsePrefixExpr(p: *Parse) Error!Node.Index {
- const tag: Node.Tag = switch (p.token_tags[p.tok_i]) {
+fn parsePrefixExpr(p: *Parse) Error!?Node.Index {
+ const tag: Node.Tag = switch (p.tokenTag(p.tok_i)) {
.bang => .bool_not,
.minus => .negation,
.tilde => .bit_not,
@@ -1752,22 +1700,15 @@ fn parsePrefixExpr(p: *Parse) Error!Node.Index {
.keyword_await => .@"await",
else => return p.parsePrimaryExpr(),
};
- return p.addNode(.{
+ return try p.addNode(.{
.tag = tag,
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectPrefixExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectPrefixExpr() },
});
}
fn expectPrefixExpr(p: *Parse) Error!Node.Index {
- const node = try p.parsePrefixExpr();
- if (node == 0) {
- return p.fail(.expected_prefix_expr);
- }
- return node;
+ return try p.parsePrefixExpr() orelse return p.fail(.expected_prefix_expr);
}
/// TypeExpr <- PrefixTypeOp* ErrorUnionExpr
@@ -1787,67 +1728,64 @@ fn expectPrefixExpr(p: *Parse) Error!Node.Index {
/// / LBRACKET ASTERISK (LETTERC / COLON Expr)? RBRACKET
///
/// ArrayTypeStart <- LBRACKET Expr (COLON Expr)? RBRACKET
-fn parseTypeExpr(p: *Parse) Error!Node.Index {
- switch (p.token_tags[p.tok_i]) {
- .question_mark => return p.addNode(.{
+fn parseTypeExpr(p: *Parse) Error!?Node.Index {
+ switch (p.tokenTag(p.tok_i)) {
+ .question_mark => return try p.addNode(.{
.tag = .optional_type,
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectTypeExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectTypeExpr() },
}),
- .keyword_anyframe => switch (p.token_tags[p.tok_i + 1]) {
- .arrow => return p.addNode(.{
+ .keyword_anyframe => switch (p.tokenTag(p.tok_i + 1)) {
+ .arrow => return try p.addNode(.{
.tag = .anyframe_type,
.main_token = p.nextToken(),
- .data = .{
- .lhs = p.nextToken(),
- .rhs = try p.expectTypeExpr(),
- },
+ .data = .{ .token_and_node = .{
+ p.nextToken(),
+ try p.expectTypeExpr(),
+ } },
}),
- else => return p.parseErrorUnionExpr(),
+ else => return try p.parseErrorUnionExpr(),
},
.asterisk => {
const asterisk = p.nextToken();
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
- if (mods.bit_range_start != 0) {
- return p.addNode(.{
+ if (mods.bit_range_start != .none) {
+ return try p.addNode(.{
.tag = .ptr_type_bit_range,
.main_token = asterisk,
- .data = .{
- .lhs = try p.addExtra(Node.PtrTypeBitRange{
- .sentinel = 0,
- .align_node = mods.align_node,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = .none,
+ .align_node = mods.align_node.unwrap().?,
.addrspace_node = mods.addrspace_node,
- .bit_range_start = mods.bit_range_start,
- .bit_range_end = mods.bit_range_end,
+ .bit_range_start = mods.bit_range_start.unwrap().?,
+ .bit_range_end = mods.bit_range_end.unwrap().?,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
- } else if (mods.addrspace_node != 0) {
- return p.addNode(.{
+ } else if (mods.addrspace_node != .none) {
+ return try p.addNode(.{
.tag = .ptr_type,
.main_token = asterisk,
- .data = .{
- .lhs = try p.addExtra(Node.PtrType{
- .sentinel = 0,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrType{
+ .sentinel = .none,
.align_node = mods.align_node,
.addrspace_node = mods.addrspace_node,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .ptr_type_aligned,
.main_token = asterisk,
- .data = .{
- .lhs = mods.align_node,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ mods.align_node,
+ elem_type,
+ } },
});
}
},
@@ -1856,61 +1794,61 @@ fn parseTypeExpr(p: *Parse) Error!Node.Index {
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
const inner: Node.Index = inner: {
- if (mods.bit_range_start != 0) {
+ if (mods.bit_range_start != .none) {
break :inner try p.addNode(.{
.tag = .ptr_type_bit_range,
.main_token = asterisk,
- .data = .{
- .lhs = try p.addExtra(Node.PtrTypeBitRange{
- .sentinel = 0,
- .align_node = mods.align_node,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = .none,
+ .align_node = mods.align_node.unwrap().?,
.addrspace_node = mods.addrspace_node,
- .bit_range_start = mods.bit_range_start,
- .bit_range_end = mods.bit_range_end,
+ .bit_range_start = mods.bit_range_start.unwrap().?,
+ .bit_range_end = mods.bit_range_end.unwrap().?,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
- } else if (mods.addrspace_node != 0) {
+ } else if (mods.addrspace_node != .none) {
break :inner try p.addNode(.{
.tag = .ptr_type,
.main_token = asterisk,
- .data = .{
- .lhs = try p.addExtra(Node.PtrType{
- .sentinel = 0,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrType{
+ .sentinel = .none,
.align_node = mods.align_node,
.addrspace_node = mods.addrspace_node,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
} else {
break :inner try p.addNode(.{
.tag = .ptr_type_aligned,
.main_token = asterisk,
- .data = .{
- .lhs = mods.align_node,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ mods.align_node,
+ elem_type,
+ } },
});
}
};
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .ptr_type_aligned,
.main_token = asterisk,
- .data = .{
- .lhs = 0,
- .rhs = inner,
- },
+ .data = .{ .opt_node_and_node = .{
+ .none,
+ inner,
+ } },
});
},
- .l_bracket => switch (p.token_tags[p.tok_i + 1]) {
+ .l_bracket => switch (p.tokenTag(p.tok_i + 1)) {
.asterisk => {
const l_bracket = p.nextToken();
_ = p.nextToken();
- var sentinel: Node.Index = 0;
+ var sentinel: ?Node.Index = null;
if (p.eatToken(.identifier)) |ident| {
- const ident_slice = p.source[p.token_starts[ident]..p.token_starts[ident + 1]];
+ const ident_slice = p.source[p.tokenStart(ident)..p.tokenStart(ident + 1)];
if (!std.mem.eql(u8, std.mem.trimRight(u8, ident_slice, &std.ascii.whitespace), "c")) {
p.tok_i -= 1;
}
@@ -1920,107 +1858,107 @@ fn parseTypeExpr(p: *Parse) Error!Node.Index {
_ = try p.expectToken(.r_bracket);
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
- if (mods.bit_range_start == 0) {
- if (sentinel == 0 and mods.addrspace_node == 0) {
- return p.addNode(.{
+ if (mods.bit_range_start == .none) {
+ if (sentinel == null and mods.addrspace_node == .none) {
+ return try p.addNode(.{
.tag = .ptr_type_aligned,
.main_token = l_bracket,
- .data = .{
- .lhs = mods.align_node,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ mods.align_node,
+ elem_type,
+ } },
});
- } else if (mods.align_node == 0 and mods.addrspace_node == 0) {
- return p.addNode(.{
+ } else if (mods.align_node == .none and mods.addrspace_node == .none) {
+ return try p.addNode(.{
.tag = .ptr_type_sentinel,
.main_token = l_bracket,
- .data = .{
- .lhs = sentinel,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ .fromOptional(sentinel),
+ elem_type,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .ptr_type,
.main_token = l_bracket,
- .data = .{
- .lhs = try p.addExtra(Node.PtrType{
- .sentinel = sentinel,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrType{
+ .sentinel = .fromOptional(sentinel),
.align_node = mods.align_node,
.addrspace_node = mods.addrspace_node,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
}
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .ptr_type_bit_range,
.main_token = l_bracket,
- .data = .{
- .lhs = try p.addExtra(Node.PtrTypeBitRange{
- .sentinel = sentinel,
- .align_node = mods.align_node,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrTypeBitRange{
+ .sentinel = .fromOptional(sentinel),
+ .align_node = mods.align_node.unwrap().?,
.addrspace_node = mods.addrspace_node,
- .bit_range_start = mods.bit_range_start,
- .bit_range_end = mods.bit_range_end,
+ .bit_range_start = mods.bit_range_start.unwrap().?,
+ .bit_range_end = mods.bit_range_end.unwrap().?,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
}
},
else => {
const lbracket = p.nextToken();
const len_expr = try p.parseExpr();
- const sentinel: Node.Index = if (p.eatToken(.colon)) |_|
+ const sentinel: ?Node.Index = if (p.eatToken(.colon)) |_|
try p.expectExpr()
else
- 0;
+ null;
_ = try p.expectToken(.r_bracket);
- if (len_expr == 0) {
+ if (len_expr == null) {
const mods = try p.parsePtrModifiers();
const elem_type = try p.expectTypeExpr();
- if (mods.bit_range_start != 0) {
+ if (mods.bit_range_start.unwrap()) |bit_range_start| {
try p.warnMsg(.{
.tag = .invalid_bit_range,
- .token = p.nodes.items(.main_token)[mods.bit_range_start],
+ .token = p.nodeMainToken(bit_range_start),
});
}
- if (sentinel == 0 and mods.addrspace_node == 0) {
- return p.addNode(.{
+ if (sentinel == null and mods.addrspace_node == .none) {
+ return try p.addNode(.{
.tag = .ptr_type_aligned,
.main_token = lbracket,
- .data = .{
- .lhs = mods.align_node,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ mods.align_node,
+ elem_type,
+ } },
});
- } else if (mods.align_node == 0 and mods.addrspace_node == 0) {
- return p.addNode(.{
+ } else if (mods.align_node == .none and mods.addrspace_node == .none) {
+ return try p.addNode(.{
.tag = .ptr_type_sentinel,
.main_token = lbracket,
- .data = .{
- .lhs = sentinel,
- .rhs = elem_type,
- },
+ .data = .{ .opt_node_and_node = .{
+ .fromOptional(sentinel),
+ elem_type,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .ptr_type,
.main_token = lbracket,
- .data = .{
- .lhs = try p.addExtra(Node.PtrType{
- .sentinel = sentinel,
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(Node.PtrType{
+ .sentinel = .fromOptional(sentinel),
.align_node = mods.align_node,
.addrspace_node = mods.addrspace_node,
}),
- .rhs = elem_type,
- },
+ elem_type,
+ } },
});
}
} else {
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_align,
.keyword_const,
.keyword_volatile,
@@ -2030,26 +1968,25 @@ fn parseTypeExpr(p: *Parse) Error!Node.Index {
else => {},
}
const elem_type = try p.expectTypeExpr();
- if (sentinel == 0) {
- return p.addNode(.{
+ if (sentinel == null) {
+ return try p.addNode(.{
.tag = .array_type,
.main_token = lbracket,
- .data = .{
- .lhs = len_expr,
- .rhs = elem_type,
- },
+ .data = .{ .node_and_node = .{
+ len_expr.?,
+ elem_type,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .array_type_sentinel,
.main_token = lbracket,
- .data = .{
- .lhs = len_expr,
- .rhs = try p.addExtra(Node.ArrayTypeSentinel{
- .sentinel = sentinel,
+ .data = .{ .node_and_extra = .{
+ len_expr.?,
+ try p.addExtra(Node.ArrayTypeSentinel{
+ .sentinel = sentinel.?,
.elem_type = elem_type,
}),
- },
+ } },
});
}
}
@@ -2060,11 +1997,7 @@ fn parseTypeExpr(p: *Parse) Error!Node.Index {
}
fn expectTypeExpr(p: *Parse) Error!Node.Index {
- const node = try p.parseTypeExpr();
- if (node == 0) {
- return p.fail(.expected_type_expr);
- }
- return node;
+ return try p.parseTypeExpr() orelse return p.fail(.expected_type_expr);
}
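
expectTypeExpr is representative of how every expect*/parse* pair shrinks: parse* now returns `Error!?Node.Index`, so "construct not present" flows through `orelse` instead of a comparison against node 0. A runnable sketch of the convention, with toy names:

const std = @import("std");

const Error = error{ExpectedTypeExpr};

// parse* returns null when the construct is simply not there ...
fn parseThing(present: bool) Error!?u32 {
    return if (present) 42 else null;
}

// ... and expect* turns absence into a parse error in one line.
fn expectThing(present: bool) Error!u32 {
    return try parseThing(present) orelse return Error.ExpectedTypeExpr;
}

pub fn main() !void {
    std.debug.print("{d}\n", .{try expectThing(true)});
}
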
/// PrimaryExpr
@@ -2079,169 +2012,135 @@ fn expectTypeExpr(p: *Parse) Error!Node.Index {
/// / BlockLabel? LoopExpr
/// / Block
/// / CurlySuffixExpr
-fn parsePrimaryExpr(p: *Parse) !Node.Index {
- switch (p.token_tags[p.tok_i]) {
- .keyword_asm => return p.expectAsmExpr(),
- .keyword_if => return p.parseIfExpr(),
+fn parsePrimaryExpr(p: *Parse) !?Node.Index {
+ switch (p.tokenTag(p.tok_i)) {
+ .keyword_asm => return try p.expectAsmExpr(),
+ .keyword_if => return try p.parseIfExpr(),
.keyword_break => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"break",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.parseBreakLabel(),
- .rhs = try p.parseExpr(),
- },
+ .data = .{ .opt_token_and_opt_node = .{
+ try p.parseBreakLabel(),
+ .fromOptional(try p.parseExpr()),
+ } },
});
},
.keyword_continue => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"continue",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.parseBreakLabel(),
- .rhs = try p.parseExpr(),
- },
+ .data = .{ .opt_token_and_opt_node = .{
+ try p.parseBreakLabel(),
+ .fromOptional(try p.parseExpr()),
+ } },
});
},
.keyword_comptime => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"comptime",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectExpr() },
});
},
.keyword_nosuspend => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"nosuspend",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectExpr() },
});
},
.keyword_resume => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"resume",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectExpr() },
});
},
.keyword_return => {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"return",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.parseExpr(),
- .rhs = undefined,
- },
+ .data = .{ .opt_node = .fromOptional(try p.parseExpr()) },
});
},
.identifier => {
- if (p.token_tags[p.tok_i + 1] == .colon) {
- switch (p.token_tags[p.tok_i + 2]) {
+ if (p.tokenTag(p.tok_i + 1) == .colon) {
+ switch (p.tokenTag(p.tok_i + 2)) {
.keyword_inline => {
p.tok_i += 3;
- switch (p.token_tags[p.tok_i]) {
- .keyword_for => return p.parseFor(expectExpr),
- .keyword_while => return p.parseWhileExpr(),
+ switch (p.tokenTag(p.tok_i)) {
+ .keyword_for => return try p.parseFor(expectExpr),
+ .keyword_while => return try p.parseWhileExpr(),
else => return p.fail(.expected_inlinable),
}
},
.keyword_for => {
p.tok_i += 2;
- return p.parseFor(expectExpr);
+ return try p.parseFor(expectExpr);
},
.keyword_while => {
p.tok_i += 2;
- return p.parseWhileExpr();
+ return try p.parseWhileExpr();
},
.l_brace => {
p.tok_i += 2;
- return p.parseBlock();
+ return try p.parseBlock();
},
- else => return p.parseCurlySuffixExpr(),
+ else => return try p.parseCurlySuffixExpr(),
}
} else {
- return p.parseCurlySuffixExpr();
+ return try p.parseCurlySuffixExpr();
}
},
.keyword_inline => {
p.tok_i += 1;
- switch (p.token_tags[p.tok_i]) {
- .keyword_for => return p.parseFor(expectExpr),
- .keyword_while => return p.parseWhileExpr(),
+ switch (p.tokenTag(p.tok_i)) {
+ .keyword_for => return try p.parseFor(expectExpr),
+ .keyword_while => return try p.parseWhileExpr(),
else => return p.fail(.expected_inlinable),
}
},
- .keyword_for => return p.parseFor(expectExpr),
- .keyword_while => return p.parseWhileExpr(),
- .l_brace => return p.parseBlock(),
- else => return p.parseCurlySuffixExpr(),
+ .keyword_for => return try p.parseFor(expectExpr),
+ .keyword_while => return try p.parseWhileExpr(),
+ .l_brace => return try p.parseBlock(),
+ else => return try p.parseCurlySuffixExpr(),
}
}
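
Why every `return p.addNode(...)` gains a `try` in these hunks: `addNode` returns `Allocator.Error!Node.Index`, and Zig does not coerce one error union into another whose payload type differs, so the result cannot directly become the `!?Node.Index` these functions now return. `try` unwraps to `Node.Index`, which then coerces through the optional. A minimal reproduction of the rule:

const std = @import("std");

fn addNode() error{OutOfMemory}!u32 {
    return 7;
}

fn parseSomething() error{OutOfMemory}!?u32 {
    // `return addNode();` would not compile here: error{OutOfMemory}!u32
    // does not coerce to error{OutOfMemory}!?u32. Unwrap first:
    return try addNode();
}

pub fn main() !void {
    std.debug.print("{?d}\n", .{try parseSomething()});
}
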
/// IfExpr <- IfPrefix Expr (KEYWORD_else Payload? Expr)?
-fn parseIfExpr(p: *Parse) !Node.Index {
- return p.parseIf(expectExpr);
+fn parseIfExpr(p: *Parse) !?Node.Index {
+ return try p.parseIf(expectExpr);
}
/// Block <- LBRACE Statement* RBRACE
-fn parseBlock(p: *Parse) !Node.Index {
- const lbrace = p.eatToken(.l_brace) orelse return null_node;
+fn parseBlock(p: *Parse) !?Node.Index {
+ const lbrace = p.eatToken(.l_brace) orelse return null;
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
while (true) {
- if (p.token_tags[p.tok_i] == .r_brace) break;
- const statement = try p.expectStatementRecoverable();
- if (statement == 0) break;
+ if (p.tokenTag(p.tok_i) == .r_brace) break;
+ const statement = try p.expectStatementRecoverable() orelse break;
try p.scratch.append(p.gpa, statement);
}
_ = try p.expectToken(.r_brace);
- const semicolon = (p.token_tags[p.tok_i - 2] == .semicolon);
const statements = p.scratch.items[scratch_top..];
- switch (statements.len) {
- 0 => return p.addNode(.{
- .tag = .block_two,
- .main_token = lbrace,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
- }),
- 1 => return p.addNode(.{
+ const semicolon = statements.len != 0 and p.tokenTag(p.tok_i - 2) == .semicolon;
+ if (statements.len <= 2) {
+ return try p.addNode(.{
.tag = if (semicolon) .block_two_semicolon else .block_two,
.main_token = lbrace,
- .data = .{
- .lhs = statements[0],
- .rhs = 0,
- },
- }),
- 2 => return p.addNode(.{
- .tag = if (semicolon) .block_two_semicolon else .block_two,
+ .data = .{ .opt_node_and_opt_node = .{
+ if (statements.len >= 1) statements[0].toOptional() else .none,
+ if (statements.len >= 2) statements[1].toOptional() else .none,
+ } },
+ });
+ } else {
+ return try p.addNode(.{
+ .tag = if (semicolon) .block_semicolon else .block,
.main_token = lbrace,
- .data = .{
- .lhs = statements[0],
- .rhs = statements[1],
- },
- }),
- else => {
- const span = try p.listToSpan(statements);
- return p.addNode(.{
- .tag = if (semicolon) .block_semicolon else .block,
- .main_token = lbrace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
- });
- },
+ .data = .{ .extra_range = try p.listToSpan(statements) },
+ });
}
}
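
parseBlock shows the other recurring simplification: with optional operands available, the separate 0-, 1-, and 2-statement arms collapse into a single `opt_node_and_opt_node` arm, and the `semicolon` lookback is now guarded by `statements.len != 0` so an empty block no longer inspects tokens before its opening brace. The bounds-checked selection in isolation:

const std = @import("std");

pub fn main() void {
    const all = [_]u32{ 10, 20, 30 };
    const statements: []const u32 = all[0..1];
    // One arm covers lengths 0, 1, and 2 by filling the missing slots
    // with null instead of duplicating the node construction.
    const first: ?u32 = if (statements.len >= 1) statements[0] else null;
    const second: ?u32 = if (statements.len >= 2) statements[1] else null;
    std.debug.print("{?d} {?d}\n", .{ first, second });
}
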
@@ -2260,15 +2159,15 @@ fn forPrefix(p: *Parse) Error!usize {
input = try p.addNode(.{
.tag = .for_range,
.main_token = ellipsis,
- .data = .{
- .lhs = input,
- .rhs = try p.parseExpr(),
- },
+ .data = .{ .node_and_opt_node = .{
+ input,
+ .fromOptional(try p.parseExpr()),
+ } },
});
}
try p.scratch.append(p.gpa, input);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
@@ -2297,7 +2196,7 @@ fn forPrefix(p: *Parse) Error!usize {
try p.warnMsg(.{ .tag = .extra_for_capture, .token = identifier });
warned_excess = true;
}
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.pipe => {
p.tok_i += 1;
@@ -2311,7 +2210,7 @@ fn forPrefix(p: *Parse) Error!usize {
if (captures < inputs) {
const index = p.scratch.items.len - captures;
- const input = p.nodes.items(.main_token)[p.scratch.items[index]];
+ const input = p.nodeMainToken(p.scratch.items[index]);
try p.warnMsg(.{ .tag = .for_input_not_captured, .token = input });
}
return inputs;
@@ -2320,8 +2219,8 @@ fn forPrefix(p: *Parse) Error!usize {
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
///
/// WhileExpr <- WhilePrefix Expr (KEYWORD_else Payload? Expr)?
-fn parseWhileExpr(p: *Parse) !Node.Index {
- const while_token = p.eatToken(.keyword_while) orelse return null_node;
+fn parseWhileExpr(p: *Parse) !?Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null;
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -2330,42 +2229,42 @@ fn parseWhileExpr(p: *Parse) !Node.Index {
const then_expr = try p.expectExpr();
_ = p.eatToken(.keyword_else) orelse {
- if (cont_expr == 0) {
- return p.addNode(.{
+ if (cont_expr == null) {
+ return try p.addNode(.{
.tag = .while_simple,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ then_expr,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .while_cont,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.WhileCont{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr.?,
.then_expr = then_expr,
}),
- },
+ } },
});
}
};
_ = try p.parsePayload();
const else_expr = try p.expectExpr();
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"while",
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.While{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.While{
+ .cont_expr = .fromOptional(cont_expr),
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
}
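
`.fromOptional(cont_expr)` above bridges the two absence encodings in play: plain `?Node.Index` in parser locals, and `Node.OptionalIndex`, the compact in-band encoding stored in AST data. A simplified model of that type, assuming the real one reserves the maximum integer for `.none`:

const std = @import("std");

const Index = enum(u32) { _ };

const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn fromOptional(opt: ?Index) OptionalIndex {
        return if (opt) |i| @enumFromInt(@intFromEnum(i)) else .none;
    }

    fn unwrap(oi: OptionalIndex) ?Index {
        return if (oi == .none) null else @enumFromInt(@intFromEnum(oi));
    }
};

pub fn main() void {
    const cont_expr: ?Index = @enumFromInt(5);
    const stored: OptionalIndex = .fromOptional(cont_expr);
    std.debug.print("{d}\n", .{@intFromEnum(stored.unwrap().?)});
}
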
@@ -2375,9 +2274,8 @@ fn parseWhileExpr(p: *Parse) !Node.Index {
/// <- LBRACE FieldInit (COMMA FieldInit)* COMMA? RBRACE
/// / LBRACE Expr (COMMA Expr)* COMMA? RBRACE
/// / LBRACE RBRACE
-fn parseCurlySuffixExpr(p: *Parse) !Node.Index {
- const lhs = try p.parseTypeExpr();
- if (lhs == 0) return null_node;
+fn parseCurlySuffixExpr(p: *Parse) !?Node.Index {
+ const lhs = try p.parseTypeExpr() orelse return null;
const lbrace = p.eatToken(.l_brace) orelse return lhs;
// If there are 0 or 1 items, we can use ArrayInitOne/StructInitOne;
@@ -2385,11 +2283,11 @@ fn parseCurlySuffixExpr(p: *Parse) !Node.Index {
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
- const field_init = try p.parseFieldInit();
- if (field_init != 0) {
+ const opt_field_init = try p.parseFieldInit();
+ if (opt_field_init) |field_init| {
try p.scratch.append(p.gpa, field_init);
while (true) {
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_brace => {
p.tok_i += 1;
@@ -2403,26 +2301,27 @@ fn parseCurlySuffixExpr(p: *Parse) !Node.Index {
const next = try p.expectFieldInit();
try p.scratch.append(p.gpa, next);
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const inits = p.scratch.items[scratch_top..];
- switch (inits.len) {
- 0 => unreachable,
- 1 => return p.addNode(.{
+ std.debug.assert(inits.len != 0);
+ if (inits.len <= 1) {
+ return try p.addNode(.{
.tag = if (comma) .struct_init_one_comma else .struct_init_one,
.main_token = lbrace,
- .data = .{
- .lhs = lhs,
- .rhs = inits[0],
- },
- }),
- else => return p.addNode(.{
+ .data = .{ .node_and_opt_node = .{
+ lhs,
+ inits[0].toOptional(),
+ } },
+ });
+ } else {
+ return try p.addNode(.{
.tag = if (comma) .struct_init_comma else .struct_init,
.main_token = lbrace,
- .data = .{
- .lhs = lhs,
- .rhs = try p.addExtra(try p.listToSpan(inits)),
- },
- }),
+ .data = .{ .node_and_extra = .{
+ lhs,
+ try p.addExtra(try p.listToSpan(inits)),
+ } },
+ });
}
}
@@ -2430,7 +2329,7 @@ fn parseCurlySuffixExpr(p: *Parse) !Node.Index {
if (p.eatToken(.r_brace)) |_| break;
const elem_init = try p.expectExpr();
try p.scratch.append(p.gpa, elem_init);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_brace => {
p.tok_i += 1;
@@ -2441,48 +2340,47 @@ fn parseCurlySuffixExpr(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_initializer),
}
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const inits = p.scratch.items[scratch_top..];
switch (inits.len) {
- 0 => return p.addNode(.{
+ 0 => return try p.addNode(.{
.tag = .struct_init_one,
.main_token = lbrace,
- .data = .{
- .lhs = lhs,
- .rhs = 0,
- },
+ .data = .{ .node_and_opt_node = .{
+ lhs,
+ .none,
+ } },
}),
- 1 => return p.addNode(.{
+ 1 => return try p.addNode(.{
.tag = if (comma) .array_init_one_comma else .array_init_one,
.main_token = lbrace,
- .data = .{
- .lhs = lhs,
- .rhs = inits[0],
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ inits[0],
+ } },
}),
- else => return p.addNode(.{
+ else => return try p.addNode(.{
.tag = if (comma) .array_init_comma else .array_init,
.main_token = lbrace,
- .data = .{
- .lhs = lhs,
- .rhs = try p.addExtra(try p.listToSpan(inits)),
- },
+ .data = .{ .node_and_extra = .{
+ lhs,
+ try p.addExtra(try p.listToSpan(inits)),
+ } },
}),
}
}
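
The `0 => unreachable` switch arm becomes `std.debug.assert(inits.len != 0)` followed by plain if/else, the shape used for all of these list-length dispatches now. The transformation in miniature:

const std = @import("std");

fn pick(inits: []const u32) u32 {
    // Was: switch (inits.len) { 0 => unreachable, 1 => ..., else => ... }
    std.debug.assert(inits.len != 0);
    return if (inits.len <= 1) inits[0] else inits[inits.len - 1];
}

pub fn main() void {
    std.debug.print("{d} {d}\n", .{ pick(&.{4}), pick(&.{ 4, 9 }) });
}
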
/// ErrorUnionExpr <- SuffixExpr (EXCLAMATIONMARK TypeExpr)?
-fn parseErrorUnionExpr(p: *Parse) !Node.Index {
- const suffix_expr = try p.parseSuffixExpr();
- if (suffix_expr == 0) return null_node;
+fn parseErrorUnionExpr(p: *Parse) !?Node.Index {
+ const suffix_expr = try p.parseSuffixExpr() orelse return null;
const bang = p.eatToken(.bang) orelse return suffix_expr;
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .error_union,
.main_token = bang,
- .data = .{
- .lhs = suffix_expr,
- .rhs = try p.expectTypeExpr(),
- },
+ .data = .{ .node_and_node = .{
+ suffix_expr,
+ try p.expectTypeExpr(),
+ } },
});
}
@@ -2493,13 +2391,11 @@ fn parseErrorUnionExpr(p: *Parse) !Node.Index {
/// FnCallArguments <- LPAREN ExprList RPAREN
///
/// ExprList <- (Expr COMMA)* Expr?
-fn parseSuffixExpr(p: *Parse) !Node.Index {
+fn parseSuffixExpr(p: *Parse) !?Node.Index {
if (p.eatToken(.keyword_async)) |_| {
var res = try p.expectPrimaryTypeExpr();
while (true) {
- const node = try p.parseSuffixOp(res);
- if (node == 0) break;
- res = node;
+ res = try p.parseSuffixOp(res) orelse break;
}
const lparen = p.eatToken(.l_paren) orelse {
try p.warn(.expected_param_list);
@@ -2511,7 +2407,7 @@ fn parseSuffixExpr(p: *Parse) !Node.Index {
if (p.eatToken(.r_paren)) |_| break;
const param = try p.expectExpr();
try p.scratch.append(p.gpa, param);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
@@ -2522,41 +2418,33 @@ fn parseSuffixExpr(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_arg),
}
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const params = p.scratch.items[scratch_top..];
- switch (params.len) {
- 0 => return p.addNode(.{
+ if (params.len <= 1) {
+ return try p.addNode(.{
.tag = if (comma) .async_call_one_comma else .async_call_one,
.main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = 0,
- },
- }),
- 1 => return p.addNode(.{
- .tag = if (comma) .async_call_one_comma else .async_call_one,
- .main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = params[0],
- },
- }),
- else => return p.addNode(.{
+ .data = .{ .node_and_opt_node = .{
+ res,
+ if (params.len >= 1) params[0].toOptional() else .none,
+ } },
+ });
+ } else {
+ return try p.addNode(.{
.tag = if (comma) .async_call_comma else .async_call,
.main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = try p.addExtra(try p.listToSpan(params)),
- },
- }),
+ .data = .{ .node_and_extra = .{
+ res,
+ try p.addExtra(try p.listToSpan(params)),
+ } },
+ });
}
}
- var res = try p.parsePrimaryTypeExpr();
- if (res == 0) return res;
+ var res = try p.parsePrimaryTypeExpr() orelse return null;
while (true) {
- const suffix_op = try p.parseSuffixOp(res);
- if (suffix_op != 0) {
+ const opt_suffix_op = try p.parseSuffixOp(res);
+ if (opt_suffix_op) |suffix_op| {
res = suffix_op;
continue;
}
@@ -2567,7 +2455,7 @@ fn parseSuffixExpr(p: *Parse) !Node.Index {
if (p.eatToken(.r_paren)) |_| break;
const param = try p.expectExpr();
try p.scratch.append(p.gpa, param);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
@@ -2578,32 +2466,24 @@ fn parseSuffixExpr(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_arg),
}
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const params = p.scratch.items[scratch_top..];
res = switch (params.len) {
- 0 => try p.addNode(.{
+ 0, 1 => try p.addNode(.{
.tag = if (comma) .call_one_comma else .call_one,
.main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = 0,
- },
- }),
- 1 => try p.addNode(.{
- .tag = if (comma) .call_one_comma else .call_one,
- .main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = params[0],
- },
+ .data = .{ .node_and_opt_node = .{
+ res,
+ if (params.len >= 1) .fromOptional(params[0]) else .none,
+ } },
}),
else => try p.addNode(.{
.tag = if (comma) .call_comma else .call,
.main_token = lparen,
- .data = .{
- .lhs = res,
- .rhs = try p.addExtra(try p.listToSpan(params)),
- },
+ .data = .{ .node_and_extra = .{
+ res,
+ try p.addExtra(try p.listToSpan(params)),
+ } },
}),
};
}
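
A syntax note for readers unfamiliar with it: the bare `.none` and calls like `.fromOptional(params[0])` above are decl literals, which resolve a declaration against the expression's result type; that is why the diff never spells out `Node.OptionalIndex.fromOptional(...)`. A small demonstration with a made-up type:

const std = @import("std");

const OptNode = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn fromInt(i: u32) OptNode {
        return @enumFromInt(i);
    }
};

pub fn main() void {
    // The result type is known from the annotations, so `.none` and
    // `.fromInt(7)` resolve inside OptNode's namespace.
    const a: OptNode = .none;
    const b: OptNode = .fromInt(7);
    std.debug.print("{d} {d}\n", .{ @intFromEnum(a), @intFromEnum(b) });
}
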
@@ -2650,153 +2530,126 @@ fn parseSuffixExpr(p: *Parse) !Node.Index {
/// / BlockLabel? SwitchExpr
///
/// LoopTypeExpr <- KEYWORD_inline? (ForTypeExpr / WhileTypeExpr)
-fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
- switch (p.token_tags[p.tok_i]) {
- .char_literal => return p.addNode(.{
+fn parsePrimaryTypeExpr(p: *Parse) !?Node.Index {
+ switch (p.tokenTag(p.tok_i)) {
+ .char_literal => return try p.addNode(.{
.tag = .char_literal,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
- .number_literal => return p.addNode(.{
+ .number_literal => return try p.addNode(.{
.tag = .number_literal,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
- .keyword_unreachable => return p.addNode(.{
+ .keyword_unreachable => return try p.addNode(.{
.tag = .unreachable_literal,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
- .keyword_anyframe => return p.addNode(.{
+ .keyword_anyframe => return try p.addNode(.{
.tag = .anyframe_literal,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
.string_literal => {
const main_token = p.nextToken();
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .string_literal,
.main_token = main_token,
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
});
},
- .builtin => return p.parseBuiltinCall(),
- .keyword_fn => return p.parseFnProto(),
- .keyword_if => return p.parseIf(expectTypeExpr),
- .keyword_switch => return p.expectSwitchExpr(false),
+ .builtin => return try p.parseBuiltinCall(),
+ .keyword_fn => return try p.parseFnProto(),
+ .keyword_if => return try p.parseIf(expectTypeExpr),
+ .keyword_switch => return try p.expectSwitchExpr(false),
.keyword_extern,
.keyword_packed,
=> {
p.tok_i += 1;
- return p.parseContainerDeclAuto();
+ return try p.parseContainerDeclAuto();
},
.keyword_struct,
.keyword_opaque,
.keyword_enum,
.keyword_union,
- => return p.parseContainerDeclAuto(),
+ => return try p.parseContainerDeclAuto(),
- .keyword_comptime => return p.addNode(.{
+ .keyword_comptime => return try p.addNode(.{
.tag = .@"comptime",
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectTypeExpr(),
- .rhs = undefined,
- },
+ .data = .{ .node = try p.expectTypeExpr() },
}),
.multiline_string_literal_line => {
const first_line = p.nextToken();
- while (p.token_tags[p.tok_i] == .multiline_string_literal_line) {
+ while (p.tokenTag(p.tok_i) == .multiline_string_literal_line) {
p.tok_i += 1;
}
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .multiline_string_literal,
.main_token = first_line,
- .data = .{
- .lhs = first_line,
- .rhs = p.tok_i - 1,
- },
+ .data = .{ .token_and_token = .{
+ first_line,
+ p.tok_i - 1,
+ } },
});
},
- .identifier => switch (p.token_tags[p.tok_i + 1]) {
- .colon => switch (p.token_tags[p.tok_i + 2]) {
+ .identifier => switch (p.tokenTag(p.tok_i + 1)) {
+ .colon => switch (p.tokenTag(p.tok_i + 2)) {
.keyword_inline => {
p.tok_i += 3;
- switch (p.token_tags[p.tok_i]) {
- .keyword_for => return p.parseFor(expectTypeExpr),
- .keyword_while => return p.parseWhileTypeExpr(),
+ switch (p.tokenTag(p.tok_i)) {
+ .keyword_for => return try p.parseFor(expectTypeExpr),
+ .keyword_while => return try p.parseWhileTypeExpr(),
else => return p.fail(.expected_inlinable),
}
},
.keyword_for => {
p.tok_i += 2;
- return p.parseFor(expectTypeExpr);
+ return try p.parseFor(expectTypeExpr);
},
.keyword_while => {
p.tok_i += 2;
- return p.parseWhileTypeExpr();
+ return try p.parseWhileTypeExpr();
},
.keyword_switch => {
p.tok_i += 2;
- return p.expectSwitchExpr(true);
+ return try p.expectSwitchExpr(true);
},
.l_brace => {
p.tok_i += 2;
- return p.parseBlock();
+ return try p.parseBlock();
},
- else => return p.addNode(.{
+ else => return try p.addNode(.{
.tag = .identifier,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
},
- else => return p.addNode(.{
+ else => return try p.addNode(.{
.tag = .identifier,
.main_token = p.nextToken(),
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
}),
},
.keyword_inline => {
p.tok_i += 1;
- switch (p.token_tags[p.tok_i]) {
- .keyword_for => return p.parseFor(expectTypeExpr),
- .keyword_while => return p.parseWhileTypeExpr(),
+ switch (p.tokenTag(p.tok_i)) {
+ .keyword_for => return try p.parseFor(expectTypeExpr),
+ .keyword_while => return try p.parseWhileTypeExpr(),
else => return p.fail(.expected_inlinable),
}
},
- .keyword_for => return p.parseFor(expectTypeExpr),
- .keyword_while => return p.parseWhileTypeExpr(),
- .period => switch (p.token_tags[p.tok_i + 1]) {
- .identifier => return p.addNode(.{
+ .keyword_for => return try p.parseFor(expectTypeExpr),
+ .keyword_while => return try p.parseWhileTypeExpr(),
+ .period => switch (p.tokenTag(p.tok_i + 1)) {
+ .identifier => return try p.addNode(.{
.tag = .enum_literal,
- .data = .{
- .lhs = p.nextToken(), // dot
- .rhs = undefined,
- },
+ .data = .{ .token = p.nextToken() }, // dot
.main_token = p.nextToken(), // identifier
}),
.l_brace => {
@@ -2808,11 +2661,11 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
- const field_init = try p.parseFieldInit();
- if (field_init != 0) {
+ const opt_field_init = try p.parseFieldInit();
+ if (opt_field_init) |field_init| {
try p.scratch.append(p.gpa, field_init);
while (true) {
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_brace => {
p.tok_i += 1;
@@ -2826,37 +2679,24 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
const next = try p.expectFieldInit();
try p.scratch.append(p.gpa, next);
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const inits = p.scratch.items[scratch_top..];
- switch (inits.len) {
- 0 => unreachable,
- 1 => return p.addNode(.{
+ std.debug.assert(inits.len != 0);
+ if (inits.len <= 2) {
+ return try p.addNode(.{
.tag = if (comma) .struct_init_dot_two_comma else .struct_init_dot_two,
.main_token = lbrace,
- .data = .{
- .lhs = inits[0],
- .rhs = 0,
- },
- }),
- 2 => return p.addNode(.{
- .tag = if (comma) .struct_init_dot_two_comma else .struct_init_dot_two,
+ .data = .{ .opt_node_and_opt_node = .{
+ if (inits.len >= 1) .fromOptional(inits[0]) else .none,
+ if (inits.len >= 2) .fromOptional(inits[1]) else .none,
+ } },
+ });
+ } else {
+ return try p.addNode(.{
+ .tag = if (comma) .struct_init_dot_comma else .struct_init_dot,
.main_token = lbrace,
- .data = .{
- .lhs = inits[0],
- .rhs = inits[1],
- },
- }),
- else => {
- const span = try p.listToSpan(inits);
- return p.addNode(.{
- .tag = if (comma) .struct_init_dot_comma else .struct_init_dot,
- .main_token = lbrace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
- });
- },
+ .data = .{ .extra_range = try p.listToSpan(inits) },
+ });
}
}
@@ -2864,7 +2704,7 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
if (p.eatToken(.r_brace)) |_| break;
const elem_init = try p.expectExpr();
try p.scratch.append(p.gpa, elem_init);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_brace => {
p.tok_i += 1;
@@ -2875,49 +2715,30 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_initializer),
}
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const inits = p.scratch.items[scratch_top..];
- switch (inits.len) {
- 0 => return p.addNode(.{
- .tag = .struct_init_dot_two,
- .main_token = lbrace,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
- }),
- 1 => return p.addNode(.{
- .tag = if (comma) .array_init_dot_two_comma else .array_init_dot_two,
+ if (inits.len <= 2) {
+ return try p.addNode(.{
+ .tag = if (inits.len == 0)
+ .struct_init_dot_two
+ else if (comma) .array_init_dot_two_comma else .array_init_dot_two,
.main_token = lbrace,
- .data = .{
- .lhs = inits[0],
- .rhs = 0,
- },
- }),
- 2 => return p.addNode(.{
- .tag = if (comma) .array_init_dot_two_comma else .array_init_dot_two,
+ .data = .{ .opt_node_and_opt_node = .{
+ if (inits.len >= 1) inits[0].toOptional() else .none,
+ if (inits.len >= 2) inits[1].toOptional() else .none,
+ } },
+ });
+ } else {
+ return try p.addNode(.{
+ .tag = if (comma) .array_init_dot_comma else .array_init_dot,
.main_token = lbrace,
- .data = .{
- .lhs = inits[0],
- .rhs = inits[1],
- },
- }),
- else => {
- const span = try p.listToSpan(inits);
- return p.addNode(.{
- .tag = if (comma) .array_init_dot_comma else .array_init_dot,
- .main_token = lbrace,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
- });
- },
+ .data = .{ .extra_range = try p.listToSpan(inits) },
+ });
}
},
- else => return null_node,
+ else => return null,
},
- .keyword_error => switch (p.token_tags[p.tok_i + 1]) {
+ .keyword_error => switch (p.tokenTag(p.tok_i + 1)) {
.l_brace => {
const error_token = p.tok_i;
p.tok_i += 2;
@@ -2925,7 +2746,7 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
if (p.eatToken(.r_brace)) |_| break;
_ = try p.eatDocComments();
_ = try p.expectToken(.identifier);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_brace => {
p.tok_i += 1;
@@ -2936,13 +2757,10 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_field),
}
}
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .error_set_decl,
.main_token = error_token,
- .data = .{
- .lhs = undefined,
- .rhs = p.tok_i - 1, // rbrace
- },
+ .data = .{ .token = p.tok_i - 1 }, // rbrace
});
},
else => {
@@ -2951,41 +2769,37 @@ fn parsePrimaryTypeExpr(p: *Parse) !Node.Index {
if (period == null) try p.warnExpected(.period);
const identifier = p.eatToken(.identifier);
if (identifier == null) try p.warnExpected(.identifier);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .error_value,
.main_token = main_token,
- .data = .{
- .lhs = period orelse 0,
- .rhs = identifier orelse 0,
- },
+ .data = .{ .opt_token_and_opt_token = .{
+ .fromOptional(period),
+ .fromOptional(identifier),
+ } },
});
},
},
- .l_paren => return p.addNode(.{
+ .l_paren => return try p.addNode(.{
.tag = .grouped_expression,
.main_token = p.nextToken(),
- .data = .{
- .lhs = try p.expectExpr(),
- .rhs = try p.expectToken(.r_paren),
- },
+ .data = .{ .node_and_token = .{
+ try p.expectExpr(),
+ try p.expectToken(.r_paren),
+ } },
}),
- else => return null_node,
+ else => return null,
}
}
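
One subtlety the enum_literal case above relies on: `.data = .{ .token = p.nextToken() }` is written before `.main_token = p.nextToken()`, and Zig evaluates struct-initializer fields in the order they appear in the literal, so the dot token lands in `data` and the identifier that follows it in `main_token`. Demonstrated in isolation:

const std = @import("std");

var tok_i: u32 = 0;

fn nextToken() u32 {
    tok_i += 1;
    return tok_i - 1;
}

const NodeInit = struct { data: u32, main_token: u32 };

pub fn main() void {
    // Initializer fields are evaluated top to bottom as written:
    // data receives token 0, main_token receives token 1.
    const n: NodeInit = .{ .data = nextToken(), .main_token = nextToken() };
    std.debug.print("data={d} main={d}\n", .{ n.data, n.main_token });
}
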
fn expectPrimaryTypeExpr(p: *Parse) !Node.Index {
- const node = try p.parsePrimaryTypeExpr();
- if (node == 0) {
- return p.fail(.expected_primary_type_expr);
- }
- return node;
+ return try p.parsePrimaryTypeExpr() orelse return p.fail(.expected_primary_type_expr);
}
/// WhilePrefix <- KEYWORD_while LPAREN Expr RPAREN PtrPayload? WhileContinueExpr?
///
/// WhileTypeExpr <- WhilePrefix TypeExpr (KEYWORD_else Payload? TypeExpr)?
-fn parseWhileTypeExpr(p: *Parse) !Node.Index {
- const while_token = p.eatToken(.keyword_while) orelse return null_node;
+fn parseWhileTypeExpr(p: *Parse) !?Node.Index {
+ const while_token = p.eatToken(.keyword_while) orelse return null;
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -2994,54 +2808,52 @@ fn parseWhileTypeExpr(p: *Parse) !Node.Index {
const then_expr = try p.expectTypeExpr();
_ = p.eatToken(.keyword_else) orelse {
- if (cont_expr == 0) {
- return p.addNode(.{
+ if (cont_expr == null) {
+ return try p.addNode(.{
.tag = .while_simple,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ then_expr,
+ } },
});
} else {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .while_cont,
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.WhileCont{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.WhileCont{
+ .cont_expr = cont_expr.?,
.then_expr = then_expr,
}),
- },
+ } },
});
}
};
_ = try p.parsePayload();
const else_expr = try p.expectTypeExpr();
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"while",
.main_token = while_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.While{
- .cont_expr = cont_expr,
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.While{
+ .cont_expr = .fromOptional(cont_expr),
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
}
/// SwitchExpr <- KEYWORD_switch LPAREN Expr RPAREN LBRACE SwitchProngList RBRACE
-fn parseSwitchExpr(p: *Parse, is_labeled: bool) !Node.Index {
- const switch_token = p.eatToken(.keyword_switch) orelse return null_node;
- return p.expectSwitchSuffix(if (is_labeled) switch_token - 2 else switch_token);
+fn parseSwitchExpr(p: *Parse, is_labeled: bool) !?Node.Index {
+ const switch_token = p.eatToken(.keyword_switch) orelse return null;
+ return try p.expectSwitchSuffix(if (is_labeled) switch_token - 2 else switch_token);
}
fn expectSwitchExpr(p: *Parse, is_labeled: bool) !Node.Index {
const switch_token = p.assertToken(.keyword_switch);
- return p.expectSwitchSuffix(if (is_labeled) switch_token - 2 else switch_token);
+ return try p.expectSwitchSuffix(if (is_labeled) switch_token - 2 else switch_token);
}
fn expectSwitchSuffix(p: *Parse, main_token: TokenIndex) !Node.Index {
@@ -3050,19 +2862,19 @@ fn expectSwitchSuffix(p: *Parse, main_token: TokenIndex) !Node.Index {
_ = try p.expectToken(.r_paren);
_ = try p.expectToken(.l_brace);
const cases = try p.parseSwitchProngList();
- const trailing_comma = p.token_tags[p.tok_i - 1] == .comma;
+ const trailing_comma = p.tokenTag(p.tok_i - 1) == .comma;
_ = try p.expectToken(.r_brace);
return p.addNode(.{
.tag = if (trailing_comma) .switch_comma else .@"switch",
.main_token = main_token,
- .data = .{
- .lhs = expr_node,
- .rhs = try p.addExtra(Node.SubRange{
+ .data = .{ .node_and_extra = .{
+ expr_node,
+ try p.addExtra(Node.SubRange{
.start = cases.start,
.end = cases.end,
}),
- },
+ } },
});
}
@@ -3089,10 +2901,10 @@ fn expectAsmExpr(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .asm_simple,
.main_token = asm_token,
- .data = .{
- .lhs = template,
- .rhs = rparen,
- },
+ .data = .{ .node_and_token = .{
+ template,
+ rparen,
+ } },
});
}
@@ -3102,10 +2914,9 @@ fn expectAsmExpr(p: *Parse) !Node.Index {
defer p.scratch.shrinkRetainingCapacity(scratch_top);
while (true) {
- const output_item = try p.parseAsmOutputItem();
- if (output_item == 0) break;
+ const output_item = try p.parseAsmOutputItem() orelse break;
try p.scratch.append(p.gpa, output_item);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
// All possible delimiters.
.colon, .r_paren, .r_brace, .r_bracket => break,
@@ -3115,10 +2926,9 @@ fn expectAsmExpr(p: *Parse) !Node.Index {
}
if (p.eatToken(.colon)) |_| {
while (true) {
- const input_item = try p.parseAsmInputItem();
- if (input_item == 0) break;
+ const input_item = try p.parseAsmInputItem() orelse break;
try p.scratch.append(p.gpa, input_item);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
// All possible delimiters.
.colon, .r_paren, .r_brace, .r_bracket => break,
@@ -3128,7 +2938,7 @@ fn expectAsmExpr(p: *Parse) !Node.Index {
}
if (p.eatToken(.colon)) |_| {
while (p.eatToken(.string_literal)) |_| {
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.colon, .r_paren, .r_brace, .r_bracket => break,
// Likely just a missing comma; give error but continue parsing.
@@ -3142,121 +2952,106 @@ fn expectAsmExpr(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .@"asm",
.main_token = asm_token,
- .data = .{
- .lhs = template,
- .rhs = try p.addExtra(Node.Asm{
+ .data = .{ .node_and_extra = .{
+ template,
+ try p.addExtra(Node.Asm{
.items_start = span.start,
.items_end = span.end,
.rparen = rparen,
}),
- },
+ } },
});
}
/// AsmOutputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN (MINUSRARROW TypeExpr / IDENTIFIER) RPAREN
-fn parseAsmOutputItem(p: *Parse) !Node.Index {
- _ = p.eatToken(.l_bracket) orelse return null_node;
+fn parseAsmOutputItem(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.l_bracket) orelse return null;
const identifier = try p.expectToken(.identifier);
_ = try p.expectToken(.r_bracket);
_ = try p.expectToken(.string_literal);
_ = try p.expectToken(.l_paren);
- const type_expr: Node.Index = blk: {
+ const type_expr: Node.OptionalIndex = blk: {
if (p.eatToken(.arrow)) |_| {
- break :blk try p.expectTypeExpr();
+ break :blk .fromOptional(try p.expectTypeExpr());
} else {
_ = try p.expectToken(.identifier);
- break :blk null_node;
+ break :blk .none;
}
};
const rparen = try p.expectToken(.r_paren);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .asm_output,
.main_token = identifier,
- .data = .{
- .lhs = type_expr,
- .rhs = rparen,
- },
+ .data = .{ .opt_node_and_token = .{
+ type_expr,
+ rparen,
+ } },
});
}
/// AsmInputItem <- LBRACKET IDENTIFIER RBRACKET STRINGLITERAL LPAREN Expr RPAREN
-fn parseAsmInputItem(p: *Parse) !Node.Index {
- _ = p.eatToken(.l_bracket) orelse return null_node;
+fn parseAsmInputItem(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.l_bracket) orelse return null;
const identifier = try p.expectToken(.identifier);
_ = try p.expectToken(.r_bracket);
_ = try p.expectToken(.string_literal);
_ = try p.expectToken(.l_paren);
const expr = try p.expectExpr();
const rparen = try p.expectToken(.r_paren);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .asm_input,
.main_token = identifier,
- .data = .{
- .lhs = expr,
- .rhs = rparen,
- },
+ .data = .{ .node_and_token = .{
+ expr,
+ rparen,
+ } },
});
}
/// BreakLabel <- COLON IDENTIFIER
-fn parseBreakLabel(p: *Parse) !TokenIndex {
- _ = p.eatToken(.colon) orelse return null_node;
- return p.expectToken(.identifier);
+fn parseBreakLabel(p: *Parse) Error!OptionalTokenIndex {
+ _ = p.eatToken(.colon) orelse return .none;
+ const next_token = try p.expectToken(.identifier);
+ return .fromToken(next_token);
}
/// BlockLabel <- IDENTIFIER COLON
-fn parseBlockLabel(p: *Parse) TokenIndex {
- if (p.token_tags[p.tok_i] == .identifier and
- p.token_tags[p.tok_i + 1] == .colon)
- {
- const identifier = p.tok_i;
- p.tok_i += 2;
- return identifier;
- }
- return null_node;
+fn parseBlockLabel(p: *Parse) ?TokenIndex {
+ return p.eatTokens(&.{ .identifier, .colon });
}
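
parseBlockLabel and the FieldInit parsers below now delegate to an `eatTokens` helper, added elsewhere in this commit and not shown in these hunks. A plausible sketch of its behavior, with hypothetical simplified types: match a fixed sequence of token tags, advance past it on success, and report the first matched index.

const std = @import("std");

const Tag = enum { identifier, colon, period, equal, eof };

// Hypothetical stand-in for the real helper: returns the index of the
// first matched token and advances the cursor past the sequence, or
// returns null and leaves the cursor untouched.
fn eatTokens(stream: []const Tag, tok_i: *usize, tags: []const Tag) ?usize {
    if (tok_i.* + tags.len > stream.len) return null;
    for (tags, stream[tok_i.*..][0..tags.len]) |want, got| {
        if (want != got) return null;
    }
    const start = tok_i.*;
    tok_i.* += tags.len;
    return start;
}

pub fn main() void {
    const stream = [_]Tag{ .identifier, .colon, .eof };
    var tok_i: usize = 0;
    std.debug.print("{?d}\n", .{eatTokens(&stream, &tok_i, &.{ .identifier, .colon })});
}
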
/// FieldInit <- DOT IDENTIFIER EQUAL Expr
-fn parseFieldInit(p: *Parse) !Node.Index {
- if (p.token_tags[p.tok_i + 0] == .period and
- p.token_tags[p.tok_i + 1] == .identifier and
- p.token_tags[p.tok_i + 2] == .equal)
- {
- p.tok_i += 3;
- return p.expectExpr();
- } else {
- return null_node;
+fn parseFieldInit(p: *Parse) !?Node.Index {
+ if (p.eatTokens(&.{ .period, .identifier, .equal })) |_| {
+ return try p.expectExpr();
}
+ return null;
}
fn expectFieldInit(p: *Parse) !Node.Index {
- if (p.token_tags[p.tok_i] != .period or
- p.token_tags[p.tok_i + 1] != .identifier or
- p.token_tags[p.tok_i + 2] != .equal)
- return p.fail(.expected_initializer);
-
- p.tok_i += 3;
- return p.expectExpr();
+ if (p.eatTokens(&.{ .period, .identifier, .equal })) |_| {
+ return try p.expectExpr();
+ }
+ return p.fail(.expected_initializer);
}
/// WhileContinueExpr <- COLON LPAREN AssignExpr RPAREN
-fn parseWhileContinueExpr(p: *Parse) !Node.Index {
+fn parseWhileContinueExpr(p: *Parse) !?Node.Index {
_ = p.eatToken(.colon) orelse {
- if (p.token_tags[p.tok_i] == .l_paren and
+ if (p.tokenTag(p.tok_i) == .l_paren and
p.tokensOnSameLine(p.tok_i - 1, p.tok_i))
return p.fail(.expected_continue_expr);
- return null_node;
+ return null;
};
_ = try p.expectToken(.l_paren);
- const node = try p.parseAssignExpr();
- if (node == 0) return p.fail(.expected_expr_or_assignment);
+ const node = try p.parseAssignExpr() orelse return p.fail(.expected_expr_or_assignment);
_ = try p.expectToken(.r_paren);
return node;
}
/// LinkSection <- KEYWORD_linksection LPAREN Expr RPAREN
-fn parseLinkSection(p: *Parse) !Node.Index {
- _ = p.eatToken(.keyword_linksection) orelse return null_node;
+fn parseLinkSection(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.keyword_linksection) orelse return null;
_ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -3264,8 +3059,8 @@ fn parseLinkSection(p: *Parse) !Node.Index {
}
/// CallConv <- KEYWORD_callconv LPAREN Expr RPAREN
-fn parseCallconv(p: *Parse) !Node.Index {
- _ = p.eatToken(.keyword_callconv) orelse return null_node;
+fn parseCallconv(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.keyword_callconv) orelse return null;
_ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -3273,8 +3068,8 @@ fn parseCallconv(p: *Parse) !Node.Index {
}
/// AddrSpace <- KEYWORD_addrspace LPAREN Expr RPAREN
-fn parseAddrSpace(p: *Parse) !Node.Index {
- _ = p.eatToken(.keyword_addrspace) orelse return null_node;
+fn parseAddrSpace(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.keyword_addrspace) orelse return null;
_ = try p.expectToken(.l_paren);
const expr_node = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -3292,59 +3087,53 @@ fn parseAddrSpace(p: *Parse) !Node.Index {
/// ParamType
/// <- KEYWORD_anytype
/// / TypeExpr
-fn expectParamDecl(p: *Parse) !Node.Index {
+fn expectParamDecl(p: *Parse) !?Node.Index {
_ = try p.eatDocComments();
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_noalias, .keyword_comptime => p.tok_i += 1,
.ellipsis3 => {
p.tok_i += 1;
- return null_node;
+ return null;
},
else => {},
}
- if (p.token_tags[p.tok_i] == .identifier and
- p.token_tags[p.tok_i + 1] == .colon)
- {
- p.tok_i += 2;
- }
- switch (p.token_tags[p.tok_i]) {
- .keyword_anytype => {
- p.tok_i += 1;
- return null_node;
- },
- else => return p.expectTypeExpr(),
+ _ = p.eatTokens(&.{ .identifier, .colon });
+ if (p.eatToken(.keyword_anytype)) |_| {
+ return null;
+ } else {
+ return try p.expectTypeExpr();
}
}
/// Payload <- PIPE IDENTIFIER PIPE
-fn parsePayload(p: *Parse) !TokenIndex {
- _ = p.eatToken(.pipe) orelse return null_node;
+fn parsePayload(p: *Parse) Error!OptionalTokenIndex {
+ _ = p.eatToken(.pipe) orelse return .none;
const identifier = try p.expectToken(.identifier);
_ = try p.expectToken(.pipe);
- return identifier;
+ return .fromToken(identifier);
}
/// PtrPayload <- PIPE ASTERISK? IDENTIFIER PIPE
-fn parsePtrPayload(p: *Parse) !TokenIndex {
- _ = p.eatToken(.pipe) orelse return null_node;
+fn parsePtrPayload(p: *Parse) Error!OptionalTokenIndex {
+ _ = p.eatToken(.pipe) orelse return .none;
_ = p.eatToken(.asterisk);
const identifier = try p.expectToken(.identifier);
_ = try p.expectToken(.pipe);
- return identifier;
+ return .fromToken(identifier);
}
/// Returns the first identifier token, if any.
///
/// PtrIndexPayload <- PIPE ASTERISK? IDENTIFIER (COMMA IDENTIFIER)? PIPE
-fn parsePtrIndexPayload(p: *Parse) !TokenIndex {
- _ = p.eatToken(.pipe) orelse return null_node;
+fn parsePtrIndexPayload(p: *Parse) Error!OptionalTokenIndex {
+ _ = p.eatToken(.pipe) orelse return .none;
_ = p.eatToken(.asterisk);
const identifier = try p.expectToken(.identifier);
if (p.eatToken(.comma) != null) {
_ = try p.expectToken(.identifier);
}
_ = try p.expectToken(.pipe);
- return identifier;
+ return .fromToken(identifier);
}
/// SwitchProng <- KEYWORD_inline? SwitchCase EQUALRARROW PtrIndexPayload? AssignExpr
@@ -3352,7 +3141,7 @@ fn parsePtrIndexPayload(p: *Parse) !TokenIndex {
/// SwitchCase
/// <- SwitchItem (COMMA SwitchItem)* COMMA?
/// / KEYWORD_else
-fn parseSwitchProng(p: *Parse) !Node.Index {
+fn parseSwitchProng(p: *Parse) !?Node.Index {
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
@@ -3360,97 +3149,92 @@ fn parseSwitchProng(p: *Parse) !Node.Index {
if (p.eatToken(.keyword_else) == null) {
while (true) {
- const item = try p.parseSwitchItem();
- if (item == 0) break;
+ const item = try p.parseSwitchItem() orelse break;
try p.scratch.append(p.gpa, item);
if (p.eatToken(.comma) == null) break;
}
if (scratch_top == p.scratch.items.len) {
if (is_inline) p.tok_i -= 1;
- return null_node;
+ return null;
}
}
const arrow_token = try p.expectToken(.equal_angle_bracket_right);
_ = try p.parsePtrIndexPayload();
const items = p.scratch.items[scratch_top..];
- switch (items.len) {
- 0 => return p.addNode(.{
+ if (items.len <= 1) {
+ return try p.addNode(.{
.tag = if (is_inline) .switch_case_inline_one else .switch_case_one,
.main_token = arrow_token,
- .data = .{
- .lhs = 0,
- .rhs = try p.expectSingleAssignExpr(),
- },
- }),
- 1 => return p.addNode(.{
- .tag = if (is_inline) .switch_case_inline_one else .switch_case_one,
- .main_token = arrow_token,
- .data = .{
- .lhs = items[0],
- .rhs = try p.expectSingleAssignExpr(),
- },
- }),
- else => return p.addNode(.{
+ .data = .{ .opt_node_and_node = .{
+ if (items.len >= 1) items[0].toOptional() else .none,
+ try p.expectSingleAssignExpr(),
+ } },
+ });
+ } else {
+ return try p.addNode(.{
.tag = if (is_inline) .switch_case_inline else .switch_case,
.main_token = arrow_token,
- .data = .{
- .lhs = try p.addExtra(try p.listToSpan(items)),
- .rhs = try p.expectSingleAssignExpr(),
- },
- }),
+ .data = .{ .extra_and_node = .{
+ try p.addExtra(try p.listToSpan(items)),
+ try p.expectSingleAssignExpr(),
+ } },
+ });
}
}
/// SwitchItem <- Expr (DOT3 Expr)?
-fn parseSwitchItem(p: *Parse) !Node.Index {
- const expr = try p.parseExpr();
- if (expr == 0) return null_node;
+fn parseSwitchItem(p: *Parse) !?Node.Index {
+ const expr = try p.parseExpr() orelse return null;
if (p.eatToken(.ellipsis3)) |token| {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .switch_range,
.main_token = token,
- .data = .{
- .lhs = expr,
- .rhs = try p.expectExpr(),
- },
+ .data = .{ .node_and_node = .{
+ expr,
+ try p.expectExpr(),
+ } },
});
}
return expr;
}
+/// The following invariants will hold:
+/// - `(bit_range_start == .none) == (bit_range_end == .none)`
+/// - `bit_range_start != .none` implies `align_node != .none`
+/// - `bit_range_end != .none` implies `align_node != .none`
const PtrModifiers = struct {
- align_node: Node.Index,
- addrspace_node: Node.Index,
- bit_range_start: Node.Index,
- bit_range_end: Node.Index,
+ align_node: Node.OptionalIndex,
+ addrspace_node: Node.OptionalIndex,
+ bit_range_start: Node.OptionalIndex,
+ bit_range_end: Node.OptionalIndex,
};
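
These invariants are what license the bare `mods.align_node.unwrap().?` and `mods.bit_range_*.unwrap().?` in the ptr_type_bit_range branches earlier: whenever a bit range was parsed, an align expression must have been parsed too. In miniature, assuming the same invariant:

const std = @import("std");

const PtrMods = struct {
    align_node: ?u32 = null,
    bit_range_start: ?u32 = null,
    bit_range_end: ?u32 = null,
};

pub fn main() void {
    // Only `align(a:b:c)` syntax sets a bit range, so the parser upholds:
    // bit_range_start != null implies align_node != null.
    const mods: PtrMods = .{ .align_node = 8, .bit_range_start = 0, .bit_range_end = 4 };
    if (mods.bit_range_start) |start| {
        // `.?` is a checked unwrap; the invariant guarantees it succeeds.
        std.debug.print("align={d} bits={d}..{d}\n", .{ mods.align_node.?, start, mods.bit_range_end.? });
    }
}
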
fn parsePtrModifiers(p: *Parse) !PtrModifiers {
var result: PtrModifiers = .{
- .align_node = 0,
- .addrspace_node = 0,
- .bit_range_start = 0,
- .bit_range_end = 0,
+ .align_node = .none,
+ .addrspace_node = .none,
+ .bit_range_start = .none,
+ .bit_range_end = .none,
};
var saw_const = false;
var saw_volatile = false;
var saw_allowzero = false;
while (true) {
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_align => {
- if (result.align_node != 0) {
+ if (result.align_node != .none) {
try p.warn(.extra_align_qualifier);
}
p.tok_i += 1;
_ = try p.expectToken(.l_paren);
- result.align_node = try p.expectExpr();
+ result.align_node = (try p.expectExpr()).toOptional();
if (p.eatToken(.colon)) |_| {
- result.bit_range_start = try p.expectExpr();
+ result.bit_range_start = (try p.expectExpr()).toOptional();
_ = try p.expectToken(.colon);
- result.bit_range_end = try p.expectExpr();
+ result.bit_range_end = (try p.expectExpr()).toOptional();
}
_ = try p.expectToken(.r_paren);
@@ -3477,10 +3261,10 @@ fn parsePtrModifiers(p: *Parse) !PtrModifiers {
saw_allowzero = true;
},
.keyword_addrspace => {
- if (result.addrspace_node != 0) {
+ if (result.addrspace_node != .none) {
try p.warn(.extra_addrspace_qualifier);
}
- result.addrspace_node = try p.parseAddrSpace();
+ result.addrspace_node = .fromOptional(try p.parseAddrSpace());
},
else => return result,
}
@@ -3492,110 +3276,102 @@ fn parsePtrModifiers(p: *Parse) !PtrModifiers {
/// / DOT IDENTIFIER
/// / DOTASTERISK
/// / DOTQUESTIONMARK
-fn parseSuffixOp(p: *Parse, lhs: Node.Index) !Node.Index {
- switch (p.token_tags[p.tok_i]) {
+fn parseSuffixOp(p: *Parse, lhs: Node.Index) !?Node.Index {
+ switch (p.tokenTag(p.tok_i)) {
.l_bracket => {
const lbracket = p.nextToken();
const index_expr = try p.expectExpr();
if (p.eatToken(.ellipsis2)) |_| {
- const end_expr = try p.parseExpr();
+ const opt_end_expr = try p.parseExpr();
if (p.eatToken(.colon)) |_| {
const sentinel = try p.expectExpr();
_ = try p.expectToken(.r_bracket);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .slice_sentinel,
.main_token = lbracket,
- .data = .{
- .lhs = lhs,
- .rhs = try p.addExtra(Node.SliceSentinel{
+ .data = .{ .node_and_extra = .{
+ lhs,
+ try p.addExtra(Node.SliceSentinel{
.start = index_expr,
- .end = end_expr,
+ .end = .fromOptional(opt_end_expr),
.sentinel = sentinel,
}),
- },
+ } },
});
}
_ = try p.expectToken(.r_bracket);
- if (end_expr == 0) {
- return p.addNode(.{
+ const end_expr = opt_end_expr orelse {
+ return try p.addNode(.{
.tag = .slice_open,
.main_token = lbracket,
- .data = .{
- .lhs = lhs,
- .rhs = index_expr,
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ index_expr,
+ } },
});
- }
- return p.addNode(.{
+ };
+ return try p.addNode(.{
.tag = .slice,
.main_token = lbracket,
- .data = .{
- .lhs = lhs,
- .rhs = try p.addExtra(Node.Slice{
+ .data = .{ .node_and_extra = .{
+ lhs,
+ try p.addExtra(Node.Slice{
.start = index_expr,
.end = end_expr,
}),
- },
+ } },
});
}
_ = try p.expectToken(.r_bracket);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .array_access,
.main_token = lbracket,
- .data = .{
- .lhs = lhs,
- .rhs = index_expr,
- },
+ .data = .{ .node_and_node = .{
+ lhs,
+ index_expr,
+ } },
});
},
- .period_asterisk => return p.addNode(.{
+ .period_asterisk => return try p.addNode(.{
.tag = .deref,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = undefined,
- },
+ .data = .{ .node = lhs },
}),
.invalid_periodasterisks => {
try p.warn(.asterisk_after_ptr_deref);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .deref,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = undefined,
- },
+ .data = .{ .node = lhs },
});
},
- .period => switch (p.token_tags[p.tok_i + 1]) {
- .identifier => return p.addNode(.{
+ .period => switch (p.tokenTag(p.tok_i + 1)) {
+ .identifier => return try p.addNode(.{
.tag = .field_access,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = p.nextToken(),
- },
+ .data = .{ .node_and_token = .{
+ lhs,
+ p.nextToken(),
+ } },
}),
- .question_mark => return p.addNode(.{
+ .question_mark => return try p.addNode(.{
.tag = .unwrap_optional,
.main_token = p.nextToken(),
- .data = .{
- .lhs = lhs,
- .rhs = p.nextToken(),
- },
+ .data = .{ .node_and_token = .{
+ lhs,
+ p.nextToken(),
+ } },
}),
.l_brace => {
// this is a misplaced `.{`, handle the error somewhere else
- return null_node;
+ return null;
},
else => {
p.tok_i += 1;
try p.warn(.expected_suffix_op);
- return null_node;
+ return null;
},
},
- else => return null_node,
+ else => return null,
}
}
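
The slice handling above also introduces the `orelse` block form: `const end_expr = opt_end_expr orelse { return ...; };` keeps the no-end-expression early return right next to the unwrap, replacing the old `if (end_expr == 0)` special case. The idiom by itself:

const std = @import("std");

fn describeSlice(end: ?u32) []const u8 {
    // The orelse block cannot fall through (here it returns), so past
    // this point `e` is the unwrapped payload on the happy path.
    const e = end orelse {
        return "slice_open";
    };
    return if (e == 0) "empty" else "slice";
}

pub fn main() void {
    std.debug.print("{s} {s}\n", .{ describeSlice(null), describeSlice(3) });
}
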
@@ -3608,17 +3384,17 @@ fn parseSuffixOp(p: *Parse, lhs: Node.Index) !Node.Index {
/// / KEYWORD_opaque
/// / KEYWORD_enum (LPAREN Expr RPAREN)?
/// / KEYWORD_union (LPAREN (KEYWORD_enum (LPAREN Expr RPAREN)? / Expr) RPAREN)?
-fn parseContainerDeclAuto(p: *Parse) !Node.Index {
+fn parseContainerDeclAuto(p: *Parse) !?Node.Index {
const main_token = p.nextToken();
- const arg_expr = switch (p.token_tags[main_token]) {
- .keyword_opaque => null_node,
+ const arg_expr = switch (p.tokenTag(main_token)) {
+ .keyword_opaque => null,
.keyword_struct, .keyword_enum => blk: {
if (p.eatToken(.l_paren)) |_| {
const expr = try p.expectExpr();
_ = try p.expectToken(.r_paren);
break :blk expr;
} else {
- break :blk null_node;
+ break :blk null;
}
},
.keyword_union => blk: {
@@ -3633,16 +3409,16 @@ fn parseContainerDeclAuto(p: *Parse) !Node.Index {
const members = try p.parseContainerMembers();
const members_span = try members.toSpan(p);
_ = try p.expectToken(.r_brace);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .tagged_union_enum_tag_trailing,
false => .tagged_union_enum_tag,
},
.main_token = main_token,
- .data = .{
- .lhs = enum_tag_expr,
- .rhs = try p.addExtra(members_span),
- },
+ .data = .{ .node_and_extra = .{
+ enum_tag_expr,
+ try p.addExtra(members_span),
+ } },
});
} else {
_ = try p.expectToken(.r_paren);
@@ -3651,29 +3427,23 @@ fn parseContainerDeclAuto(p: *Parse) !Node.Index {
const members = try p.parseContainerMembers();
_ = try p.expectToken(.r_brace);
if (members.len <= 2) {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .tagged_union_two_trailing,
false => .tagged_union_two,
},
.main_token = main_token,
- .data = .{
- .lhs = members.lhs,
- .rhs = members.rhs,
- },
+ .data = members.data,
});
} else {
const span = try members.toSpan(p);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .tagged_union_trailing,
false => .tagged_union,
},
.main_token = main_token,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
}
}
@@ -3683,7 +3453,7 @@ fn parseContainerDeclAuto(p: *Parse) !Node.Index {
break :blk expr;
}
} else {
- break :blk null_node;
+ break :blk null;
}
},
else => {
@@ -3694,48 +3464,42 @@ fn parseContainerDeclAuto(p: *Parse) !Node.Index {
_ = try p.expectToken(.l_brace);
const members = try p.parseContainerMembers();
_ = try p.expectToken(.r_brace);
- if (arg_expr == 0) {
+ if (arg_expr == null) {
if (members.len <= 2) {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .container_decl_two_trailing,
false => .container_decl_two,
},
.main_token = main_token,
- .data = .{
- .lhs = members.lhs,
- .rhs = members.rhs,
- },
+ .data = members.data,
});
} else {
const span = try members.toSpan(p);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .container_decl_trailing,
false => .container_decl,
},
.main_token = main_token,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
+ .data = .{ .extra_range = span },
});
}
} else {
const span = try members.toSpan(p);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = switch (members.trailing) {
true => .container_decl_arg_trailing,
false => .container_decl_arg,
},
.main_token = main_token,
- .data = .{
- .lhs = arg_expr,
- .rhs = try p.addExtra(Node.SubRange{
+ .data = .{ .node_and_extra = .{
+ arg_expr.?,
+ try p.addExtra(Node.SubRange{
.start = span.start,
.end = span.end,
}),
- },
+ } },
});
}
}
@@ -3744,24 +3508,24 @@ fn parseContainerDeclAuto(p: *Parse) !Node.Index {
/// C's 'struct Foo {};' to Zig's 'const Foo = struct {};'.
fn parseCStyleContainer(p: *Parse) Error!bool {
const main_token = p.tok_i;
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.keyword_enum, .keyword_union, .keyword_struct => {},
else => return false,
}
const identifier = p.tok_i + 1;
- if (p.token_tags[identifier] != .identifier) return false;
+ if (p.tokenTag(identifier) != .identifier) return false;
p.tok_i += 2;
try p.warnMsg(.{
.tag = .c_style_container,
.token = identifier,
- .extra = .{ .expected_tag = p.token_tags[main_token] },
+ .extra = .{ .expected_tag = p.tokenTag(main_token) },
});
try p.warnMsg(.{
.tag = .zig_style_container,
.is_note = true,
.token = identifier,
- .extra = .{ .expected_tag = p.token_tags[main_token] },
+ .extra = .{ .expected_tag = p.tokenTag(main_token) },
});
_ = try p.expectToken(.l_brace);
@@ -3774,8 +3538,8 @@ fn parseCStyleContainer(p: *Parse) Error!bool {
/// Holds temporary data until we are ready to construct the full ContainerDecl AST node.
///
/// ByteAlign <- KEYWORD_align LPAREN Expr RPAREN
-fn parseByteAlign(p: *Parse) !Node.Index {
- _ = p.eatToken(.keyword_align) orelse return null_node;
+fn parseByteAlign(p: *Parse) !?Node.Index {
+ _ = p.eatToken(.keyword_align) orelse return null;
_ = try p.expectToken(.l_paren);
const expr = try p.expectExpr();
_ = try p.expectToken(.r_paren);
@@ -3788,12 +3552,11 @@ fn parseSwitchProngList(p: *Parse) !Node.SubRange {
defer p.scratch.shrinkRetainingCapacity(scratch_top);
while (true) {
- const item = try parseSwitchProng(p);
- if (item == 0) break;
+ const item = try parseSwitchProng(p) orelse break;
try p.scratch.append(p.gpa, item);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
// All possible delimiters.
.colon, .r_paren, .r_brace, .r_bracket => break,
@@ -3813,13 +3576,13 @@ fn parseParamDeclList(p: *Parse) !SmallSpan {
while (true) {
if (p.eatToken(.r_paren)) |_| break;
if (varargs == .seen) varargs = .{ .nonfinal = p.tok_i };
- const param = try p.expectParamDecl();
- if (param != 0) {
+ const opt_param = try p.expectParamDecl();
+ if (opt_param) |param| {
try p.scratch.append(p.gpa, param);
- } else if (p.token_tags[p.tok_i - 1] == .ellipsis3) {
+ } else if (p.tokenTag(p.tok_i - 1) == .ellipsis3) {
if (varargs == .none) varargs = .seen;
}
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
@@ -3835,9 +3598,9 @@ fn parseParamDeclList(p: *Parse) !SmallSpan {
}
const params = p.scratch.items[scratch_top..];
return switch (params.len) {
- 0 => SmallSpan{ .zero_or_one = 0 },
- 1 => SmallSpan{ .zero_or_one = params[0] },
- else => SmallSpan{ .multi = try p.listToSpan(params) },
+ 0 => .{ .zero_or_one = .none },
+ 1 => .{ .zero_or_one = params[0].toOptional() },
+ else => .{ .multi = try p.listToSpan(params) },
};
}
@@ -3852,10 +3615,7 @@ fn parseBuiltinCall(p: *Parse) !Node.Index {
return p.addNode(.{
.tag = .identifier,
.main_token = builtin_token,
- .data = .{
- .lhs = undefined,
- .rhs = undefined,
- },
+ .data = undefined,
});
};
const scratch_top = p.scratch.items.len;
@@ -3864,7 +3624,7 @@ fn parseBuiltinCall(p: *Parse) !Node.Index {
if (p.eatToken(.r_paren)) |_| break;
const param = try p.expectExpr();
try p.scratch.append(p.gpa, param);
- switch (p.token_tags[p.tok_i]) {
+ switch (p.tokenTag(p.tok_i)) {
.comma => p.tok_i += 1,
.r_paren => {
p.tok_i += 1;
@@ -3874,88 +3634,66 @@ fn parseBuiltinCall(p: *Parse) !Node.Index {
else => try p.warn(.expected_comma_after_arg),
}
}
- const comma = (p.token_tags[p.tok_i - 2] == .comma);
+ const comma = (p.tokenTag(p.tok_i - 2) == .comma);
const params = p.scratch.items[scratch_top..];
- switch (params.len) {
- 0 => return p.addNode(.{
- .tag = .builtin_call_two,
- .main_token = builtin_token,
- .data = .{
- .lhs = 0,
- .rhs = 0,
- },
- }),
- 1 => return p.addNode(.{
+ if (params.len <= 2) {
+ return p.addNode(.{
.tag = if (comma) .builtin_call_two_comma else .builtin_call_two,
.main_token = builtin_token,
- .data = .{
- .lhs = params[0],
- .rhs = 0,
- },
- }),
- 2 => return p.addNode(.{
- .tag = if (comma) .builtin_call_two_comma else .builtin_call_two,
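+ // With at most two arguments, both slots are stored inline as
+ // optionals; .none marks an absent argument.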
+ .data = .{ .opt_node_and_opt_node = .{
+ if (params.len >= 1) params[0].toOptional() else .none,
+ if (params.len >= 2) params[1].toOptional() else .none,
+ } },
+ });
+ } else {
+ const span = try p.listToSpan(params);
+ return p.addNode(.{
+ .tag = if (comma) .builtin_call_comma else .builtin_call,
.main_token = builtin_token,
- .data = .{
- .lhs = params[0],
- .rhs = params[1],
- },
- }),
- else => {
- const span = try p.listToSpan(params);
- return p.addNode(.{
- .tag = if (comma) .builtin_call_comma else .builtin_call,
- .main_token = builtin_token,
- .data = .{
- .lhs = span.start,
- .rhs = span.end,
- },
- });
- },
+ .data = .{ .extra_range = span },
+ });
}
}
/// IfPrefix <- KEYWORD_if LPAREN Expr RPAREN PtrPayload?
-fn parseIf(p: *Parse, comptime bodyParseFn: fn (p: *Parse) Error!Node.Index) !Node.Index {
- const if_token = p.eatToken(.keyword_if) orelse return null_node;
+fn parseIf(p: *Parse, comptime bodyParseFn: fn (p: *Parse) Error!Node.Index) !?Node.Index {
+ const if_token = p.eatToken(.keyword_if) orelse return null;
_ = try p.expectToken(.l_paren);
const condition = try p.expectExpr();
_ = try p.expectToken(.r_paren);
_ = try p.parsePtrPayload();
const then_expr = try bodyParseFn(p);
- assert(then_expr != 0);
- _ = p.eatToken(.keyword_else) orelse return p.addNode(.{
+ _ = p.eatToken(.keyword_else) orelse return try p.addNode(.{
.tag = .if_simple,
.main_token = if_token,
- .data = .{
- .lhs = condition,
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ condition,
+ then_expr,
+ } },
});
_ = try p.parsePayload();
const else_expr = try bodyParseFn(p);
- assert(else_expr != 0);
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"if",
.main_token = if_token,
- .data = .{
- .lhs = condition,
- .rhs = try p.addExtra(Node.If{
+ .data = .{ .node_and_extra = .{
+ condition,
+ try p.addExtra(Node.If{
.then_expr = then_expr,
.else_expr = else_expr,
}),
- },
+ } },
});
}
/// ForExpr <- ForPrefix Expr (KEYWORD_else Expr)?
///
/// ForTypeExpr <- ForPrefix TypeExpr (KEYWORD_else TypeExpr)?
-fn parseFor(p: *Parse, comptime bodyParseFn: fn (p: *Parse) Error!Node.Index) !Node.Index {
- const for_token = p.eatToken(.keyword_for) orelse return null_node;
+fn parseFor(p: *Parse, comptime bodyParseFn: fn (p: *Parse) Error!Node.Index) !?Node.Index {
+ const for_token = p.eatToken(.keyword_for) orelse return null;
const scratch_top = p.scratch.items.len;
defer p.scratch.shrinkRetainingCapacity(scratch_top);
@@ -3969,27 +3707,24 @@ fn parseFor(p: *Parse, comptime bodyParseFn: fn (p: *Parse) Error!Node.Index) !N
try p.scratch.append(p.gpa, else_expr);
has_else = true;
} else if (inputs == 1) {
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .for_simple,
.main_token = for_token,
- .data = .{
- .lhs = p.scratch.items[scratch_top],
- .rhs = then_expr,
- },
+ .data = .{ .node_and_node = .{
+ p.scratch.items[scratch_top],
+ then_expr,
+ } },
});
} else {
try p.scratch.append(p.gpa, then_expr);
}
- return p.addNode(.{
+ return try p.addNode(.{
.tag = .@"for",
.main_token = for_token,
- .data = .{
- .lhs = (try p.listToSpan(p.scratch.items[scratch_top..])).start,
- .rhs = @as(u32, @bitCast(Node.For{
- .inputs = @as(u31, @intCast(inputs)),
- .has_else = has_else,
- })),
- },
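+ // The extra_data start of the operand list is stored alongside a
+ // packed Node.For holding the input count and else-branch flag.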
+ .data = .{ .@"for" = .{
+ (try p.listToSpan(p.scratch.items[scratch_top..])).start,
+ .{ .inputs = @intCast(inputs), .has_else = has_else },
+ } },
});
}
@@ -4011,21 +3746,29 @@ fn eatDocComments(p: *Parse) Allocator.Error!?TokenIndex {
}
fn tokensOnSameLine(p: *Parse, token1: TokenIndex, token2: TokenIndex) bool {
- return std.mem.indexOfScalar(u8, p.source[p.token_starts[token1]..p.token_starts[token2]], '\n') == null;
+ return std.mem.indexOfScalar(u8, p.source[p.tokenStart(token1)..p.tokenStart(token2)], '\n') == null;
}
fn eatToken(p: *Parse, tag: Token.Tag) ?TokenIndex {
- return if (p.token_tags[p.tok_i] == tag) p.nextToken() else null;
+ return if (p.tokenTag(p.tok_i) == tag) p.nextToken() else null;
+}
+
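+/// Consumes the given sequence of token tags if it appears next in the
+/// stream, returning the index of the first consumed token, or null if
+/// the sequence does not match.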
+fn eatTokens(p: *Parse, tags: []const Token.Tag) ?TokenIndex {
+ const available_tags = p.tokens.items(.tag)[p.tok_i..];
+ if (!std.mem.startsWith(Token.Tag, available_tags, tags)) return null;
+ const result = p.tok_i;
+ p.tok_i += @intCast(tags.len);
+ return result;
}
fn assertToken(p: *Parse, tag: Token.Tag) TokenIndex {
const token = p.nextToken();
- assert(p.token_tags[token] == tag);
+ assert(p.tokenTag(token) == tag);
return token;
}
fn expectToken(p: *Parse, tag: Token.Tag) Error!TokenIndex {
- if (p.token_tags[p.tok_i] != tag) {
+ if (p.tokenTag(p.tok_i) != tag) {
return p.failMsg(.{
.tag = .expected_token,
.token = p.tok_i,
@@ -4036,7 +3779,7 @@ fn expectToken(p: *Parse, tag: Token.Tag) Error!TokenIndex {
}
fn expectSemicolon(p: *Parse, error_tag: AstError.Tag, recoverable: bool) Error!void {
- if (p.token_tags[p.tok_i] == .semicolon) {
+ if (p.tokenTag(p.tok_i) == .semicolon) {
_ = p.nextToken();
return;
}
@@ -4050,8 +3793,6 @@ fn nextToken(p: *Parse) TokenIndex {
return result;
}
-const null_node: Node.Index = 0;
-
const Parse = @This();
const std = @import("../std.zig");
const assert = std.debug.assert;
@@ -4060,6 +3801,8 @@ const Ast = std.zig.Ast;
const Node = Ast.Node;
const AstError = Ast.Error;
const TokenIndex = Ast.TokenIndex;
+const OptionalTokenIndex = Ast.OptionalTokenIndex;
+const ExtraIndex = Ast.ExtraIndex;
const Token = std.zig.Token;
test {
lib/std/zig/render.zig
@@ -91,21 +91,22 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast, fixups: Fixups) Error!v
};
// Render all the line comments at the beginning of the file.
- const comment_end_loc = tree.tokens.items(.start)[0];
+ const comment_end_loc = tree.tokenStart(0);
_ = try renderComments(&r, 0, comment_end_loc);
- if (tree.tokens.items(.tag)[0] == .container_doc_comment) {
+ if (tree.tokenTag(0) == .container_doc_comment) {
try renderContainerDocComments(&r, 0);
}
- if (tree.mode == .zon) {
- try renderExpression(
- &r,
- tree.nodes.items(.data)[0].lhs,
- .newline,
- );
- } else {
- try renderMembers(&r, tree.rootDecls());
+ switch (tree.mode) {
+ .zig => try renderMembers(&r, tree.rootDecls()),
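+ // In .zon mode, the root decls hold exactly one expression.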
+ .zon => {
+ try renderExpression(
+ &r,
+ tree.rootDecls()[0],
+ .newline,
+ );
+ },
}
if (auto_indenting_stream.disabled_offset) |disabled_offset| {
@@ -141,23 +142,20 @@ fn renderMember(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const datas = tree.nodes.items(.data);
if (r.fixups.omit_nodes.contains(decl)) return;
try renderDocComments(r, tree.firstToken(decl));
- switch (tree.nodes.items(.tag)[decl]) {
+ switch (tree.nodeTag(decl)) {
.fn_decl => {
// Some examples:
// pub extern "foo" fn ...
// export fn ...
- const fn_proto = datas[decl].lhs;
- const fn_token = main_tokens[fn_proto];
+ const fn_proto, const body_node = tree.nodeData(decl).node_and_node;
+ const fn_token = tree.nodeMainToken(fn_proto);
// Go back to the first token we should render here.
var i = fn_token;
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern,
.keyword_export,
.keyword_pub,
@@ -172,31 +170,34 @@ fn renderMember(
},
}
}
+
while (i < fn_token) : (i += 1) {
try renderToken(r, i, .space);
}
- switch (tree.nodes.items(.tag)[fn_proto]) {
+ switch (tree.nodeTag(fn_proto)) {
.fn_proto_one, .fn_proto => {
- const callconv_expr = if (tree.nodes.items(.tag)[fn_proto] == .fn_proto_one)
- tree.extraData(datas[fn_proto].lhs, Ast.Node.FnProtoOne).callconv_expr
+ var buf: [1]Ast.Node.Index = undefined;
+ const opt_callconv_expr = if (tree.nodeTag(fn_proto) == .fn_proto_one)
+ tree.fnProtoOne(&buf, fn_proto).ast.callconv_expr
else
- tree.extraData(datas[fn_proto].lhs, Ast.Node.FnProto).callconv_expr;
+ tree.fnProto(fn_proto).ast.callconv_expr;
+
// Keep in sync with logic in `renderFnProto`. Search this file for the marker PROMOTE_CALLCONV_INLINE
- if (callconv_expr != 0 and tree.nodes.items(.tag)[callconv_expr] == .enum_literal) {
- if (mem.eql(u8, "@\"inline\"", tree.tokenSlice(main_tokens[callconv_expr]))) {
- try ais.writer().writeAll("inline ");
+ if (opt_callconv_expr.unwrap()) |callconv_expr| {
+ if (tree.nodeTag(callconv_expr) == .enum_literal) {
+ if (mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodeMainToken(callconv_expr)))) {
+ try ais.writer().writeAll("inline ");
+ }
}
}
},
.fn_proto_simple, .fn_proto_multi => {},
else => unreachable,
}
- assert(datas[decl].rhs != 0);
try renderExpression(r, fn_proto, .space);
- const body_node = datas[decl].rhs;
if (r.fixups.gut_functions.contains(decl)) {
try ais.pushIndent(.normal);
- const lbrace = tree.nodes.items(.main_token)[body_node];
+ const lbrace = tree.nodeMainToken(body_node);
try renderToken(r, lbrace, .newline);
try discardAllParams(r, fn_proto);
try ais.writer().writeAll("@trap();");
@@ -205,7 +206,7 @@ fn renderMember(
try renderToken(r, tree.lastToken(body_node), space); // rbrace
} else if (r.fixups.unused_var_decls.count() != 0) {
try ais.pushIndent(.normal);
- const lbrace = tree.nodes.items(.main_token)[body_node];
+ const lbrace = tree.nodeMainToken(body_node);
try renderToken(r, lbrace, .newline);
var fn_proto_buf: [1]Ast.Node.Index = undefined;
@@ -213,7 +214,7 @@ fn renderMember(
var it = full_fn_proto.iterate(&tree);
while (it.next()) |param| {
const name_ident = param.name_token.?;
- assert(token_tags[name_ident] == .identifier);
+ assert(tree.tokenTag(name_ident) == .identifier);
if (r.fixups.unused_var_decls.contains(name_ident)) {
const w = ais.writer();
try w.writeAll("_ = ");
@@ -235,11 +236,11 @@ fn renderMember(
=> {
// Extern function prototypes are parsed as these tags.
// Go back to the first token we should render here.
- const fn_token = main_tokens[decl];
+ const fn_token = tree.nodeMainToken(decl);
var i = fn_token;
while (i > 0) {
i -= 1;
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.keyword_extern,
.keyword_export,
.keyword_pub,
@@ -262,9 +263,9 @@ fn renderMember(
},
.@"usingnamespace" => {
- const main_token = main_tokens[decl];
- const expr = datas[decl].lhs;
- if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
+ const main_token = tree.nodeMainToken(decl);
+ const expr = tree.nodeData(decl).node;
+ if (tree.isTokenPrecededByTags(main_token, &.{.keyword_pub})) {
try renderToken(r, main_token - 1, .space); // pub
}
try renderToken(r, main_token, .space); // usingnamespace
@@ -283,15 +284,17 @@ fn renderMember(
},
.test_decl => {
- const test_token = main_tokens[decl];
+ const test_token = tree.nodeMainToken(decl);
+ const opt_name_token, const block_node = tree.nodeData(decl).opt_token_and_node;
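+ // Test declarations may be anonymous, so the name token is optional.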
try renderToken(r, test_token, .space);
- const test_name_tag = token_tags[test_token + 1];
- switch (test_name_tag) {
- .string_literal => try renderToken(r, test_token + 1, .space),
- .identifier => try renderIdentifier(r, test_token + 1, .space, .preserve_when_shadowing),
- else => {},
+ if (opt_name_token.unwrap()) |name_token| {
+ switch (tree.tokenTag(name_token)) {
+ .string_literal => try renderToken(r, name_token, .space),
+ .identifier => try renderIdentifier(r, name_token, .space, .preserve_when_shadowing),
+ else => unreachable,
+ }
}
- try renderExpression(r, datas[decl].rhs, space);
+ try renderExpression(r, block_node, space);
},
.container_field_init,
@@ -319,10 +322,6 @@ fn renderExpressions(r: *Render, expressions: []const Ast.Node.Index, space: Spa
fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const node_tags = tree.nodes.items(.tag);
- const datas = tree.nodes.items(.data);
if (r.fixups.replace_nodes_with_string.get(node)) |replacement| {
try ais.writer().writeAll(replacement);
try renderOnlySpace(r, space);
@@ -330,9 +329,9 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
} else if (r.fixups.replace_nodes_with_node.get(node)) |replacement| {
return renderExpression(r, replacement, space);
}
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.identifier => {
- const token_index = main_tokens[node];
+ const token_index = tree.nodeMainToken(node);
return renderIdentifier(r, token_index, space, .preserve_when_shadowing);
},
@@ -341,18 +340,23 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
.unreachable_literal,
.anyframe_literal,
.string_literal,
- => return renderToken(r, main_tokens[node], space),
+ => return renderToken(r, tree.nodeMainToken(node), space),
.multiline_string_literal => {
try ais.maybeInsertNewline();
- var i = datas[node].lhs;
- while (i <= datas[node].rhs) : (i += 1) try renderToken(r, i, .newline);
+ const first_tok, const last_tok = tree.nodeData(node).token_and_token;
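+ // Each line of a multiline string literal is a separate token; render
+ // them all on their own lines.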
+ for (first_tok..last_tok + 1) |i| {
+ try renderToken(r, @intCast(i), .newline);
+ }
+
+ const next_token = last_tok + 1;
+ const next_token_tag = tree.tokenTag(next_token);
// dedent the next thing that comes after a multiline string literal
if (!ais.indentStackEmpty() and
- token_tags[i] != .colon and
- ((token_tags[i] != .semicolon and token_tags[i] != .comma) or
+ next_token_tag != .colon and
+ ((next_token_tag != .semicolon and next_token_tag != .comma) or
ais.lastSpaceModeIndent() < ais.currentIndent()))
{
ais.popIndent();
@@ -361,16 +365,17 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
switch (space) {
.none, .space, .newline, .skip => {},
- .semicolon => if (token_tags[i] == .semicolon) try renderTokenOverrideSpaceMode(r, i, .newline, .semicolon),
- .comma => if (token_tags[i] == .comma) try renderTokenOverrideSpaceMode(r, i, .newline, .comma),
- .comma_space => if (token_tags[i] == .comma) try renderToken(r, i, .space),
+ .semicolon => if (next_token_tag == .semicolon) try renderTokenOverrideSpaceMode(r, next_token, .newline, .semicolon),
+ .comma => if (next_token_tag == .comma) try renderTokenOverrideSpaceMode(r, next_token, .newline, .comma),
+ .comma_space => if (next_token_tag == .comma) try renderToken(r, next_token, .space),
}
},
.error_value => {
- try renderToken(r, main_tokens[node], .none);
- try renderToken(r, main_tokens[node] + 1, .none);
- return renderIdentifier(r, main_tokens[node] + 2, space, .eagerly_unquote);
+ const main_token = tree.nodeMainToken(node);
+ try renderToken(r, main_token, .none);
+ try renderToken(r, main_token + 1, .none);
+ return renderIdentifier(r, main_token + 2, space, .eagerly_unquote);
},
.block_two,
@@ -384,12 +389,11 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
},
.@"errdefer" => {
- const defer_token = main_tokens[node];
- const payload_token = datas[node].lhs;
- const expr = datas[node].rhs;
+ const defer_token = tree.nodeMainToken(node);
+ const maybe_payload_token, const expr = tree.nodeData(node).opt_token_and_node;
try renderToken(r, defer_token, .space);
- if (payload_token != 0) {
+ if (maybe_payload_token.unwrap()) |payload_token| {
try renderToken(r, payload_token - 1, .none); // |
try renderIdentifier(r, payload_token, .none, .preserve_when_shadowing); // identifier
try renderToken(r, payload_token + 1, .space); // |
@@ -397,84 +401,76 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
return renderExpression(r, expr, space);
},
- .@"defer" => {
- const defer_token = main_tokens[node];
- const expr = datas[node].rhs;
- try renderToken(r, defer_token, .space);
- return renderExpression(r, expr, space);
- },
- .@"comptime", .@"nosuspend" => {
- const comptime_token = main_tokens[node];
- const block = datas[node].lhs;
- try renderToken(r, comptime_token, .space);
- return renderExpression(r, block, space);
- },
-
- .@"suspend" => {
- const suspend_token = main_tokens[node];
- const body = datas[node].lhs;
- try renderToken(r, suspend_token, .space);
- return renderExpression(r, body, space);
+ .@"defer",
+ .@"comptime",
+ .@"nosuspend",
+ .@"suspend",
+ => {
+ const main_token = tree.nodeMainToken(node);
+ const item = tree.nodeData(node).node;
+ try renderToken(r, main_token, .space);
+ return renderExpression(r, item, space);
},
.@"catch" => {
- const main_token = main_tokens[node];
- const fallback_first = tree.firstToken(datas[node].rhs);
+ const main_token = tree.nodeMainToken(node);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ const fallback_first = tree.firstToken(rhs);
const same_line = tree.tokensOnSameLine(main_token, fallback_first);
const after_op_space = if (same_line) Space.space else Space.newline;
- try renderExpression(r, datas[node].lhs, .space); // target
+ try renderExpression(r, lhs, .space); // target
try ais.pushIndent(.normal);
- if (token_tags[fallback_first - 1] == .pipe) {
+ if (tree.tokenTag(fallback_first - 1) == .pipe) {
try renderToken(r, main_token, .space); // catch keyword
try renderToken(r, main_token + 1, .none); // pipe
try renderIdentifier(r, main_token + 2, .none, .preserve_when_shadowing); // payload identifier
try renderToken(r, main_token + 3, after_op_space); // pipe
} else {
- assert(token_tags[fallback_first - 1] == .keyword_catch);
+ assert(tree.tokenTag(fallback_first - 1) == .keyword_catch);
try renderToken(r, main_token, after_op_space); // catch keyword
}
- try renderExpression(r, datas[node].rhs, space); // fallback
+ try renderExpression(r, rhs, space); // fallback
ais.popIndent();
},
.field_access => {
- const main_token = main_tokens[node];
- const field_access = datas[node];
+ const lhs, const name_token = tree.nodeData(node).node_and_token;
+ const dot_token = name_token - 1;
try ais.pushIndent(.field_access);
- try renderExpression(r, field_access.lhs, .none);
+ try renderExpression(r, lhs, .none);
// Allow a line break between the lhs and the dot if the lhs and rhs
// are on different lines.
- const lhs_last_token = tree.lastToken(field_access.lhs);
- const same_line = tree.tokensOnSameLine(lhs_last_token, main_token + 1);
- if (!same_line and !hasComment(tree, lhs_last_token, main_token)) try ais.insertNewline();
+ const lhs_last_token = tree.lastToken(lhs);
+ const same_line = tree.tokensOnSameLine(lhs_last_token, name_token);
+ if (!same_line and !hasComment(tree, lhs_last_token, dot_token)) try ais.insertNewline();
- try renderToken(r, main_token, .none); // .
+ try renderToken(r, dot_token, .none);
- try renderIdentifier(r, field_access.rhs, space, .eagerly_unquote); // field
+ try renderIdentifier(r, name_token, space, .eagerly_unquote); // field
ais.popIndent();
},
.error_union,
.switch_range,
=> {
- const infix = datas[node];
- try renderExpression(r, infix.lhs, .none);
- try renderToken(r, main_tokens[node], .none);
- return renderExpression(r, infix.rhs, space);
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ try renderExpression(r, lhs, .none);
+ try renderToken(r, tree.nodeMainToken(node), .none);
+ return renderExpression(r, rhs, space);
},
.for_range => {
- const infix = datas[node];
- try renderExpression(r, infix.lhs, .none);
- if (infix.rhs != 0) {
- try renderToken(r, main_tokens[node], .none);
- return renderExpression(r, infix.rhs, space);
+ const start, const opt_end = tree.nodeData(node).node_and_opt_node;
+ try renderExpression(r, start, .none);
+ if (opt_end.unwrap()) |end| {
+ try renderToken(r, tree.nodeMainToken(node), .none);
+ return renderExpression(r, end, space);
} else {
- return renderToken(r, main_tokens[node], space);
+ return renderToken(r, tree.nodeMainToken(node), space);
}
},
@@ -497,16 +493,16 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
.assign_mul_wrap,
.assign_mul_sat,
=> {
- const infix = datas[node];
- try renderExpression(r, infix.lhs, .space);
- const op_token = main_tokens[node];
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ try renderExpression(r, lhs, .space);
+ const op_token = tree.nodeMainToken(node);
try ais.pushIndent(.after_equals);
if (tree.tokensOnSameLine(op_token, op_token + 1)) {
try renderToken(r, op_token, .space);
} else {
try renderToken(r, op_token, .newline);
}
- try renderExpression(r, infix.rhs, space);
+ try renderExpression(r, rhs, space);
ais.popIndent();
},
@@ -540,16 +536,16 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
.sub_sat,
.@"orelse",
=> {
- const infix = datas[node];
- try renderExpression(r, infix.lhs, .space);
- const op_token = main_tokens[node];
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ try renderExpression(r, lhs, .space);
+ const op_token = tree.nodeMainToken(node);
try ais.pushIndent(.binop);
if (tree.tokensOnSameLine(op_token, op_token + 1)) {
try renderToken(r, op_token, .space);
} else {
try renderToken(r, op_token, .newline);
}
- try renderExpression(r, infix.rhs, space);
+ try renderExpression(r, rhs, space);
ais.popIndent();
},
@@ -561,7 +557,7 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
for (full.ast.variables, 0..) |variable_node, i| {
const variable_space: Space = if (i == full.ast.variables.len - 1) .space else .comma_space;
- switch (node_tags[variable_node]) {
+ switch (tree.nodeTag(variable_node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@@ -589,16 +585,16 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
.optional_type,
.address_of,
=> {
- try renderToken(r, main_tokens[node], .none);
- return renderExpression(r, datas[node].lhs, space);
+ try renderToken(r, tree.nodeMainToken(node), .none);
+ return renderExpression(r, tree.nodeData(node).node, space);
},
.@"try",
.@"resume",
.@"await",
=> {
- try renderToken(r, main_tokens[node], .space);
- return renderExpression(r, datas[node].lhs, space);
+ try renderToken(r, tree.nodeMainToken(node), .space);
+ return renderExpression(r, tree.nodeData(node).node, space);
},
.array_type,
@@ -651,68 +647,77 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
},
.array_access => {
- const suffix = datas[node];
- const lbracket = tree.firstToken(suffix.rhs) - 1;
- const rbracket = tree.lastToken(suffix.rhs) + 1;
+ const lhs, const rhs = tree.nodeData(node).node_and_node;
+ const lbracket = tree.firstToken(rhs) - 1;
+ const rbracket = tree.lastToken(rhs) + 1;
const one_line = tree.tokensOnSameLine(lbracket, rbracket);
const inner_space = if (one_line) Space.none else Space.newline;
- try renderExpression(r, suffix.lhs, .none);
+ try renderExpression(r, lhs, .none);
try ais.pushIndent(.normal);
try renderToken(r, lbracket, inner_space); // [
- try renderExpression(r, suffix.rhs, inner_space);
+ try renderExpression(r, rhs, inner_space);
ais.popIndent();
return renderToken(r, rbracket, space); // ]
},
- .slice_open, .slice, .slice_sentinel => return renderSlice(r, node, tree.fullSlice(node).?, space),
+ .slice_open,
+ .slice,
+ .slice_sentinel,
+ => return renderSlice(r, node, tree.fullSlice(node).?, space),
.deref => {
- try renderExpression(r, datas[node].lhs, .none);
- return renderToken(r, main_tokens[node], space);
+ try renderExpression(r, tree.nodeData(node).node, .none);
+ return renderToken(r, tree.nodeMainToken(node), space);
},
.unwrap_optional => {
- try renderExpression(r, datas[node].lhs, .none);
- try renderToken(r, main_tokens[node], .none);
- return renderToken(r, datas[node].rhs, space);
+ const lhs, const question_mark = tree.nodeData(node).node_and_token;
+ const dot_token = question_mark - 1;
+ try renderExpression(r, lhs, .none);
+ try renderToken(r, dot_token, .none);
+ return renderToken(r, question_mark, space);
},
.@"break", .@"continue" => {
- const main_token = main_tokens[node];
- const label_token = datas[node].lhs;
- const target = datas[node].rhs;
- if (label_token == 0 and target == 0) {
+ const main_token = tree.nodeMainToken(node);
+ const opt_label_token, const opt_target = tree.nodeData(node).opt_token_and_opt_node;
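+ // The label and the operand are each optional, giving four cases.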
+ if (opt_label_token == .none and opt_target == .none) {
try renderToken(r, main_token, space); // break/continue
- } else if (label_token == 0 and target != 0) {
+ } else if (opt_label_token == .none and opt_target != .none) {
+ const target = opt_target.unwrap().?;
try renderToken(r, main_token, .space); // break/continue
try renderExpression(r, target, space);
- } else if (label_token != 0 and target == 0) {
+ } else if (opt_label_token != .none and opt_target == .none) {
+ const label_token = opt_label_token.unwrap().?;
try renderToken(r, main_token, .space); // break/continue
try renderToken(r, label_token - 1, .none); // :
try renderIdentifier(r, label_token, space, .eagerly_unquote); // identifier
- } else if (label_token != 0 and target != 0) {
+ } else if (opt_label_token != .none and opt_target != .none) {
+ const label_token = opt_label_token.unwrap().?;
+ const target = opt_target.unwrap().?;
try renderToken(r, main_token, .space); // break/continue
try renderToken(r, label_token - 1, .none); // :
try renderIdentifier(r, label_token, .space, .eagerly_unquote); // identifier
try renderExpression(r, target, space);
- }
+ } else unreachable;
},
.@"return" => {
- if (datas[node].lhs != 0) {
- try renderToken(r, main_tokens[node], .space);
- try renderExpression(r, datas[node].lhs, space);
+ if (tree.nodeData(node).opt_node.unwrap()) |expr| {
+ try renderToken(r, tree.nodeMainToken(node), .space);
+ try renderExpression(r, expr, space);
} else {
- try renderToken(r, main_tokens[node], space);
+ try renderToken(r, tree.nodeMainToken(node), space);
}
},
.grouped_expression => {
+ const expr, const rparen = tree.nodeData(node).node_and_token;
try ais.pushIndent(.normal);
- try renderToken(r, main_tokens[node], .none); // lparen
- try renderExpression(r, datas[node].lhs, .none);
+ try renderToken(r, tree.nodeMainToken(node), .none); // lparen
+ try renderExpression(r, expr, .none);
ais.popIndent();
- return renderToken(r, datas[node].rhs, space); // rparen
+ return renderToken(r, rparen, space);
},
.container_decl,
@@ -733,9 +738,9 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
},
.error_set_decl => {
- const error_token = main_tokens[node];
+ const error_token = tree.nodeMainToken(node);
const lbrace = error_token + 1;
- const rbrace = datas[node].rhs;
+ const rbrace = tree.nodeData(node).token;
try renderToken(r, error_token, .none);
@@ -743,20 +748,20 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
// There is nothing between the braces so render condensed: `error{}`
try renderToken(r, lbrace, .none);
return renderToken(r, rbrace, space);
- } else if (lbrace + 2 == rbrace and token_tags[lbrace + 1] == .identifier) {
+ } else if (lbrace + 2 == rbrace and tree.tokenTag(lbrace + 1) == .identifier) {
// There is exactly one member and no trailing comma or
// comments, so render without surrounding spaces: `error{Foo}`
try renderToken(r, lbrace, .none);
try renderIdentifier(r, lbrace + 1, .none, .eagerly_unquote); // identifier
return renderToken(r, rbrace, space);
- } else if (token_tags[rbrace - 1] == .comma) {
+ } else if (tree.tokenTag(rbrace - 1) == .comma) {
// There is a trailing comma so render each member on a new line.
try ais.pushIndent(.normal);
try renderToken(r, lbrace, .newline);
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
if (i > lbrace + 1) try renderExtraNewlineToken(r, i);
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.doc_comment => try renderToken(r, i, .newline),
.identifier => {
try ais.pushSpace(.comma);
@@ -774,7 +779,7 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
try renderToken(r, lbrace, .space);
var i = lbrace + 1;
while (i < rbrace) : (i += 1) {
- switch (token_tags[i]) {
+ switch (tree.tokenTag(i)) {
.doc_comment => unreachable, // TODO
.identifier => try renderIdentifier(r, i, .comma_space, .eagerly_unquote),
.comma => {},
@@ -792,7 +797,7 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
=> {
var buf: [2]Ast.Node.Index = undefined;
const params = tree.builtinCallParams(&buf, node).?;
- return renderBuiltinCall(r, main_tokens[node], params, space);
+ return renderBuiltinCall(r, tree.nodeMainToken(node), params, space);
},
.fn_proto_simple,
@@ -805,14 +810,10 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
},
.anyframe_type => {
- const main_token = main_tokens[node];
- if (datas[node].rhs != 0) {
- try renderToken(r, main_token, .none); // anyframe
- try renderToken(r, main_token + 1, .none); // ->
- return renderExpression(r, datas[node].rhs, space);
- } else {
- return renderToken(r, main_token, space); // anyframe
- }
+ const main_token = tree.nodeMainToken(node);
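+ // A bare `anyframe` is an .anyframe_literal, so an .anyframe_type node
+ // always carries a result type.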
+ try renderToken(r, main_token, .none); // anyframe
+ try renderToken(r, main_token + 1, .none); // ->
+ return renderExpression(r, tree.nodeData(node).token_and_node[1], space);
},
.@"switch",
@@ -869,8 +870,8 @@ fn renderExpression(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
=> return renderAsm(r, tree.fullAsm(node).?, space),
.enum_literal => {
- try renderToken(r, main_tokens[node] - 1, .none); // .
- return renderIdentifier(r, main_tokens[node], space, .eagerly_unquote); // name
+ try renderToken(r, tree.nodeMainToken(node) - 1, .none); // .
+ return renderIdentifier(r, tree.nodeMainToken(node), space, .eagerly_unquote); // name
},
.fn_decl => unreachable,
@@ -912,9 +913,9 @@ fn renderArrayType(
try ais.pushIndent(.normal);
try renderToken(r, array_type.ast.lbracket, inner_space); // lbracket
try renderExpression(r, array_type.ast.elem_count, inner_space);
- if (array_type.ast.sentinel != 0) {
- try renderToken(r, tree.firstToken(array_type.ast.sentinel) - 1, inner_space); // colon
- try renderExpression(r, array_type.ast.sentinel, inner_space);
+ if (array_type.ast.sentinel.unwrap()) |sentinel| {
+ try renderToken(r, tree.firstToken(sentinel) - 1, inner_space); // colon
+ try renderExpression(r, sentinel, inner_space);
}
ais.popIndent();
try renderToken(r, rbracket, .none); // rbracket
@@ -923,6 +924,7 @@ fn renderArrayType(
fn renderPtrType(r: *Render, ptr_type: Ast.full.PtrType, space: Space) Error!void {
const tree = r.tree;
+ const main_token = ptr_type.ast.main_token;
switch (ptr_type.size) {
.one => {
// Since ** tokens exist and the same token is shared by two
@@ -930,41 +932,41 @@ fn renderPtrType(r: *Render, ptr_type: Ast.full.PtrType, space: Space) Error!voi
// in such a relationship. If so, skip rendering anything for
// this pointer type and rely on the child to render our asterisk
// as well when it renders the ** token.
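// For example, in `**u8` the single `**` token stands in for both
// asterisks.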
- if (tree.tokens.items(.tag)[ptr_type.ast.main_token] == .asterisk_asterisk and
- ptr_type.ast.main_token == tree.nodes.items(.main_token)[ptr_type.ast.child_type])
+ if (tree.tokenTag(main_token) == .asterisk_asterisk and
+ main_token == tree.nodeMainToken(ptr_type.ast.child_type))
{
return renderExpression(r, ptr_type.ast.child_type, space);
}
- try renderToken(r, ptr_type.ast.main_token, .none); // asterisk
+ try renderToken(r, main_token, .none); // asterisk
},
.many => {
- if (ptr_type.ast.sentinel == 0) {
- try renderToken(r, ptr_type.ast.main_token, .none); // lbracket
- try renderToken(r, ptr_type.ast.main_token + 1, .none); // asterisk
- try renderToken(r, ptr_type.ast.main_token + 2, .none); // rbracket
+ if (ptr_type.ast.sentinel.unwrap()) |sentinel| {
+ try renderToken(r, main_token, .none); // lbracket
+ try renderToken(r, main_token + 1, .none); // asterisk
+ try renderToken(r, main_token + 2, .none); // colon
+ try renderExpression(r, sentinel, .none);
+ try renderToken(r, tree.lastToken(sentinel) + 1, .none); // rbracket
} else {
- try renderToken(r, ptr_type.ast.main_token, .none); // lbracket
- try renderToken(r, ptr_type.ast.main_token + 1, .none); // asterisk
- try renderToken(r, ptr_type.ast.main_token + 2, .none); // colon
- try renderExpression(r, ptr_type.ast.sentinel, .none);
- try renderToken(r, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket
+ try renderToken(r, main_token, .none); // lbracket
+ try renderToken(r, main_token + 1, .none); // asterisk
+ try renderToken(r, main_token + 2, .none); // rbracket
}
},
.c => {
- try renderToken(r, ptr_type.ast.main_token, .none); // lbracket
- try renderToken(r, ptr_type.ast.main_token + 1, .none); // asterisk
- try renderToken(r, ptr_type.ast.main_token + 2, .none); // c
- try renderToken(r, ptr_type.ast.main_token + 3, .none); // rbracket
+ try renderToken(r, main_token, .none); // lbracket
+ try renderToken(r, main_token + 1, .none); // asterisk
+ try renderToken(r, main_token + 2, .none); // c
+ try renderToken(r, main_token + 3, .none); // rbracket
},
.slice => {
- if (ptr_type.ast.sentinel == 0) {
- try renderToken(r, ptr_type.ast.main_token, .none); // lbracket
- try renderToken(r, ptr_type.ast.main_token + 1, .none); // rbracket
+ if (ptr_type.ast.sentinel.unwrap()) |sentinel| {
+ try renderToken(r, main_token, .none); // lbracket
+ try renderToken(r, main_token + 1, .none); // colon
+ try renderExpression(r, sentinel, .none);
+ try renderToken(r, tree.lastToken(sentinel) + 1, .none); // rbracket
} else {
- try renderToken(r, ptr_type.ast.main_token, .none); // lbracket
- try renderToken(r, ptr_type.ast.main_token + 1, .none); // colon
- try renderExpression(r, ptr_type.ast.sentinel, .none);
- try renderToken(r, tree.lastToken(ptr_type.ast.sentinel) + 1, .none); // rbracket
+ try renderToken(r, main_token, .none); // lbracket
+ try renderToken(r, main_token + 1, .none); // rbracket
}
},
}
@@ -973,29 +975,29 @@ fn renderPtrType(r: *Render, ptr_type: Ast.full.PtrType, space: Space) Error!voi
try renderToken(r, allowzero_token, .space);
}
- if (ptr_type.ast.align_node != 0) {
- const align_first = tree.firstToken(ptr_type.ast.align_node);
+ if (ptr_type.ast.align_node.unwrap()) |align_node| {
+ const align_first = tree.firstToken(align_node);
try renderToken(r, align_first - 2, .none); // align
try renderToken(r, align_first - 1, .none); // lparen
- try renderExpression(r, ptr_type.ast.align_node, .none);
- if (ptr_type.ast.bit_range_start != 0) {
- assert(ptr_type.ast.bit_range_end != 0);
- try renderToken(r, tree.firstToken(ptr_type.ast.bit_range_start) - 1, .none); // colon
- try renderExpression(r, ptr_type.ast.bit_range_start, .none);
- try renderToken(r, tree.firstToken(ptr_type.ast.bit_range_end) - 1, .none); // colon
- try renderExpression(r, ptr_type.ast.bit_range_end, .none);
- try renderToken(r, tree.lastToken(ptr_type.ast.bit_range_end) + 1, .space); // rparen
+ try renderExpression(r, align_node, .none);
+ if (ptr_type.ast.bit_range_start.unwrap()) |bit_range_start| {
+ const bit_range_end = ptr_type.ast.bit_range_end.unwrap().?;
+ try renderToken(r, tree.firstToken(bit_range_start) - 1, .none); // colon
+ try renderExpression(r, bit_range_start, .none);
+ try renderToken(r, tree.firstToken(bit_range_end) - 1, .none); // colon
+ try renderExpression(r, bit_range_end, .none);
+ try renderToken(r, tree.lastToken(bit_range_end) + 1, .space); // rparen
} else {
- try renderToken(r, tree.lastToken(ptr_type.ast.align_node) + 1, .space); // rparen
+ try renderToken(r, tree.lastToken(align_node) + 1, .space); // rparen
}
}
- if (ptr_type.ast.addrspace_node != 0) {
- const addrspace_first = tree.firstToken(ptr_type.ast.addrspace_node);
+ if (ptr_type.ast.addrspace_node.unwrap()) |addrspace_node| {
+ const addrspace_first = tree.firstToken(addrspace_node);
try renderToken(r, addrspace_first - 2, .none); // addrspace
try renderToken(r, addrspace_first - 1, .none); // lparen
- try renderExpression(r, ptr_type.ast.addrspace_node, .none);
- try renderToken(r, tree.lastToken(ptr_type.ast.addrspace_node) + 1, .space); // rparen
+ try renderExpression(r, addrspace_node, .none);
+ try renderToken(r, tree.lastToken(addrspace_node) + 1, .space); // rparen
}
if (ptr_type.const_token) |const_token| {
@@ -1016,13 +1018,12 @@ fn renderSlice(
space: Space,
) Error!void {
const tree = r.tree;
- const node_tags = tree.nodes.items(.tag);
- const after_start_space_bool = nodeCausesSliceOpSpace(node_tags[slice.ast.start]) or
- if (slice.ast.end != 0) nodeCausesSliceOpSpace(node_tags[slice.ast.end]) else false;
+ const after_start_space_bool = nodeCausesSliceOpSpace(tree.nodeTag(slice.ast.start)) or
+ if (slice.ast.end.unwrap()) |end| nodeCausesSliceOpSpace(tree.nodeTag(end)) else false;
const after_start_space = if (after_start_space_bool) Space.space else Space.none;
- const after_dots_space = if (slice.ast.end != 0)
+ const after_dots_space = if (slice.ast.end != .none)
after_start_space
- else if (slice.ast.sentinel != 0) Space.space else Space.none;
+ else if (slice.ast.sentinel != .none) Space.space else Space.none;
try renderExpression(r, slice.ast.sliced, .none);
try renderToken(r, slice.ast.lbracket, .none); // lbracket
@@ -1031,14 +1032,14 @@ fn renderSlice(
try renderExpression(r, slice.ast.start, after_start_space);
try renderToken(r, start_last + 1, after_dots_space); // ellipsis2 ("..")
- if (slice.ast.end != 0) {
- const after_end_space = if (slice.ast.sentinel != 0) Space.space else Space.none;
- try renderExpression(r, slice.ast.end, after_end_space);
+ if (slice.ast.end.unwrap()) |end| {
+ const after_end_space = if (slice.ast.sentinel != .none) Space.space else Space.none;
+ try renderExpression(r, end, after_end_space);
}
- if (slice.ast.sentinel != 0) {
- try renderToken(r, tree.firstToken(slice.ast.sentinel) - 1, .none); // colon
- try renderExpression(r, slice.ast.sentinel, .none);
+ if (slice.ast.sentinel.unwrap()) |sentinel| {
+ try renderToken(r, tree.firstToken(sentinel) - 1, .none); // colon
+ try renderExpression(r, sentinel, .none);
}
try renderToken(r, tree.lastToken(slice_node), space); // rbracket
@@ -1050,12 +1051,8 @@ fn renderAsmOutput(
space: Space,
) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const datas = tree.nodes.items(.data);
- assert(node_tags[asm_output] == .asm_output);
- const symbolic_name = main_tokens[asm_output];
+ assert(tree.nodeTag(asm_output) == .asm_output);
+ const symbolic_name = tree.nodeMainToken(asm_output);
try renderToken(r, symbolic_name - 1, .none); // lbracket
try renderIdentifier(r, symbolic_name, .none, .eagerly_unquote); // ident
@@ -1063,10 +1060,11 @@ fn renderAsmOutput(
try renderToken(r, symbolic_name + 2, .space); // "constraint"
try renderToken(r, symbolic_name + 3, .none); // lparen
- if (token_tags[symbolic_name + 4] == .arrow) {
+ if (tree.tokenTag(symbolic_name + 4) == .arrow) {
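+ // The `->` form stores an optional result type node together with the
+ // closing rparen token.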
+ const type_expr, const rparen = tree.nodeData(asm_output).opt_node_and_token;
try renderToken(r, symbolic_name + 4, .space); // ->
- try renderExpression(r, datas[asm_output].lhs, Space.none);
- return renderToken(r, datas[asm_output].rhs, space); // rparen
+ try renderExpression(r, type_expr.unwrap().?, Space.none);
+ return renderToken(r, rparen, space);
} else {
try renderIdentifier(r, symbolic_name + 4, .none, .eagerly_unquote); // ident
return renderToken(r, symbolic_name + 5, space); // rparen
@@ -1079,19 +1077,17 @@ fn renderAsmInput(
space: Space,
) Error!void {
const tree = r.tree;
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const datas = tree.nodes.items(.data);
- assert(node_tags[asm_input] == .asm_input);
- const symbolic_name = main_tokens[asm_input];
+ assert(tree.nodeTag(asm_input) == .asm_input);
+ const symbolic_name = tree.nodeMainToken(asm_input);
+ const expr, const rparen = tree.nodeData(asm_input).node_and_token;
try renderToken(r, symbolic_name - 1, .none); // lbracket
try renderIdentifier(r, symbolic_name, .none, .eagerly_unquote); // ident
try renderToken(r, symbolic_name + 1, .space); // rbracket
try renderToken(r, symbolic_name + 2, .space); // "constraint"
try renderToken(r, symbolic_name + 3, .none); // lparen
- try renderExpression(r, datas[asm_input].lhs, Space.none);
- return renderToken(r, datas[asm_input].rhs, space); // rparen
+ try renderExpression(r, expr, Space.none);
+ return renderToken(r, rparen, space);
}
fn renderVarDecl(
@@ -1147,15 +1143,15 @@ fn renderVarDeclWithoutFixups(
try renderToken(r, var_decl.ast.mut_token, .space); // var
- if (var_decl.ast.type_node != 0 or var_decl.ast.align_node != 0 or
- var_decl.ast.addrspace_node != 0 or var_decl.ast.section_node != 0 or
- var_decl.ast.init_node != 0)
+ if (var_decl.ast.type_node != .none or var_decl.ast.align_node != .none or
+ var_decl.ast.addrspace_node != .none or var_decl.ast.section_node != .none or
+ var_decl.ast.init_node != .none)
{
- const name_space = if (var_decl.ast.type_node == 0 and
- (var_decl.ast.align_node != 0 or
- var_decl.ast.addrspace_node != 0 or
- var_decl.ast.section_node != 0 or
- var_decl.ast.init_node != 0))
+ const name_space = if (var_decl.ast.type_node == .none and
+ (var_decl.ast.align_node != .none or
+ var_decl.ast.addrspace_node != .none or
+ var_decl.ast.section_node != .none or
+ var_decl.ast.init_node != .none))
Space.space
else
Space.none;
@@ -1165,26 +1161,26 @@ fn renderVarDeclWithoutFixups(
return renderIdentifier(r, var_decl.ast.mut_token + 1, space, .preserve_when_shadowing); // name
}
- if (var_decl.ast.type_node != 0) {
+ if (var_decl.ast.type_node.unwrap()) |type_node| {
try renderToken(r, var_decl.ast.mut_token + 2, Space.space); // :
- if (var_decl.ast.align_node != 0 or var_decl.ast.addrspace_node != 0 or
- var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0)
+ if (var_decl.ast.align_node != .none or var_decl.ast.addrspace_node != .none or
+ var_decl.ast.section_node != .none or var_decl.ast.init_node != .none)
{
- try renderExpression(r, var_decl.ast.type_node, .space);
+ try renderExpression(r, type_node, .space);
} else {
- return renderExpression(r, var_decl.ast.type_node, space);
+ return renderExpression(r, type_node, space);
}
}
- if (var_decl.ast.align_node != 0) {
- const lparen = tree.firstToken(var_decl.ast.align_node) - 1;
+ if (var_decl.ast.align_node.unwrap()) |align_node| {
+ const lparen = tree.firstToken(align_node) - 1;
const align_kw = lparen - 1;
- const rparen = tree.lastToken(var_decl.ast.align_node) + 1;
+ const rparen = tree.lastToken(align_node) + 1;
try renderToken(r, align_kw, Space.none); // align
try renderToken(r, lparen, Space.none); // (
- try renderExpression(r, var_decl.ast.align_node, Space.none);
- if (var_decl.ast.addrspace_node != 0 or var_decl.ast.section_node != 0 or
- var_decl.ast.init_node != 0)
+ try renderExpression(r, align_node, Space.none);
+ if (var_decl.ast.addrspace_node != .none or var_decl.ast.section_node != .none or
+ var_decl.ast.init_node != .none)
{
try renderToken(r, rparen, .space); // )
} else {
@@ -1192,14 +1188,14 @@ fn renderVarDeclWithoutFixups(
}
}
- if (var_decl.ast.addrspace_node != 0) {
- const lparen = tree.firstToken(var_decl.ast.addrspace_node) - 1;
+ if (var_decl.ast.addrspace_node.unwrap()) |addrspace_node| {
+ const lparen = tree.firstToken(addrspace_node) - 1;
const addrspace_kw = lparen - 1;
- const rparen = tree.lastToken(var_decl.ast.addrspace_node) + 1;
+ const rparen = tree.lastToken(addrspace_node) + 1;
try renderToken(r, addrspace_kw, Space.none); // addrspace
try renderToken(r, lparen, Space.none); // (
- try renderExpression(r, var_decl.ast.addrspace_node, Space.none);
- if (var_decl.ast.section_node != 0 or var_decl.ast.init_node != 0) {
+ try renderExpression(r, addrspace_node, Space.none);
+ if (var_decl.ast.section_node != .none or var_decl.ast.init_node != .none) {
try renderToken(r, rparen, .space); // )
} else {
try renderToken(r, rparen, .none); // )
@@ -1207,27 +1203,27 @@ fn renderVarDeclWithoutFixups(
}
}
- if (var_decl.ast.section_node != 0) {
- const lparen = tree.firstToken(var_decl.ast.section_node) - 1;
+ if (var_decl.ast.section_node.unwrap()) |section_node| {
+ const lparen = tree.firstToken(section_node) - 1;
const section_kw = lparen - 1;
- const rparen = tree.lastToken(var_decl.ast.section_node) + 1;
+ const rparen = tree.lastToken(section_node) + 1;
try renderToken(r, section_kw, Space.none); // linksection
try renderToken(r, lparen, Space.none); // (
- try renderExpression(r, var_decl.ast.section_node, Space.none);
- if (var_decl.ast.init_node != 0) {
+ try renderExpression(r, section_node, Space.none);
+ if (var_decl.ast.init_node != .none) {
try renderToken(r, rparen, .space); // )
} else {
return renderToken(r, rparen, space); // )
}
}
- assert(var_decl.ast.init_node != 0);
+ const init_node = var_decl.ast.init_node.unwrap().?;
- const eq_token = tree.firstToken(var_decl.ast.init_node) - 1;
+ const eq_token = tree.firstToken(init_node) - 1;
const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
try ais.pushIndent(.after_equals);
try renderToken(r, eq_token, eq_space); // =
- try renderExpression(r, var_decl.ast.init_node, space); // ;
+ try renderExpression(r, init_node, space); // ;
ais.popIndent();
}
@@ -1236,7 +1232,7 @@ fn renderIf(r: *Render, if_node: Ast.full.If, space: Space) Error!void {
.ast = .{
.while_token = if_node.ast.if_token,
.cond_expr = if_node.ast.cond_expr,
- .cont_expr = 0,
+ .cont_expr = .none,
.then_expr = if_node.ast.then_expr,
.else_expr = if_node.ast.else_expr,
},
@@ -1252,7 +1248,6 @@ fn renderIf(r: *Render, if_node: Ast.full.If, space: Space) Error!void {
/// respective values set to null.
fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
if (while_node.label_token) |label| {
try renderIdentifier(r, label, .none, .eagerly_unquote); // label
@@ -1273,7 +1268,7 @@ fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) Error!void
try renderToken(r, last_prefix_token, .space);
try renderToken(r, payload_token - 1, .none); // |
const ident = blk: {
- if (token_tags[payload_token] == .asterisk) {
+ if (tree.tokenTag(payload_token) == .asterisk) {
try renderToken(r, payload_token, .none); // *
break :blk payload_token + 1;
} else {
@@ -1282,7 +1277,7 @@ fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) Error!void
};
try renderIdentifier(r, ident, .none, .preserve_when_shadowing); // identifier
const pipe = blk: {
- if (token_tags[ident + 1] == .comma) {
+ if (tree.tokenTag(ident + 1) == .comma) {
try renderToken(r, ident + 1, .space); // ,
try renderIdentifier(r, ident + 2, .none, .preserve_when_shadowing); // index
break :blk ident + 3;
@@ -1293,13 +1288,13 @@ fn renderWhile(r: *Render, while_node: Ast.full.While, space: Space) Error!void
last_prefix_token = pipe;
}
- if (while_node.ast.cont_expr != 0) {
+ if (while_node.ast.cont_expr.unwrap()) |cont_expr| {
try renderToken(r, last_prefix_token, .space);
- const lparen = tree.firstToken(while_node.ast.cont_expr) - 1;
+ const lparen = tree.firstToken(cont_expr) - 1;
try renderToken(r, lparen - 1, .space); // :
try renderToken(r, lparen, .none); // lparen
- try renderExpression(r, while_node.ast.cont_expr, .none);
- last_prefix_token = tree.lastToken(while_node.ast.cont_expr) + 1; // rparen
+ try renderExpression(r, cont_expr, .none);
+ last_prefix_token = tree.lastToken(cont_expr) + 1; // rparen
}
try renderThenElse(
@@ -1317,15 +1312,14 @@ fn renderThenElse(
r: *Render,
last_prefix_token: Ast.TokenIndex,
then_expr: Ast.Node.Index,
- else_token: Ast.TokenIndex,
+ else_token: ?Ast.TokenIndex,
maybe_error_token: ?Ast.TokenIndex,
- else_expr: Ast.Node.Index,
+ opt_else_expr: Ast.Node.OptionalIndex,
space: Space,
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const node_tags = tree.nodes.items(.tag);
- const then_expr_is_block = nodeIsBlock(node_tags[then_expr]);
+ const then_expr_is_block = nodeIsBlock(tree.nodeTag(then_expr));
const indent_then_expr = !then_expr_is_block and
!tree.tokensOnSameLine(last_prefix_token, tree.firstToken(then_expr));
@@ -1341,7 +1335,7 @@ fn renderThenElse(
try renderToken(r, last_prefix_token, .space);
}
- if (else_expr != 0) {
+ if (opt_else_expr.unwrap()) |else_expr| {
if (indent_then_expr) {
try renderExpression(r, then_expr, .newline);
} else {
@@ -1350,18 +1344,18 @@ fn renderThenElse(
if (indent_then_expr) ais.popIndent();
- var last_else_token = else_token;
+ var last_else_token = else_token.?;
if (maybe_error_token) |error_token| {
- try renderToken(r, else_token, .space); // else
+ try renderToken(r, last_else_token, .space); // else
try renderToken(r, error_token - 1, .none); // |
try renderIdentifier(r, error_token, .none, .preserve_when_shadowing); // identifier
last_else_token = error_token + 1; // |
}
const indent_else_expr = indent_then_expr and
- !nodeIsBlock(node_tags[else_expr]) and
- !nodeIsIfForWhileSwitch(node_tags[else_expr]);
+ !nodeIsBlock(tree.nodeTag(else_expr)) and
+ !nodeIsIfForWhileSwitch(tree.nodeTag(else_expr));
if (indent_else_expr) {
try ais.pushIndent(.normal);
try renderToken(r, last_else_token, .newline);
@@ -1398,21 +1392,21 @@ fn renderFor(r: *Render, for_node: Ast.full.For, space: Space) Error!void {
var cur = for_node.payload_token;
const pipe = std.mem.indexOfScalarPos(std.zig.Token.Tag, token_tags, cur, .pipe).?;
- if (token_tags[pipe - 1] == .comma) {
+ if (tree.tokenTag(@intCast(pipe - 1)) == .comma) {
try ais.pushIndent(.normal);
try renderToken(r, cur - 1, .newline); // |
while (true) {
- if (token_tags[cur] == .asterisk) {
+ if (tree.tokenTag(cur) == .asterisk) {
try renderToken(r, cur, .none); // *
cur += 1;
}
try renderIdentifier(r, cur, .none, .preserve_when_shadowing); // identifier
cur += 1;
- if (token_tags[cur] == .comma) {
+ if (tree.tokenTag(cur) == .comma) {
try renderToken(r, cur, .newline); // ,
cur += 1;
}
- if (token_tags[cur] == .pipe) {
+ if (tree.tokenTag(cur) == .pipe) {
break;
}
}
@@ -1420,17 +1414,17 @@ fn renderFor(r: *Render, for_node: Ast.full.For, space: Space) Error!void {
} else {
try renderToken(r, cur - 1, .none); // |
while (true) {
- if (token_tags[cur] == .asterisk) {
+ if (tree.tokenTag(cur) == .asterisk) {
try renderToken(r, cur, .none); // *
cur += 1;
}
try renderIdentifier(r, cur, .none, .preserve_when_shadowing); // identifier
cur += 1;
- if (token_tags[cur] == .comma) {
+ if (tree.tokenTag(cur) == .comma) {
try renderToken(r, cur, .space); // ,
cur += 1;
}
- if (token_tags[cur] == .pipe) {
+ if (tree.tokenTag(cur) == .pipe) {
break;
}
}
@@ -1456,7 +1450,7 @@ fn renderContainerField(
const tree = r.tree;
const ais = r.ais;
var field = field_param;
- if (container != .tuple) field.convertToNonTupleLike(tree.nodes);
+ if (container != .tuple) field.convertToNonTupleLike(&tree);
const quote: QuoteBehavior = switch (container) {
.@"enum" => .eagerly_unquote_except_underscore,
.tuple, .other => .eagerly_unquote,
@@ -1465,67 +1459,74 @@ fn renderContainerField(
if (field.comptime_token) |t| {
try renderToken(r, t, .space); // comptime
}
- if (field.ast.type_expr == 0 and field.ast.value_expr == 0) {
- if (field.ast.align_expr != 0) {
+ if (field.ast.type_expr == .none and field.ast.value_expr == .none) {
+ if (field.ast.align_expr.unwrap()) |align_expr| {
try renderIdentifier(r, field.ast.main_token, .space, quote); // name
- const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+ const lparen_token = tree.firstToken(align_expr) - 1;
const align_kw = lparen_token - 1;
- const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+ const rparen_token = tree.lastToken(align_expr) + 1;
try renderToken(r, align_kw, .none); // align
try renderToken(r, lparen_token, .none); // (
- try renderExpression(r, field.ast.align_expr, .none); // alignment
+ try renderExpression(r, align_expr, .none); // alignment
return renderToken(r, rparen_token, .space); // )
}
return renderIdentifierComma(r, field.ast.main_token, space, quote); // name
}
- if (field.ast.type_expr != 0 and field.ast.value_expr == 0) {
+ if (field.ast.type_expr != .none and field.ast.value_expr == .none) {
+ const type_expr = field.ast.type_expr.unwrap().?;
if (!field.ast.tuple_like) {
try renderIdentifier(r, field.ast.main_token, .none, quote); // name
try renderToken(r, field.ast.main_token + 1, .space); // :
}
- if (field.ast.align_expr != 0) {
- try renderExpression(r, field.ast.type_expr, .space); // type
- const align_token = tree.firstToken(field.ast.align_expr) - 2;
+ if (field.ast.align_expr.unwrap()) |align_expr| {
+ try renderExpression(r, type_expr, .space); // type
+ const align_token = tree.firstToken(align_expr) - 2;
try renderToken(r, align_token, .none); // align
try renderToken(r, align_token + 1, .none); // (
- try renderExpression(r, field.ast.align_expr, .none); // alignment
- const rparen = tree.lastToken(field.ast.align_expr) + 1;
+ try renderExpression(r, align_expr, .none); // alignment
+ const rparen = tree.lastToken(align_expr) + 1;
return renderTokenComma(r, rparen, space); // )
} else {
- return renderExpressionComma(r, field.ast.type_expr, space); // type
+ return renderExpressionComma(r, type_expr, space); // type
}
}
- if (field.ast.type_expr == 0 and field.ast.value_expr != 0) {
+ if (field.ast.type_expr == .none and field.ast.value_expr != .none) {
+ const value_expr = field.ast.value_expr.unwrap().?;
+
try renderIdentifier(r, field.ast.main_token, .space, quote); // name
- if (field.ast.align_expr != 0) {
- const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+ if (field.ast.align_expr.unwrap()) |align_expr| {
+ const lparen_token = tree.firstToken(align_expr) - 1;
const align_kw = lparen_token - 1;
- const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+ const rparen_token = tree.lastToken(align_expr) + 1;
try renderToken(r, align_kw, .none); // align
try renderToken(r, lparen_token, .none); // (
- try renderExpression(r, field.ast.align_expr, .none); // alignment
+ try renderExpression(r, align_expr, .none); // alignment
try renderToken(r, rparen_token, .space); // )
}
try renderToken(r, field.ast.main_token + 1, .space); // =
- return renderExpressionComma(r, field.ast.value_expr, space); // value
+ return renderExpressionComma(r, value_expr, space); // value
}
if (!field.ast.tuple_like) {
try renderIdentifier(r, field.ast.main_token, .none, quote); // name
try renderToken(r, field.ast.main_token + 1, .space); // :
}
- try renderExpression(r, field.ast.type_expr, .space); // type
- if (field.ast.align_expr != 0) {
- const lparen_token = tree.firstToken(field.ast.align_expr) - 1;
+ const type_expr = field.ast.type_expr.unwrap().?;
+ const value_expr = field.ast.value_expr.unwrap().?;
+
+ try renderExpression(r, type_expr, .space); // type
+
+ if (field.ast.align_expr.unwrap()) |align_expr| {
+ const lparen_token = tree.firstToken(align_expr) - 1;
const align_kw = lparen_token - 1;
- const rparen_token = tree.lastToken(field.ast.align_expr) + 1;
+ const rparen_token = tree.lastToken(align_expr) + 1;
try renderToken(r, align_kw, .none); // align
try renderToken(r, lparen_token, .none); // (
- try renderExpression(r, field.ast.align_expr, .none); // alignment
+ try renderExpression(r, align_expr, .none); // alignment
try renderToken(r, rparen_token, .space); // )
}
- const eq_token = tree.firstToken(field.ast.value_expr) - 1;
+ const eq_token = tree.firstToken(value_expr) - 1;
const eq_space: Space = if (tree.tokensOnSameLine(eq_token, eq_token + 1)) .space else .newline;
try ais.pushIndent(.after_equals);
@@ -1533,19 +1534,18 @@ fn renderContainerField(
if (eq_space == .space) {
ais.popIndent();
- try renderExpressionComma(r, field.ast.value_expr, space); // value
+ try renderExpressionComma(r, value_expr, space); // value
return;
}
- const token_tags = tree.tokens.items(.tag);
- const maybe_comma = tree.lastToken(field.ast.value_expr) + 1;
+ const maybe_comma = tree.lastToken(value_expr) + 1;
- if (token_tags[maybe_comma] == .comma) {
- try renderExpression(r, field.ast.value_expr, .none); // value
+ if (tree.tokenTag(maybe_comma) == .comma) {
+ try renderExpression(r, value_expr, .none); // value
ais.popIndent();
try renderToken(r, maybe_comma, .newline);
} else {
- try renderExpression(r, field.ast.value_expr, space); // value
+ try renderExpression(r, value_expr, space); // value
ais.popIndent();
}
}
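
The `.none` comparisons and `unwrap()` calls above come from `type_expr`, `value_expr`, and `align_expr` changing from a bare `Node.Index` (with 0 as the "absent" sentinel, valid only because node 0 is always the root) to `Ast.Node.OptionalIndex`. A sketch of the assumed shape, using a maxInt sentinel:

    pub const OptionalIndex = enum(u32) {
        none = std.math.maxInt(u32),
        _,

        pub fn unwrap(opt: OptionalIndex) ?Index {
            return if (opt == .none) null else @enumFromInt(@intFromEnum(opt));
        }
    };

`unwrap()` in an `if` capture replaces the old `!= 0` check, and `unwrap().?` asserts presence where the surrounding logic already guarantees it.
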
@@ -1558,8 +1558,6 @@ fn renderBuiltinCall(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
try renderToken(r, builtin_token, .none); // @name
@@ -1572,8 +1570,8 @@ fn renderBuiltinCall(
const slice = tree.tokenSlice(builtin_token);
if (mem.eql(u8, slice, "@import")) f: {
const param = params[0];
- const str_lit_token = main_tokens[param];
- assert(token_tags[str_lit_token] == .string_literal);
+ const str_lit_token = tree.nodeMainToken(param);
+ assert(tree.tokenTag(str_lit_token) == .string_literal);
const token_bytes = tree.tokenSlice(str_lit_token);
const imported_string = std.zig.string_literal.parseAlloc(r.gpa, token_bytes) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
@@ -1592,13 +1590,13 @@ fn renderBuiltinCall(
const last_param = params[params.len - 1];
const after_last_param_token = tree.lastToken(last_param) + 1;
- if (token_tags[after_last_param_token] != .comma) {
+ if (tree.tokenTag(after_last_param_token) != .comma) {
// Render all on one line, no trailing comma.
try renderToken(r, builtin_token + 1, .none); // (
for (params, 0..) |param_node, i| {
const first_param_token = tree.firstToken(param_node);
- if (token_tags[first_param_token] == .multiline_string_literal_line or
+ if (tree.tokenTag(first_param_token) == .multiline_string_literal_line or
hasSameLineComment(tree, first_param_token - 1))
{
try ais.pushIndent(.normal);
@@ -1633,11 +1631,9 @@ fn renderBuiltinCall(
fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const token_starts = tree.tokens.items(.start);
const after_fn_token = fn_proto.ast.fn_token + 1;
- const lparen = if (token_tags[after_fn_token] == .identifier) blk: {
+ const lparen = if (tree.tokenTag(after_fn_token) == .identifier) blk: {
try renderToken(r, fn_proto.ast.fn_token, .space); // fn
try renderIdentifier(r, after_fn_token, .none, .preserve_when_shadowing); // name
break :blk after_fn_token + 1;
@@ -1645,41 +1641,42 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
try renderToken(r, fn_proto.ast.fn_token, .space); // fn
break :blk fn_proto.ast.fn_token + 1;
};
- assert(token_tags[lparen] == .l_paren);
+ assert(tree.tokenTag(lparen) == .l_paren);
- const maybe_bang = tree.firstToken(fn_proto.ast.return_type) - 1;
+ const return_type = fn_proto.ast.return_type.unwrap().?;
+ const maybe_bang = tree.firstToken(return_type) - 1;
const rparen = blk: {
// These may appear in any order, so we have to check the token start
// offsets to find out which is first.
- var rparen = if (token_tags[maybe_bang] == .bang) maybe_bang - 1 else maybe_bang;
- var smallest_start = token_starts[maybe_bang];
- if (fn_proto.ast.align_expr != 0) {
- const tok = tree.firstToken(fn_proto.ast.align_expr) - 3;
- const start = token_starts[tok];
+ var rparen = if (tree.tokenTag(maybe_bang) == .bang) maybe_bang - 1 else maybe_bang;
+ var smallest_start = tree.tokenStart(maybe_bang);
+ if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
+ const tok = tree.firstToken(align_expr) - 3;
+ const start = tree.tokenStart(tok);
if (start < smallest_start) {
rparen = tok;
smallest_start = start;
}
}
- if (fn_proto.ast.addrspace_expr != 0) {
- const tok = tree.firstToken(fn_proto.ast.addrspace_expr) - 3;
- const start = token_starts[tok];
+ if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
+ const tok = tree.firstToken(addrspace_expr) - 3;
+ const start = tree.tokenStart(tok);
if (start < smallest_start) {
rparen = tok;
smallest_start = start;
}
}
- if (fn_proto.ast.section_expr != 0) {
- const tok = tree.firstToken(fn_proto.ast.section_expr) - 3;
- const start = token_starts[tok];
+ if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
+ const tok = tree.firstToken(section_expr) - 3;
+ const start = tree.tokenStart(tok);
if (start < smallest_start) {
rparen = tok;
smallest_start = start;
}
}
- if (fn_proto.ast.callconv_expr != 0) {
- const tok = tree.firstToken(fn_proto.ast.callconv_expr) - 3;
- const start = token_starts[tok];
+ if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
+ const tok = tree.firstToken(callconv_expr) - 3;
+ const start = tree.tokenStart(tok);
if (start < smallest_start) {
rparen = tok;
smallest_start = start;
@@ -1687,11 +1684,11 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
}
break :blk rparen;
};
- assert(token_tags[rparen] == .r_paren);
+ assert(tree.tokenTag(rparen) == .r_paren);
// The params list is a sparse set that does *not* include anytype or ... parameters.
- const trailing_comma = token_tags[rparen - 1] == .comma;
+ const trailing_comma = tree.tokenTag(rparen - 1) == .comma;
if (!trailing_comma and !hasComment(tree, lparen, rparen)) {
// Render all on one line, no trailing comma.
try renderToken(r, lparen, .none); // (
@@ -1700,7 +1697,7 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
var last_param_token = lparen;
while (true) {
last_param_token += 1;
- switch (token_tags[last_param_token]) {
+ switch (tree.tokenTag(last_param_token)) {
.doc_comment => {
try renderToken(r, last_param_token, .newline);
continue;
@@ -1725,15 +1722,15 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
},
else => {}, // Parameter type without a name.
}
- if (token_tags[last_param_token] == .identifier and
- token_tags[last_param_token + 1] == .colon)
+ if (tree.tokenTag(last_param_token) == .identifier and
+ tree.tokenTag(last_param_token + 1) == .colon)
{
try renderIdentifier(r, last_param_token, .none, .preserve_when_shadowing); // name
last_param_token += 1;
try renderToken(r, last_param_token, .space); // :
last_param_token += 1;
}
- if (token_tags[last_param_token] == .keyword_anytype) {
+ if (tree.tokenTag(last_param_token) == .keyword_anytype) {
try renderToken(r, last_param_token, .none); // anytype
continue;
}
@@ -1751,7 +1748,7 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
var last_param_token = lparen;
while (true) {
last_param_token += 1;
- switch (token_tags[last_param_token]) {
+ switch (tree.tokenTag(last_param_token)) {
.doc_comment => {
try renderToken(r, last_param_token, .newline);
continue;
@@ -1767,24 +1764,24 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
.identifier => {},
.keyword_anytype => {
try renderToken(r, last_param_token, .comma); // anytype
- if (token_tags[last_param_token + 1] == .comma)
+ if (tree.tokenTag(last_param_token + 1) == .comma)
last_param_token += 1;
continue;
},
.r_paren => break,
else => {}, // Parameter type without a name.
}
- if (token_tags[last_param_token] == .identifier and
- token_tags[last_param_token + 1] == .colon)
+ if (tree.tokenTag(last_param_token) == .identifier and
+ tree.tokenTag(last_param_token + 1) == .colon)
{
try renderIdentifier(r, last_param_token, .none, .preserve_when_shadowing); // name
last_param_token += 1;
try renderToken(r, last_param_token, .space); // :
last_param_token += 1;
}
- if (token_tags[last_param_token] == .keyword_anytype) {
+ if (tree.tokenTag(last_param_token) == .keyword_anytype) {
try renderToken(r, last_param_token, .comma); // anytype
- if (token_tags[last_param_token + 1] == .comma)
+ if (tree.tokenTag(last_param_token + 1) == .comma)
last_param_token += 1;
continue;
}
@@ -1794,60 +1791,62 @@ fn renderFnProto(r: *Render, fn_proto: Ast.full.FnProto, space: Space) Error!voi
try renderExpression(r, param, .comma);
ais.popSpace();
last_param_token = tree.lastToken(param);
- if (token_tags[last_param_token + 1] == .comma) last_param_token += 1;
+ if (tree.tokenTag(last_param_token + 1) == .comma) last_param_token += 1;
}
ais.popIndent();
}
try renderToken(r, rparen, .space); // )
- if (fn_proto.ast.align_expr != 0) {
- const align_lparen = tree.firstToken(fn_proto.ast.align_expr) - 1;
- const align_rparen = tree.lastToken(fn_proto.ast.align_expr) + 1;
+ if (fn_proto.ast.align_expr.unwrap()) |align_expr| {
+ const align_lparen = tree.firstToken(align_expr) - 1;
+ const align_rparen = tree.lastToken(align_expr) + 1;
try renderToken(r, align_lparen - 1, .none); // align
try renderToken(r, align_lparen, .none); // (
- try renderExpression(r, fn_proto.ast.align_expr, .none);
+ try renderExpression(r, align_expr, .none);
try renderToken(r, align_rparen, .space); // )
}
- if (fn_proto.ast.addrspace_expr != 0) {
- const align_lparen = tree.firstToken(fn_proto.ast.addrspace_expr) - 1;
- const align_rparen = tree.lastToken(fn_proto.ast.addrspace_expr) + 1;
+ if (fn_proto.ast.addrspace_expr.unwrap()) |addrspace_expr| {
+ const align_lparen = tree.firstToken(addrspace_expr) - 1;
+ const align_rparen = tree.lastToken(addrspace_expr) + 1;
try renderToken(r, align_lparen - 1, .none); // addrspace
try renderToken(r, align_lparen, .none); // (
- try renderExpression(r, fn_proto.ast.addrspace_expr, .none);
+ try renderExpression(r, addrspace_expr, .none);
try renderToken(r, align_rparen, .space); // )
}
- if (fn_proto.ast.section_expr != 0) {
- const section_lparen = tree.firstToken(fn_proto.ast.section_expr) - 1;
- const section_rparen = tree.lastToken(fn_proto.ast.section_expr) + 1;
+ if (fn_proto.ast.section_expr.unwrap()) |section_expr| {
+ const section_lparen = tree.firstToken(section_expr) - 1;
+ const section_rparen = tree.lastToken(section_expr) + 1;
try renderToken(r, section_lparen - 1, .none); // section
try renderToken(r, section_lparen, .none); // (
- try renderExpression(r, fn_proto.ast.section_expr, .none);
+ try renderExpression(r, section_expr, .none);
try renderToken(r, section_rparen, .space); // )
}
- // Keep in sync with logic in `renderMember`. Search this file for the marker PROMOTE_CALLCONV_INLINE
- const is_callconv_inline = mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodes.items(.main_token)[fn_proto.ast.callconv_expr]));
- const is_declaration = fn_proto.name_token != null;
- if (fn_proto.ast.callconv_expr != 0 and !(is_declaration and is_callconv_inline)) {
- const callconv_lparen = tree.firstToken(fn_proto.ast.callconv_expr) - 1;
- const callconv_rparen = tree.lastToken(fn_proto.ast.callconv_expr) + 1;
+ if (fn_proto.ast.callconv_expr.unwrap()) |callconv_expr| {
+ // Keep in sync with logic in `renderMember`. Search this file for the marker PROMOTE_CALLCONV_INLINE
+ const is_callconv_inline = mem.eql(u8, "@\"inline\"", tree.tokenSlice(tree.nodeMainToken(callconv_expr)));
+ const is_declaration = fn_proto.name_token != null;
+ if (!(is_declaration and is_callconv_inline)) {
+ const callconv_lparen = tree.firstToken(callconv_expr) - 1;
+ const callconv_rparen = tree.lastToken(callconv_expr) + 1;
- try renderToken(r, callconv_lparen - 1, .none); // callconv
- try renderToken(r, callconv_lparen, .none); // (
- try renderExpression(r, fn_proto.ast.callconv_expr, .none);
- try renderToken(r, callconv_rparen, .space); // )
+ try renderToken(r, callconv_lparen - 1, .none); // callconv
+ try renderToken(r, callconv_lparen, .none); // (
+ try renderExpression(r, callconv_expr, .none);
+ try renderToken(r, callconv_rparen, .space); // )
+ }
}
- if (token_tags[maybe_bang] == .bang) {
+ if (tree.tokenTag(maybe_bang) == .bang) {
try renderToken(r, maybe_bang, .none); // !
}
- return renderExpression(r, fn_proto.ast.return_type, space);
+ return renderExpression(r, return_type, space);
}
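
The rparen search above orders the optional clauses by byte offset, since `align`, `addrspace`, `linksection`, and `callconv` may appear in any order. `tree.tokenStart` is assumed to be a thin wrapper over the `.start` column, along the lines of:

    pub fn tokenStart(tree: *const Ast, token_index: TokenIndex) u32 {
        return tree.tokens.items(.start)[token_index];
    }
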
fn renderSwitchCase(
@@ -1857,9 +1856,7 @@ fn renderSwitchCase(
) Error!void {
const ais = r.ais;
const tree = r.tree;
- const node_tags = tree.nodes.items(.tag);
- const token_tags = tree.tokens.items(.tag);
- const trailing_comma = token_tags[switch_case.ast.arrow_token - 1] == .comma;
+ const trailing_comma = tree.tokenTag(switch_case.ast.arrow_token - 1) == .comma;
const has_comment_before_arrow = blk: {
if (switch_case.ast.values.len == 0) break :blk false;
break :blk hasComment(tree, tree.firstToken(switch_case.ast.values[0]), switch_case.ast.arrow_token);
@@ -1886,7 +1883,7 @@ fn renderSwitchCase(
}
// Render the arrow and everything after it
- const pre_target_space = if (node_tags[switch_case.ast.target_expr] == .multiline_string_literal)
+ const pre_target_space = if (tree.nodeTag(switch_case.ast.target_expr) == .multiline_string_literal)
// Newline gets inserted when rendering the target expr.
Space.none
else
@@ -1896,12 +1893,12 @@ fn renderSwitchCase(
if (switch_case.payload_token) |payload_token| {
try renderToken(r, payload_token - 1, .none); // pipe
- const ident = payload_token + @intFromBool(token_tags[payload_token] == .asterisk);
- if (token_tags[payload_token] == .asterisk) {
+ const ident = payload_token + @intFromBool(tree.tokenTag(payload_token) == .asterisk);
+ if (tree.tokenTag(payload_token) == .asterisk) {
try renderToken(r, payload_token, .none); // asterisk
}
try renderIdentifier(r, ident, .none, .preserve_when_shadowing); // identifier
- if (token_tags[ident + 1] == .comma) {
+ if (tree.tokenTag(ident + 1) == .comma) {
try renderToken(r, ident + 1, .space); // ,
try renderIdentifier(r, ident + 2, .none, .preserve_when_shadowing); // identifier
try renderToken(r, ident + 3, pre_target_space); // pipe
@@ -1921,12 +1918,9 @@ fn renderBlock(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const lbrace = tree.nodes.items(.main_token)[block_node];
+ const lbrace = tree.nodeMainToken(block_node);
- if (token_tags[lbrace - 1] == .colon and
- token_tags[lbrace - 2] == .identifier)
- {
+ if (tree.isTokenPrecededByTags(lbrace, &.{ .identifier, .colon })) {
try renderIdentifier(r, lbrace - 2, .none, .eagerly_unquote); // identifier
try renderToken(r, lbrace - 1, .space); // :
}
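
`isTokenPrecededByTags` folds the two hand-written tag checks for a labeled block into one call. A plausible implementation, under the assumption that it simply matches a suffix of the tag column (not verbatim from this commit):

    pub fn isTokenPrecededByTags(
        tree: *const Ast,
        token_index: TokenIndex,
        expected: []const Token.Tag,
    ) bool {
        return std.mem.endsWith(Token.Tag, tree.tokens.items(.tag)[0..token_index], expected);
    }
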
@@ -1948,13 +1942,12 @@ fn finishRenderBlock(
space: Space,
) Error!void {
const tree = r.tree;
- const node_tags = tree.nodes.items(.tag);
const ais = r.ais;
for (statements, 0..) |stmt, i| {
if (i != 0) try renderExtraNewline(r, stmt);
if (r.fixups.omit_nodes.contains(stmt)) continue;
try ais.pushSpace(.semicolon);
- switch (node_tags[stmt]) {
+ switch (tree.nodeTag(stmt)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
@@ -1978,12 +1971,13 @@ fn renderStructInit(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- if (struct_init.ast.type_expr == 0) {
- try renderToken(r, struct_init.ast.lbrace - 1, .none); // .
+
+ if (struct_init.ast.type_expr.unwrap()) |type_expr| {
+ try renderExpression(r, type_expr, .none); // T
} else {
- try renderExpression(r, struct_init.ast.type_expr, .none); // T
+ try renderToken(r, struct_init.ast.lbrace - 1, .none); // .
}
+
if (struct_init.ast.fields.len == 0) {
try ais.pushIndent(.normal);
try renderToken(r, struct_init.ast.lbrace, .none); // lbrace
@@ -1992,7 +1986,7 @@ fn renderStructInit(
}
const rbrace = tree.lastToken(struct_node);
- const trailing_comma = token_tags[rbrace - 1] == .comma;
+ const trailing_comma = tree.tokenTag(rbrace - 1) == .comma;
if (trailing_comma or hasComment(tree, struct_init.ast.lbrace, rbrace)) {
// Render one field init per line.
try ais.pushIndent(.normal);
@@ -2002,9 +1996,8 @@ fn renderStructInit(
try renderIdentifier(r, struct_init.ast.lbrace + 2, .space, .eagerly_unquote); // name
// Don't output a space after the = if expression is a multiline string,
// since then it will start on the next line.
- const nodes = tree.nodes.items(.tag);
const field_node = struct_init.ast.fields[0];
- const expr = nodes[field_node];
+ const expr = tree.nodeTag(field_node);
var space_after_equal: Space = if (expr == .multiline_string_literal) .none else .space;
try renderToken(r, struct_init.ast.lbrace + 3, space_after_equal); // =
@@ -2017,7 +2010,7 @@ fn renderStructInit(
try renderExtraNewlineToken(r, init_token - 3);
try renderToken(r, init_token - 3, .none); // .
try renderIdentifier(r, init_token - 2, .space, .eagerly_unquote); // name
- space_after_equal = if (nodes[field_init] == .multiline_string_literal) .none else .space;
+ space_after_equal = if (tree.nodeTag(field_init) == .multiline_string_literal) .none else .space;
try renderToken(r, init_token - 1, space_after_equal); // =
try ais.pushSpace(.comma);
@@ -2050,12 +2043,11 @@ fn renderArrayInit(
const tree = r.tree;
const ais = r.ais;
const gpa = r.gpa;
- const token_tags = tree.tokens.items(.tag);
- if (array_init.ast.type_expr == 0) {
- try renderToken(r, array_init.ast.lbrace - 1, .none); // .
+ if (array_init.ast.type_expr.unwrap()) |type_expr| {
+ try renderExpression(r, type_expr, .none); // T
} else {
- try renderExpression(r, array_init.ast.type_expr, .none); // T
+ try renderToken(r, array_init.ast.lbrace - 1, .none); // .
}
if (array_init.ast.elements.len == 0) {
@@ -2067,14 +2059,14 @@ fn renderArrayInit(
const last_elem = array_init.ast.elements[array_init.ast.elements.len - 1];
const last_elem_token = tree.lastToken(last_elem);
- const trailing_comma = token_tags[last_elem_token + 1] == .comma;
+ const trailing_comma = tree.tokenTag(last_elem_token + 1) == .comma;
const rbrace = if (trailing_comma) last_elem_token + 2 else last_elem_token + 1;
- assert(token_tags[rbrace] == .r_brace);
+ assert(tree.tokenTag(rbrace) == .r_brace);
if (array_init.ast.elements.len == 1) {
const only_elem = array_init.ast.elements[0];
const first_token = tree.firstToken(only_elem);
- if (token_tags[first_token] != .multiline_string_literal_line and
+ if (tree.tokenTag(first_token) != .multiline_string_literal_line and
!anythingBetween(tree, last_elem_token, rbrace))
{
try renderToken(r, array_init.ast.lbrace, .none);
@@ -2137,7 +2129,7 @@ fn renderArrayInit(
}
const maybe_comma = expr_last_token + 1;
- if (token_tags[maybe_comma] == .comma) {
+ if (tree.tokenTag(maybe_comma) == .comma) {
if (hasSameLineComment(tree, maybe_comma))
break :sec_end i - this_line_size + 1;
}
@@ -2277,13 +2269,12 @@ fn renderContainerDecl(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
if (container_decl.layout_token) |layout_token| {
try renderToken(r, layout_token, .space);
}
- const container: Container = switch (token_tags[container_decl.ast.main_token]) {
+ const container: Container = switch (tree.tokenTag(container_decl.ast.main_token)) {
.keyword_enum => .@"enum",
.keyword_struct => for (container_decl.ast.members) |member| {
if (tree.fullContainerField(member)) |field| if (!field.ast.tuple_like) break .other;
@@ -2296,10 +2287,10 @@ fn renderContainerDecl(
try renderToken(r, container_decl.ast.main_token, .none); // union
try renderToken(r, enum_token - 1, .none); // lparen
try renderToken(r, enum_token, .none); // enum
- if (container_decl.ast.arg != 0) {
+ if (container_decl.ast.arg.unwrap()) |arg| {
try renderToken(r, enum_token + 1, .none); // lparen
- try renderExpression(r, container_decl.ast.arg, .none);
- const rparen = tree.lastToken(container_decl.ast.arg) + 1;
+ try renderExpression(r, arg, .none);
+ const rparen = tree.lastToken(arg) + 1;
try renderToken(r, rparen, .none); // rparen
try renderToken(r, rparen + 1, .space); // rparen
lbrace = rparen + 2;
@@ -2307,11 +2298,11 @@ fn renderContainerDecl(
try renderToken(r, enum_token + 1, .space); // rparen
lbrace = enum_token + 2;
}
- } else if (container_decl.ast.arg != 0) {
+ } else if (container_decl.ast.arg.unwrap()) |arg| {
try renderToken(r, container_decl.ast.main_token, .none); // union
try renderToken(r, container_decl.ast.main_token + 1, .none); // lparen
- try renderExpression(r, container_decl.ast.arg, .none);
- const rparen = tree.lastToken(container_decl.ast.arg) + 1;
+ try renderExpression(r, arg, .none);
+ const rparen = tree.lastToken(arg) + 1;
try renderToken(r, rparen, .space); // rparen
lbrace = rparen + 1;
} else {
@@ -2320,9 +2311,10 @@ fn renderContainerDecl(
}
const rbrace = tree.lastToken(container_decl_node);
+
if (container_decl.ast.members.len == 0) {
try ais.pushIndent(.normal);
- if (token_tags[lbrace + 1] == .container_doc_comment) {
+ if (tree.tokenTag(lbrace + 1) == .container_doc_comment) {
try renderToken(r, lbrace, .newline); // lbrace
try renderContainerDocComments(r, lbrace + 1);
} else {
@@ -2332,7 +2324,7 @@ fn renderContainerDecl(
return renderToken(r, rbrace, space); // rbrace
}
- const src_has_trailing_comma = token_tags[rbrace - 1] == .comma;
+ const src_has_trailing_comma = tree.tokenTag(rbrace - 1) == .comma;
if (!src_has_trailing_comma) one_line: {
// We print all the members in-line unless one of the following conditions is true:
@@ -2342,10 +2334,10 @@ fn renderContainerDecl(
}
// 2. The container has a container comment.
- if (token_tags[lbrace + 1] == .container_doc_comment) break :one_line;
+ if (tree.tokenTag(lbrace + 1) == .container_doc_comment) break :one_line;
// 3. A member of the container has a doc comment.
- for (token_tags[lbrace + 1 .. rbrace - 1]) |tag| {
+ for (tree.tokens.items(.tag)[lbrace + 1 .. rbrace - 1]) |tag| {
if (tag == .doc_comment) break :one_line;
}
@@ -2365,12 +2357,12 @@ fn renderContainerDecl(
// One member per line.
try ais.pushIndent(.normal);
try renderToken(r, lbrace, .newline); // lbrace
- if (token_tags[lbrace + 1] == .container_doc_comment) {
+ if (tree.tokenTag(lbrace + 1) == .container_doc_comment) {
try renderContainerDocComments(r, lbrace + 1);
}
for (container_decl.ast.members, 0..) |member, i| {
if (i != 0) try renderExtraNewline(r, member);
- switch (tree.nodes.items(.tag)[member]) {
+ switch (tree.nodeTag(member)) {
// For container fields, ensure a trailing comma is added if necessary.
.container_field_init,
.container_field_align,
@@ -2396,7 +2388,6 @@ fn renderAsm(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
try renderToken(r, asm_node.ast.asm_token, .space); // asm
@@ -2422,13 +2413,13 @@ fn renderAsm(
while (true) : (tok_i += 1) {
try renderToken(r, tok_i, .none);
tok_i += 1;
- switch (token_tags[tok_i]) {
+ switch (tree.tokenTag(tok_i)) {
.r_paren => {
ais.popIndent();
return renderToken(r, tok_i, space);
},
.comma => {
- if (token_tags[tok_i + 1] == .r_paren) {
+ if (tree.tokenTag(tok_i + 1) == .r_paren) {
ais.popIndent();
return renderToken(r, tok_i + 1, space);
} else {
@@ -2480,7 +2471,7 @@ fn renderAsm(
ais.popSpace();
const comma_or_colon = tree.lastToken(asm_output) + 1;
ais.popIndent();
- break :colon2 switch (token_tags[comma_or_colon]) {
+ break :colon2 switch (tree.tokenTag(comma_or_colon)) {
.comma => comma_or_colon + 1,
else => comma_or_colon,
};
@@ -2516,7 +2507,7 @@ fn renderAsm(
ais.popSpace();
const comma_or_colon = tree.lastToken(asm_input) + 1;
ais.popIndent();
- break :colon3 switch (token_tags[comma_or_colon]) {
+ break :colon3 switch (tree.tokenTag(comma_or_colon)) {
.comma => comma_or_colon + 1,
else => comma_or_colon,
};
@@ -2529,7 +2520,7 @@ fn renderAsm(
const first_clobber = asm_node.first_clobber.?;
var tok_i = first_clobber;
while (true) {
- switch (token_tags[tok_i + 1]) {
+ switch (tree.tokenTag(tok_i + 1)) {
.r_paren => {
ais.setIndentDelta(indent_delta);
try renderToken(r, tok_i, .newline);
@@ -2537,7 +2528,7 @@ fn renderAsm(
return renderToken(r, tok_i + 1, space);
},
.comma => {
- switch (token_tags[tok_i + 2]) {
+ switch (tree.tokenTag(tok_i + 2)) {
.r_paren => {
ais.setIndentDelta(indent_delta);
try renderToken(r, tok_i, .newline);
@@ -2576,7 +2567,6 @@ fn renderParamList(
) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
if (params.len == 0) {
try ais.pushIndent(.normal);
@@ -2587,7 +2577,7 @@ fn renderParamList(
const last_param = params[params.len - 1];
const after_last_param_tok = tree.lastToken(last_param) + 1;
- if (token_tags[after_last_param_tok] == .comma) {
+ if (tree.tokenTag(after_last_param_tok) == .comma) {
try ais.pushIndent(.normal);
try renderToken(r, lparen, .newline); // (
for (params, 0..) |param_node, i| {
@@ -2616,7 +2606,7 @@ fn renderParamList(
if (i + 1 < params.len) {
const comma = tree.lastToken(param_node) + 1;
const next_multiline_string =
- token_tags[tree.firstToken(params[i + 1])] == .multiline_string_literal_line;
+ tree.tokenTag(tree.firstToken(params[i + 1])) == .multiline_string_literal_line;
const comma_space: Space = if (next_multiline_string) .none else .space;
try renderToken(r, comma, comma_space);
}
@@ -2629,9 +2619,8 @@ fn renderParamList(
/// If a comma is present, and `space` is `Space.comma`, render only a single comma.
fn renderExpressionComma(r: *Render, node: Ast.Node.Index, space: Space) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
const maybe_comma = tree.lastToken(node) + 1;
- if (token_tags[maybe_comma] == .comma and space != .comma) {
+ if (tree.tokenTag(maybe_comma) == .comma and space != .comma) {
try renderExpression(r, node, .none);
return renderToken(r, maybe_comma, space);
} else {
@@ -2643,9 +2632,8 @@ fn renderExpressionComma(r: *Render, node: Ast.Node.Index, space: Space) Error!v
/// If a comma is present, and `space` is `Space.comma`, render only a single comma.
fn renderTokenComma(r: *Render, token: Ast.TokenIndex, space: Space) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
const maybe_comma = token + 1;
- if (token_tags[maybe_comma] == .comma and space != .comma) {
+ if (tree.tokenTag(maybe_comma) == .comma and space != .comma) {
try renderToken(r, token, .none);
return renderToken(r, maybe_comma, space);
} else {
@@ -2657,9 +2645,8 @@ fn renderTokenComma(r: *Render, token: Ast.TokenIndex, space: Space) Error!void
/// If a comma is present, and `space` is `Space.comma`, render only a single comma.
fn renderIdentifierComma(r: *Render, token: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
const maybe_comma = token + 1;
- if (token_tags[maybe_comma] == .comma and space != .comma) {
+ if (tree.tokenTag(maybe_comma) == .comma and space != .comma) {
try renderIdentifier(r, token, .none, quote);
return renderToken(r, maybe_comma, space);
} else {
@@ -2709,37 +2696,39 @@ fn renderTokenOverrideSpaceMode(r: *Render, token_index: Ast.TokenIndex, space:
fn renderSpace(r: *Render, token_index: Ast.TokenIndex, lexeme_len: usize, space: Space) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- const token_starts = tree.tokens.items(.start);
- const token_start = token_starts[token_index];
+ const next_token_tag = tree.tokenTag(token_index + 1);
if (space == .skip) return;
- if (space == .comma and token_tags[token_index + 1] != .comma) {
+ if (space == .comma and next_token_tag != .comma) {
try ais.writer().writeByte(',');
}
if (space == .semicolon or space == .comma) ais.enableSpaceMode(space);
defer ais.disableSpaceMode();
- const comment = try renderComments(r, token_start + lexeme_len, token_starts[token_index + 1]);
+ const comment = try renderComments(
+ r,
+ tree.tokenStart(token_index) + lexeme_len,
+ tree.tokenStart(token_index + 1),
+ );
switch (space) {
.none => {},
.space => if (!comment) try ais.writer().writeByte(' '),
.newline => if (!comment) try ais.insertNewline(),
- .comma => if (token_tags[token_index + 1] == .comma) {
+ .comma => if (next_token_tag == .comma) {
try renderToken(r, token_index + 1, .newline);
} else if (!comment) {
try ais.insertNewline();
},
- .comma_space => if (token_tags[token_index + 1] == .comma) {
+ .comma_space => if (next_token_tag == .comma) {
try renderToken(r, token_index + 1, .space);
} else if (!comment) {
try ais.writer().writeByte(' ');
},
- .semicolon => if (token_tags[token_index + 1] == .semicolon) {
+ .semicolon => if (next_token_tag == .semicolon) {
try renderToken(r, token_index + 1, .newline);
} else if (!comment) {
try ais.insertNewline();
@@ -2770,8 +2759,7 @@ const QuoteBehavior = enum {
fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote: QuoteBehavior) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
- assert(token_tags[token_index] == .identifier);
+ assert(tree.tokenTag(token_index) == .identifier);
const lexeme = tokenSliceForRender(tree, token_index);
if (r.fixups.rename_identifiers.get(lexeme)) |mangled| {
@@ -2880,8 +2868,7 @@ fn renderIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, quote
fn renderQuotedIdentifier(r: *Render, token_index: Ast.TokenIndex, space: Space, comptime unquote: bool) !void {
const tree = r.tree;
const ais = r.ais;
- const token_tags = tree.tokens.items(.tag);
- assert(token_tags[token_index] == .identifier);
+ assert(tree.tokenTag(token_index) == .identifier);
const lexeme = tokenSliceForRender(tree, token_index);
assert(lexeme.len >= 3 and lexeme[0] == '@');
@@ -2934,12 +2921,10 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
/// fn_proto should be wrapped and have a trailing comma inserted even if
/// there is none in the source.
fn hasComment(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
- const token_starts = tree.tokens.items(.start);
-
- var i = start_token;
- while (i < end_token) : (i += 1) {
- const start = token_starts[i] + tree.tokenSlice(i).len;
- const end = token_starts[i + 1];
+ for (start_token..end_token) |i| {
+ const token: Ast.TokenIndex = @intCast(i);
+ const start = tree.tokenStart(token) + tree.tokenSlice(token).len;
+ const end = tree.tokenStart(token + 1);
if (mem.indexOf(u8, tree.source[start..end], "//") != null) return true;
}
@@ -2949,16 +2934,11 @@ fn hasComment(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex)
/// Returns true if there exists a multiline string literal between the start
/// of token `start_token` and the start of token `end_token`.
fn hasMultilineString(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
- const token_tags = tree.tokens.items(.tag);
-
- for (token_tags[start_token..end_token]) |tag| {
- switch (tag) {
- .multiline_string_literal_line => return true,
- else => continue,
- }
- }
-
- return false;
+ return std.mem.indexOfScalar(
+ Token.Tag,
+ tree.tokens.items(.tag)[start_token..end_token],
+ .multiline_string_literal_line,
+ ) != null;
}
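
The loop-to-`indexOfScalar` rewrite works because `tree.tokens.items(.tag)` is a contiguous slice of enum values. A self-contained demonstration of the same idiom (hypothetical tags, not the real token set):

    const std = @import("std");

    test "indexOfScalar over an enum slice" {
        const Tag = enum { comma, identifier, multiline_string_literal_line };
        const tags = [_]Tag{ .identifier, .comma, .identifier };
        try std.testing.expectEqual(@as(?usize, null), std.mem.indexOfScalar(Tag, &tags, .multiline_string_literal_line));
        try std.testing.expectEqual(@as(?usize, 1), std.mem.indexOfScalar(Tag, &tags, .comma));
    }
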
/// Assumes that start is the first byte past the previous token and
@@ -3034,18 +3014,17 @@ fn renderExtraNewline(r: *Render, node: Ast.Node.Index) Error!void {
fn renderExtraNewlineToken(r: *Render, token_index: Ast.TokenIndex) Error!void {
const tree = r.tree;
const ais = r.ais;
- const token_starts = tree.tokens.items(.start);
- const token_start = token_starts[token_index];
+ const token_start = tree.tokenStart(token_index);
if (token_start == 0) return;
const prev_token_end = if (token_index == 0)
0
else
- token_starts[token_index - 1] + tokenSliceForRender(tree, token_index - 1).len;
+ tree.tokenStart(token_index - 1) + tokenSliceForRender(tree, token_index - 1).len;
// If there is an immediately preceding comment or doc_comment,
// skip it because the required extra newline has already been rendered.
if (mem.indexOf(u8, tree.source[prev_token_end..token_start], "//") != null) return;
- if (token_index > 0 and tree.tokens.items(.tag)[token_index - 1] == .doc_comment) return;
+ if (tree.isTokenPrecededByTags(token_index, &.{.doc_comment})) return;
// Iterate backwards to the end of the previous token, stopping if a
// non-whitespace character is encountered or two newlines have been found.
@@ -3063,10 +3042,9 @@ fn renderExtraNewlineToken(r: *Render, token_index: Ast.TokenIndex) Error!void {
fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void {
const tree = r.tree;
// Search backwards for the first doc comment.
- const token_tags = tree.tokens.items(.tag);
if (end_token == 0) return;
var tok = end_token - 1;
- while (token_tags[tok] == .doc_comment) {
+ while (tree.tokenTag(tok) == .doc_comment) {
if (tok == 0) break;
tok -= 1;
} else {
@@ -3076,7 +3054,7 @@ fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void {
if (first_tok == end_token) return;
if (first_tok != 0) {
- const prev_token_tag = token_tags[first_tok - 1];
+ const prev_token_tag = tree.tokenTag(first_tok - 1);
// Prevent accidental use of `renderDocComments` for a function argument doc comment
assert(prev_token_tag != .l_paren);
@@ -3086,7 +3064,7 @@ fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void {
}
}
- while (token_tags[tok] == .doc_comment) : (tok += 1) {
+ while (tree.tokenTag(tok) == .doc_comment) : (tok += 1) {
try renderToken(r, tok, .newline);
}
}
@@ -3094,15 +3072,14 @@ fn renderDocComments(r: *Render, end_token: Ast.TokenIndex) Error!void {
/// start_token is first container doc comment token.
fn renderContainerDocComments(r: *Render, start_token: Ast.TokenIndex) Error!void {
const tree = r.tree;
- const token_tags = tree.tokens.items(.tag);
var tok = start_token;
- while (token_tags[tok] == .container_doc_comment) : (tok += 1) {
+ while (tree.tokenTag(tok) == .container_doc_comment) : (tok += 1) {
try renderToken(r, tok, .newline);
}
// Render extra newline if there is one between the final container doc comment and
// the next token. If the next token is a doc comment, that code path
// will have its own logic to insert a newline.
- if (token_tags[tok] != .doc_comment) {
+ if (tree.tokenTag(tok) != .doc_comment) {
try renderExtraNewlineToken(r, tok);
}
}
@@ -3112,11 +3089,10 @@ fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void {
const ais = r.ais;
var buf: [1]Ast.Node.Index = undefined;
const fn_proto = tree.fullFnProto(&buf, fn_proto_node).?;
- const token_tags = tree.tokens.items(.tag);
var it = fn_proto.iterate(tree);
while (it.next()) |param| {
const name_ident = param.name_token.?;
- assert(token_tags[name_ident] == .identifier);
+ assert(tree.tokenTag(name_ident) == .identifier);
const w = ais.writer();
try w.writeAll("_ = ");
try w.writeAll(tokenSliceForRender(r.tree, name_ident));
@@ -3126,7 +3102,7 @@ fn discardAllParams(r: *Render, fn_proto_node: Ast.Node.Index) Error!void {
fn tokenSliceForRender(tree: Ast, token_index: Ast.TokenIndex) []const u8 {
var ret = tree.tokenSlice(token_index);
- switch (tree.tokens.items(.tag)[token_index]) {
+ switch (tree.tokenTag(token_index)) {
.container_doc_comment, .doc_comment => {
ret = mem.trimRight(u8, ret, &std.ascii.whitespace);
},
@@ -3136,8 +3112,7 @@ fn tokenSliceForRender(tree: Ast, token_index: Ast.TokenIndex) []const u8 {
}
fn hasSameLineComment(tree: Ast, token_index: Ast.TokenIndex) bool {
- const token_starts = tree.tokens.items(.start);
- const between_source = tree.source[token_starts[token_index]..token_starts[token_index + 1]];
+ const between_source = tree.source[tree.tokenStart(token_index)..tree.tokenStart(token_index + 1)];
for (between_source) |byte| switch (byte) {
'\n' => return false,
'/' => return true,
@@ -3150,8 +3125,7 @@ fn hasSameLineComment(tree: Ast, token_index: Ast.TokenIndex) bool {
/// start_token and end_token.
fn anythingBetween(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
if (start_token + 1 != end_token) return true;
- const token_starts = tree.tokens.items(.start);
- const between_source = tree.source[token_starts[start_token]..token_starts[start_token + 1]];
+ const between_source = tree.source[tree.tokenStart(start_token)..tree.tokenStart(start_token + 1)];
for (between_source) |byte| switch (byte) {
'/' => return true,
else => continue,
@@ -3245,12 +3219,10 @@ fn nodeCausesSliceOpSpace(tag: Ast.Node.Tag) bool {
// Returns the number of nodes in `exprs` that are on the same line as `rtoken`.
fn rowSize(tree: Ast, exprs: []const Ast.Node.Index, rtoken: Ast.TokenIndex) usize {
- const token_tags = tree.tokens.items(.tag);
-
const first_token = tree.firstToken(exprs[0]);
if (tree.tokensOnSameLine(first_token, rtoken)) {
const maybe_comma = rtoken - 1;
- if (token_tags[maybe_comma] == .comma)
+ if (tree.tokenTag(maybe_comma) == .comma)
return 1;
return exprs.len; // no newlines
}
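
That closes out render.zig: the file now reaches tokens and nodes only through the small accessor surface (`tokenTag`, `tokenStart`, `nodeTag`, `nodeMainToken`, `nodeData`, plus helpers like `isTokenPrecededByTags`). With `Node.Index` now an enum, the node accessors presumably look like:

    pub fn nodeTag(tree: *const Ast, node: Node.Index) Node.Tag {
        return tree.nodes.items(.tag)[@intFromEnum(node)];
    }

    pub fn nodeMainToken(tree: *const Ast, node: Node.Index) TokenIndex {
        return tree.nodes.items(.main_token)[@intFromEnum(node)];
    }
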
lib/std/zig/Zir.zig
@@ -80,9 +80,18 @@ pub fn extraData(code: Zir, comptime T: type, index: usize) ExtraData(T) {
Inst.Declaration.Name,
std.zig.SimpleComptimeReason,
NullTerminatedString,
+ // Ast.TokenIndex is missing because it is a u32.
+ Ast.OptionalTokenIndex,
+ Ast.Node.Index,
+ Ast.Node.OptionalIndex,
=> @enumFromInt(code.extra[i]),
- i32,
+ Ast.TokenOffset,
+ Ast.OptionalTokenOffset,
+ Ast.Node.Offset,
+ Ast.Node.OptionalOffset,
+ => @enumFromInt(@as(i32, @bitCast(code.extra[i]))),
+
Inst.Call.Flags,
Inst.BuiltinCall.Flags,
Inst.SwitchBlock.Bits,
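
The new signed offset types (`Ast.TokenOffset`, `Ast.Node.Offset`, and their optional variants) are stored in the same `u32` words of `extra` as everything else, hence the `@bitCast` through `i32` before `@enumFromInt`. The round trip, as a standalone test:

    const std = @import("std");

    test "signed offsets round-trip through u32 extra words" {
        const offset: i32 = -3; // e.g. a node three before the decl node
        const stored: u32 = @bitCast(offset); // as written into code.extra
        const recovered: i32 = @bitCast(stored); // as read back by extraData
        try std.testing.expectEqual(offset, recovered);
    }
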
@@ -1904,22 +1913,22 @@ pub const Inst = struct {
/// `small` is `fields_len: u16`.
tuple_decl,
/// Implements the `@This` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
this,
/// Implements the `@returnAddress` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
ret_addr,
/// Implements the `@src` builtin.
/// `operand` is payload index to `LineColumn`.
builtin_src,
/// Implements the `@errorReturnTrace` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
error_return_trace,
/// Implements the `@frame` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
frame,
/// Implements the `@frameAddress` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
frame_address,
/// Same as `alloc` from `Tag` but may contain an alignment instruction.
/// `operand` is payload index to `AllocExtended`.
@@ -2004,9 +2013,9 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
await_nosuspend,
/// Implements `@breakpoint`.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
breakpoint,
- /// Implement builtin `@disableInstrumentation`. `operand` is `src_node: i32`.
+ /// Implement builtin `@disableInstrumentation`. `operand` is `src_node: Ast.Node.Offset`.
disable_instrumentation,
/// Implement builtin `@disableIntrinsics`. `operand` is `src_node: i32`.
disable_intrinsics,
@@ -2040,7 +2049,7 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
c_va_end,
/// Implement builtin `@cVaStart`.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
c_va_start,
/// Implements the following builtins:
/// `@ptrCast`, `@alignCast`, `@addrSpaceCast`, `@constCast`, `@volatileCast`.
@@ -2067,7 +2076,7 @@ pub const Inst = struct {
/// `operand` is payload index to `UnNode`.
work_group_id,
/// Implements the `@inComptime` builtin.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
in_comptime,
/// Restores the error return index to its last saved state in a given
/// block. If the block is `.none`, restores to the state from the point
@@ -2077,7 +2086,7 @@ pub const Inst = struct {
/// `small` is undefined.
restore_err_ret_index,
/// Retrieves a value from the current type declaration scope's closure.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
/// `small` is closure index.
closure_get,
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
@@ -2091,7 +2100,7 @@ pub const Inst = struct {
/// Uses the `pl_node` union field with payload `FieldParentPtr`.
field_parent_ptr,
/// Get a type or value from `std.builtin`.
- /// `operand` is `src_node: i32`.
+ /// `operand` is `src_node: Ast.Node.Offset`.
/// `small` is an `Inst.BuiltinValue`.
builtin_value,
/// Provide a `@branchHint` for the current block.
@@ -2286,28 +2295,28 @@ pub const Inst = struct {
/// Used for unary operators, with an AST node source location.
un_node: struct {
/// Offset from Decl AST node index.
- src_node: i32,
+ src_node: Ast.Node.Offset,
/// The meaning of this operand depends on the corresponding `Tag`.
operand: Ref,
},
/// Used for unary operators, with a token source location.
un_tok: struct {
/// Offset from Decl AST token index.
- src_tok: Ast.TokenIndex,
+ src_tok: Ast.TokenOffset,
/// The meaning of this operand depends on the corresponding `Tag`.
operand: Ref,
},
pl_node: struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
- src_node: i32,
+ src_node: Ast.Node.Offset,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
},
pl_tok: struct {
/// Offset from Decl AST token index.
- src_tok: Ast.TokenIndex,
+ src_tok: Ast.TokenOffset,
/// index into extra.
/// `Tag` determines what lives there.
payload_index: u32,
@@ -2328,16 +2337,16 @@ pub const Inst = struct {
/// Offset into `string_bytes`. Null-terminated.
start: NullTerminatedString,
/// Offset from Decl AST token index.
- src_tok: u32,
+ src_tok: Ast.TokenOffset,
pub fn get(self: @This(), code: Zir) [:0]const u8 {
return code.nullTerminatedString(self.start);
}
},
/// Offset from Decl AST token index.
- tok: Ast.TokenIndex,
+ tok: Ast.TokenOffset,
/// Offset from Decl AST node index.
- node: i32,
+ node: Ast.Node.Offset,
int: u64,
float: f64,
ptr_type: struct {
@@ -2358,14 +2367,14 @@ pub const Inst = struct {
int_type: struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
- src_node: i32,
+ src_node: Ast.Node.Offset,
signedness: std.builtin.Signedness,
bit_count: u16,
},
@"unreachable": struct {
/// Offset from Decl AST node index.
/// `Tag` determines which kind of AST node this points to.
- src_node: i32,
+ src_node: Ast.Node.Offset,
},
@"break": struct {
operand: Ref,
@@ -2377,7 +2386,7 @@ pub const Inst = struct {
/// with an AST node source location.
inst_node: struct {
/// Offset from Decl AST node index.
- src_node: i32,
+ src_node: Ast.Node.Offset,
/// The meaning of this operand depends on the corresponding `Tag`.
inst: Index,
},
@@ -2456,9 +2465,7 @@ pub const Inst = struct {
};
pub const Break = struct {
- pub const no_src_node = std.math.maxInt(i32);
-
- operand_src_node: i32,
+ operand_src_node: Ast.Node.OptionalOffset,
block_inst: Index,
};
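
`Break.no_src_node` disappears because the maxInt sentinel now lives inside `Ast.Node.OptionalOffset` itself. Assumed shape, mirroring `OptionalIndex` but over a signed payload:

    pub const OptionalOffset = enum(i32) {
        none = std.math.maxInt(i32),
        _,

        pub fn unwrap(opt: OptionalOffset) ?Offset {
            return if (opt == .none) null else @enumFromInt(@intFromEnum(opt));
        }
    };
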
@@ -2467,7 +2474,7 @@ pub const Inst = struct {
/// 1. Input for every inputs_len
/// 2. clobber: NullTerminatedString // index into string_bytes (null terminated) for every clobbers_len.
pub const Asm = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
// null-terminated string index
asm_source: NullTerminatedString,
/// 1 bit for each outputs_len: whether it uses `-> T` or not.
@@ -2582,7 +2589,7 @@ pub const Inst = struct {
/// Trailing: operand: Ref, // for each `operands_len` (stored in `small`).
pub const NodeMultiOp = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
};
/// This data is stored inside extra, with trailing operands according to `body_len`.
@@ -3033,7 +3040,7 @@ pub const Inst = struct {
/// Trailing:
/// 0. operand: Ref // for each `operands_len`
pub const TypeOfPeer = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
body_len: u32,
body_index: u32,
};
@@ -3084,7 +3091,7 @@ pub const Inst = struct {
/// 4. host_size: Ref // if `has_bit_range` flag is set
pub const PtrType = struct {
elem_type: Ref,
- src_node: i32,
+ src_node: Ast.Node.Offset,
};
pub const ArrayTypeSentinel = struct {
@@ -3116,7 +3123,7 @@ pub const Inst = struct {
start: Ref,
len: Ref,
sentinel: Ref,
- start_src_node_offset: i32,
+ start_src_node_offset: Ast.Node.Offset,
};
/// The meaning of these operands depends on the corresponding `Tag`.
@@ -3126,13 +3133,13 @@ pub const Inst = struct {
};
pub const BinNode = struct {
- node: i32,
+ node: Ast.Node.Offset,
lhs: Ref,
rhs: Ref,
};
pub const UnNode = struct {
- node: i32,
+ node: Ast.Node.Offset,
operand: Ref,
};
@@ -3186,7 +3193,7 @@ pub const Inst = struct {
pub const SwitchBlockErrUnion = struct {
operand: Ref,
bits: Bits,
- main_src_node_offset: i32,
+ main_src_node_offset: Ast.Node.Offset,
pub const Bits = packed struct(u32) {
/// If true, one or more prongs have multiple items.
@@ -3592,7 +3599,7 @@ pub const Inst = struct {
/// init: Inst.Ref, // `.none` for non-`comptime` fields
/// }
pub const TupleDecl = struct {
- src_node: i32, // relative
+ src_node: Ast.Node.Offset,
};
/// Trailing:
@@ -3666,7 +3673,7 @@ pub const Inst = struct {
};
pub const Cmpxchg = struct {
- node: i32,
+ node: Ast.Node.Offset,
ptr: Ref,
expected_value: Ref,
new_value: Ref,
@@ -3706,7 +3713,7 @@ pub const Inst = struct {
};
pub const FieldParentPtr = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
parent_ptr_type: Ref,
field_name: Ref,
field_ptr: Ref,
@@ -3720,7 +3727,7 @@ pub const Inst = struct {
};
pub const Select = struct {
- node: i32,
+ node: Ast.Node.Offset,
elem_type: Ref,
pred: Ref,
a: Ref,
@@ -3728,7 +3735,7 @@ pub const Inst = struct {
};
pub const AsyncCall = struct {
- node: i32,
+ node: Ast.Node.Offset,
frame_buffer: Ref,
result_ptr: Ref,
fn_ptr: Ref,
@@ -3753,7 +3760,7 @@ pub const Inst = struct {
/// 0. type_inst: Ref, // if small 0b000X is set
/// 1. align_inst: Ref, // if small 0b00X0 is set
pub const AllocExtended = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
pub const Small = packed struct {
has_type: bool,
@@ -3778,9 +3785,9 @@ pub const Inst = struct {
pub const Item = struct {
/// null terminated string index
msg: NullTerminatedString,
- node: Ast.Node.Index,
- /// If node is 0 then this will be populated.
- token: Ast.TokenIndex,
+ node: Ast.Node.OptionalIndex,
+ /// If node is .none then this will be populated.
+ token: Ast.OptionalTokenIndex,
/// Can be used in combination with `token`.
byte_offset: u32,
/// 0 or a payload index of a `Block`, each is a payload
@@ -3818,7 +3825,7 @@ pub const Inst = struct {
};
pub const Src = struct {
- node: i32,
+ node: Ast.Node.Offset,
line: u32,
column: u32,
};
@@ -3833,7 +3840,7 @@ pub const Inst = struct {
/// The value being destructured.
operand: Ref,
/// The `destructure_assign` node.
- destructure_node: i32,
+ destructure_node: Ast.Node.Offset,
/// The expected field count.
expect_len: u32,
};
@@ -3848,7 +3855,7 @@ pub const Inst = struct {
};
pub const RestoreErrRetIndex = struct {
- src_node: i32,
+ src_node: Ast.Node.Offset,
/// If `.none`, restore the trace to its state upon function entry.
block: Ref,
/// If `.none`, restore unconditionally.
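
A `src_node: Ast.Node.Offset` is relative: consumers add it to the absolute index of the enclosing declaration's AST node. Under the raw-integer representation used before this commit, resolution was the arithmetic below; the enum types wrap the same computation (helper name hypothetical):

    const std = @import("std");

    fn resolveNodeOffset(decl_node: u32, offset: i32) u32 {
        const abs = @as(i64, decl_node) + offset;
        std.debug.assert(abs >= 0); // well-formed ZIR stays within the file
        return @intCast(abs);
    }
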
lib/std/zig/Zoir.zig
@@ -228,8 +228,8 @@ pub const NullTerminatedString = enum(u32) {
pub const CompileError = extern struct {
msg: NullTerminatedString,
- token: Ast.TokenIndex,
- /// If `token == invalid_token`, this is an `Ast.Node.Index`.
+ token: Ast.OptionalTokenIndex,
+ /// If `token == .none`, this is an `Ast.Node.Index`.
/// Otherwise, this is a byte offset into `token`.
node_or_offset: u32,
@@ -243,14 +243,12 @@ pub const CompileError = extern struct {
pub const Note = extern struct {
msg: NullTerminatedString,
- token: Ast.TokenIndex,
- /// If `token == invalid_token`, this is an `Ast.Node.Index`.
+ token: Ast.OptionalTokenIndex,
+ /// If `token == .none`, this is an `Ast.Node.Index`.
/// Otherwise, this is a byte offset into `token`.
node_or_offset: u32,
};
- pub const invalid_token: Ast.TokenIndex = std.math.maxInt(Ast.TokenIndex);
-
comptime {
assert(std.meta.hasUniqueRepresentation(CompileError));
assert(std.meta.hasUniqueRepresentation(Note));
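
With `invalid_token` deleted, callers branch on the optional rather than comparing against maxInt. A usage sketch, assuming `Ast.OptionalTokenIndex` exposes the same `unwrap` API as the node variant (`errorByteOffset` is a hypothetical helper, not part of this commit):

    fn errorByteOffset(tree: Ast, err: Zoir.CompileError) u32 {
        if (err.token.unwrap()) |token| {
            // Token-anchored: node_or_offset is a byte offset into the token.
            return tree.tokenStart(token) + err.node_or_offset;
        } else {
            // Node-anchored: node_or_offset holds an Ast.Node.Index.
            const node: Ast.Node.Index = @enumFromInt(err.node_or_offset);
            return tree.tokenStart(tree.firstToken(node));
        }
    }
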
lib/std/zig/ZonGen.zig
@@ -48,7 +48,7 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
}
if (tree.errors.len == 0) {
- const root_ast_node = tree.nodes.items(.data)[0].lhs;
+ const root_ast_node = tree.rootDecls()[0];
try zg.nodes.append(gpa, undefined); // index 0; root node
try zg.expr(root_ast_node, .root);
} else {
@@ -97,11 +97,8 @@ pub fn generate(gpa: Allocator, tree: Ast, options: Options) Allocator.Error!Zoi
fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator.Error!void {
const gpa = zg.gpa;
const tree = zg.tree;
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const main_tokens = tree.nodes.items(.main_token);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.root => unreachable,
.@"usingnamespace" => unreachable,
.test_decl => unreachable,
@@ -173,7 +170,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.bool_not,
.bit_not,
.negation_wrap,
- => try zg.addErrorTok(main_tokens[node], "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(main_tokens[node])}),
+ => try zg.addErrorTok(tree.nodeMainToken(node), "operator '{s}' is not allowed in ZON", .{tree.tokenSlice(tree.nodeMainToken(node))}),
.error_union,
.merge_error_sets,
@@ -251,8 +248,8 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.slice_sentinel,
=> try zg.addErrorNode(node, "slice operator is not allowed in ZON", .{}),
- .deref, .address_of => try zg.addErrorTok(main_tokens[node], "pointers are not available in ZON", .{}),
- .unwrap_optional => try zg.addErrorTok(main_tokens[node], "optionals are not available in ZON", .{}),
+ .deref, .address_of => try zg.addErrorTok(tree.nodeMainToken(node), "pointers are not available in ZON", .{}),
+ .unwrap_optional => try zg.addErrorTok(tree.nodeMainToken(node), "optionals are not available in ZON", .{}),
.error_value => try zg.addErrorNode(node, "errors are not available in ZON", .{}),
.array_access => try zg.addErrorNode(node, "array indexing is not allowed in ZON", .{}),
@@ -262,12 +259,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.block,
.block_semicolon,
=> {
- const size = switch (node_tags[node]) {
- .block_two, .block_two_semicolon => @intFromBool(node_datas[node].lhs != 0) + @intFromBool(node_datas[node].rhs != 0),
- .block, .block_semicolon => node_datas[node].rhs - node_datas[node].lhs,
- else => unreachable,
- };
- if (size == 0) {
+ var buffer: [2]Ast.Node.Index = undefined;
+ const statements = tree.blockStatements(&buffer, node).?;
+ if (statements.len == 0) {
try zg.addErrorNodeNotes(node, "void literals are not available in ZON", .{}, &.{
try zg.errNoteNode(node, "void union payloads can be represented by enum literals", .{}),
});
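
The hand-rolled size computation over `lhs`/`rhs` is replaced by `tree.blockStatements`, which follows the `full*` convention: the caller passes a two-element buffer so `block_two`-style nodes, whose statements are stored inline rather than in `extra_data`, can still be returned as a slice without allocating. Typical usage (sketch):

    var buffer: [2]Ast.Node.Index = undefined;
    const statements = tree.blockStatements(&buffer, node) orelse return; // null if `node` is not a block
    for (statements) |stmt| {
        // visit each statement node in source order
        _ = stmt;
    }
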
@@ -288,9 +282,9 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
var buf: [2]Ast.Node.Index = undefined;
const type_node = if (tree.fullArrayInit(&buf, node)) |full|
- full.ast.type_expr
+ full.ast.type_expr.unwrap().?
else if (tree.fullStructInit(&buf, node)) |full|
- full.ast.type_expr
+ full.ast.type_expr.unwrap().?
else
unreachable;
@@ -300,18 +294,18 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
},
.grouped_expression => {
- try zg.addErrorTokNotes(main_tokens[node], "expression grouping is not allowed in ZON", .{}, &.{
- try zg.errNoteTok(main_tokens[node], "these parentheses are always redundant", .{}),
+ try zg.addErrorTokNotes(tree.nodeMainToken(node), "expression grouping is not allowed in ZON", .{}, &.{
+ try zg.errNoteTok(tree.nodeMainToken(node), "these parentheses are always redundant", .{}),
});
- return zg.expr(node_datas[node].lhs, dest_node);
+ return zg.expr(tree.nodeData(node).node_and_token[0], dest_node);
},
.negation => {
- const child_node = node_datas[node].lhs;
- switch (node_tags[child_node]) {
+ const child_node = tree.nodeData(node).node;
+ switch (tree.nodeTag(child_node)) {
.number_literal => return zg.numberLiteral(child_node, node, dest_node, .negative),
.identifier => {
- const child_ident = tree.tokenSlice(main_tokens[child_node]);
+ const child_ident = tree.tokenSlice(tree.nodeMainToken(child_node));
if (mem.eql(u8, child_ident, "inf")) {
zg.setNode(dest_node, .{
.tag = .neg_inf,
@@ -323,7 +317,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
},
else => {},
}
- try zg.addErrorTok(main_tokens[node], "expected number or 'inf' after '-'", .{});
+ try zg.addErrorTok(tree.nodeMainToken(node), "expected number or 'inf' after '-'", .{});
},
.number_literal => try zg.numberLiteral(node, node, dest_node, .positive),
.char_literal => try zg.charLiteral(node, dest_node),
@@ -331,7 +325,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
.identifier => try zg.identifier(node, dest_node),
.enum_literal => {
- const str_index = zg.identAsString(main_tokens[node]) catch |err| switch (err) {
+ const str_index = zg.identAsString(tree.nodeMainToken(node)) catch |err| switch (err) {
error.BadString => undefined, // doesn't matter, there's an error
error.OutOfMemory => |e| return e,
};
@@ -369,7 +363,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullArrayInit(&buf, node).?;
assert(full.ast.elements.len != 0); // Otherwise it would be a struct init
- assert(full.ast.type_expr == 0); // The tag was `array_init_dot_*`
+ assert(full.ast.type_expr == .none); // The tag was `array_init_dot_*`
const first_elem: u32 = @intCast(zg.nodes.len);
try zg.nodes.resize(gpa, zg.nodes.len + full.ast.elements.len);
@@ -398,7 +392,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
=> {
var buf: [2]Ast.Node.Index = undefined;
const full = tree.fullStructInit(&buf, node).?;
- assert(full.ast.type_expr == 0); // The tag was `struct_init_dot_*`
+ assert(full.ast.type_expr == .none); // The tag was `struct_init_dot_*`
if (full.ast.fields.len == 0) {
zg.setNode(dest_node, .{
@@ -460,7 +454,7 @@ fn expr(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) Allocator
fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
const tree = zg.tree;
- assert(tree.tokens.items(.tag)[ident_token] == .identifier);
+ assert(tree.tokenTag(ident_token) == .identifier);
const ident_name = tree.tokenSlice(ident_token);
if (!mem.startsWith(u8, ident_name, "@")) {
const start = zg.string_bytes.items.len;
@@ -493,19 +487,16 @@ fn appendIdentStr(zg: *ZonGen, ident_token: Ast.TokenIndex) !u32 {
/// Estimates the size of a string node without parsing it.
pub fn strLitSizeHint(tree: Ast, node: Ast.Node.Index) usize {
- switch (tree.nodes.items(.tag)[node]) {
+ switch (tree.nodeTag(node)) {
// Parsed string literals are typically around the size of the raw strings.
.string_literal => {
- const token = tree.nodes.items(.main_token)[node];
+ const token = tree.nodeMainToken(node);
const raw_string = tree.tokenSlice(token);
return raw_string.len;
},
// Multiline string literal lengths can be computed exactly.
.multiline_string_literal => {
- const first_tok, const last_tok = bounds: {
- const node_data = tree.nodes.items(.data)[node];
- break :bounds .{ node_data.lhs, node_data.rhs };
- };
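+ // `token_and_token` spans the first and last `\\` line tokens of the multiline string.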
+ const first_tok, const last_tok = tree.nodeData(node).token_and_token;
var size = tree.tokenSlice(first_tok)[2..].len;
for (first_tok + 1..last_tok + 1) |tok_idx| {
@@ -524,17 +515,14 @@ pub fn parseStrLit(
node: Ast.Node.Index,
writer: anytype,
) error{OutOfMemory}!std.zig.string_literal.Result {
- switch (tree.nodes.items(.tag)[node]) {
+ switch (tree.nodeTag(node)) {
.string_literal => {
- const token = tree.nodes.items(.main_token)[node];
+ const token = tree.nodeMainToken(node);
const raw_string = tree.tokenSlice(token);
return std.zig.string_literal.parseWrite(writer, raw_string);
},
.multiline_string_literal => {
- const first_tok, const last_tok = bounds: {
- const node_data = tree.nodes.items(.data)[node];
- break :bounds .{ node_data.lhs, node_data.rhs };
- };
+ const first_tok, const last_tok = tree.nodeData(node).token_and_token;
// First line: do not append a newline.
{
@@ -572,7 +560,7 @@ fn strLitAsString(zg: *ZonGen, str_node: Ast.Node.Index) !StringLiteralResult {
switch (try parseStrLit(zg.tree, str_node, zg.string_bytes.writer(zg.gpa))) {
.success => {},
.failure => |err| {
- const token = zg.tree.nodes.items(.main_token)[str_node];
+ const token = zg.tree.nodeMainToken(str_node);
const raw_string = zg.tree.tokenSlice(token);
try zg.lowerStrLitError(err, token, raw_string, 0);
return error.BadString;
@@ -620,7 +608,7 @@ fn identAsString(zg: *ZonGen, ident_token: Ast.TokenIndex) !Zoir.NullTerminatedS
fn numberLiteral(zg: *ZonGen, num_node: Ast.Node.Index, src_node: Ast.Node.Index, dest_node: Zoir.Node.Index, sign: enum { negative, positive }) !void {
const tree = zg.tree;
- const num_token = tree.nodes.items(.main_token)[num_node];
+ const num_token = tree.nodeMainToken(num_node);
const num_bytes = tree.tokenSlice(num_token);
switch (std.zig.parseNumberLiteral(num_bytes)) {
@@ -724,8 +712,8 @@ fn setBigIntLiteralNode(zg: *ZonGen, dest_node: Zoir.Node.Index, src_node: Ast.N
fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
const tree = zg.tree;
- assert(tree.nodes.items(.tag)[node] == .char_literal);
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .char_literal);
+ const main_token = tree.nodeMainToken(node);
const slice = tree.tokenSlice(main_token);
switch (std.zig.parseCharLiteral(slice)) {
.success => |codepoint| zg.setNode(dest_node, .{
@@ -739,8 +727,8 @@ fn charLiteral(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !v
fn identifier(zg: *ZonGen, node: Ast.Node.Index, dest_node: Zoir.Node.Index) !void {
const tree = zg.tree;
- assert(tree.nodes.items(.tag)[node] == .identifier);
- const main_token = tree.nodes.items(.main_token)[node];
+ assert(tree.nodeTag(node) == .identifier);
+ const main_token = tree.nodeMainToken(node);
const ident = tree.tokenSlice(main_token);
const tag: Zoir.Node.Repr.Tag = t: {
@@ -823,8 +811,8 @@ fn errNoteNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, a
return .{
.msg = @enumFromInt(message_idx),
- .token = Zoir.CompileError.invalid_token,
- .node_or_offset = node,
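+ // With `.token = .none`, `node_or_offset` is interpreted as an AST node index.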
+ .token = .none,
+ .node_or_offset = @intFromEnum(node),
};
}
@@ -836,33 +824,33 @@ fn errNoteTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, arg
return .{
.msg = @enumFromInt(message_idx),
- .token = tok,
+ .token = .fromToken(tok),
.node_or_offset = 0,
};
}
fn addErrorNode(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype) Allocator.Error!void {
- return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, &.{});
+ return zg.addErrorInner(.none, @intFromEnum(node), format, args, &.{});
}
fn addErrorTok(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype) Allocator.Error!void {
- return zg.addErrorInner(tok, 0, format, args, &.{});
+ return zg.addErrorInner(.fromToken(tok), 0, format, args, &.{});
}
fn addErrorNodeNotes(zg: *ZonGen, node: Ast.Node.Index, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
- return zg.addErrorInner(Zoir.CompileError.invalid_token, node, format, args, notes);
+ return zg.addErrorInner(.none, @intFromEnum(node), format, args, notes);
}
fn addErrorTokNotes(zg: *ZonGen, tok: Ast.TokenIndex, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
- return zg.addErrorInner(tok, 0, format, args, notes);
+ return zg.addErrorInner(.fromToken(tok), 0, format, args, notes);
}
fn addErrorTokOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype) Allocator.Error!void {
- return zg.addErrorInner(tok, offset, format, args, &.{});
+ return zg.addErrorInner(.fromToken(tok), offset, format, args, &.{});
}
fn addErrorTokNotesOff(zg: *ZonGen, tok: Ast.TokenIndex, offset: u32, comptime format: []const u8, args: anytype, notes: []const Zoir.CompileError.Note) Allocator.Error!void {
- return zg.addErrorInner(tok, offset, format, args, notes);
+ return zg.addErrorInner(.fromToken(tok), offset, format, args, notes);
}
fn addErrorInner(
zg: *ZonGen,
- token: Ast.TokenIndex,
+ token: Ast.OptionalTokenIndex,
node_or_offset: u32,
comptime format: []const u8,
args: anytype,
lib/std/zon/parse.zig
@@ -196,16 +196,15 @@ pub const Error = union(enum) {
return .{ .err = self, .status = status };
}
- fn zoirErrorLocation(ast: Ast, maybe_token: Ast.TokenIndex, node_or_offset: u32) Ast.Location {
- if (maybe_token == Zoir.CompileError.invalid_token) {
- const main_tokens = ast.nodes.items(.main_token);
- const ast_node = node_or_offset;
- const token = main_tokens[ast_node];
- return ast.tokenLocation(0, token);
- } else {
- var location = ast.tokenLocation(0, maybe_token);
+ fn zoirErrorLocation(ast: Ast, maybe_token: Ast.OptionalTokenIndex, node_or_offset: u32) Ast.Location {
+ if (maybe_token.unwrap()) |token| {
+ var location = ast.tokenLocation(0, token);
location.column += node_or_offset;
return location;
+ } else {
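+ // No token: `node_or_offset` identifies the AST node whose main token locates the error.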
+ const ast_node: Ast.Node.Index = @enumFromInt(node_or_offset);
+ const token = ast.nodeMainToken(ast_node);
+ return ast.tokenLocation(0, token);
}
}
};
@@ -632,7 +631,7 @@ const Parser = struct {
switch (try ZonGen.parseStrLit(self.ast, ast_node, buf.writer(self.gpa))) {
.success => {},
.failure => |err| {
- const token = self.ast.nodes.items(.main_token)[ast_node];
+ const token = self.ast.nodeMainToken(ast_node);
const raw_string = self.ast.tokenSlice(token);
return self.failTokenFmt(token, @intCast(err.offset()), "{s}", .{err.fmt(raw_string)});
},
@@ -1005,8 +1004,7 @@ const Parser = struct {
args: anytype,
) error{ OutOfMemory, ParseZon } {
@branchHint(.cold);
- const main_tokens = self.ast.nodes.items(.main_token);
- const token = main_tokens[node.getAstNode(self.zoir)];
+ const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
return self.failTokenFmt(token, 0, fmt, args);
}
@@ -1025,8 +1023,7 @@ const Parser = struct {
message: []const u8,
) error{ParseZon} {
@branchHint(.cold);
- const main_tokens = self.ast.nodes.items(.main_token);
- const token = main_tokens[node.getAstNode(self.zoir)];
+ const token = self.ast.nodeMainToken(node.getAstNode(self.zoir));
return self.failToken(.{
.token = token,
.offset = 0,
@@ -1059,10 +1056,7 @@ const Parser = struct {
const struct_init = self.ast.fullStructInit(&buf, node.getAstNode(self.zoir)).?;
const field_node = struct_init.ast.fields[f];
break :b self.ast.firstToken(field_node) - 2;
- } else b: {
- const main_tokens = self.ast.nodes.items(.main_token);
- break :b main_tokens[node.getAstNode(self.zoir)];
- };
+ } else self.ast.nodeMainToken(node.getAstNode(self.zoir));
switch (@typeInfo(T)) {
inline .@"struct", .@"union", .@"enum" => |info| {
const note: Error.TypeCheckFailure.Note = if (info.fields.len == 0) b: {
src/Package/Fetch.zig
@@ -30,7 +30,7 @@
arena: std.heap.ArenaAllocator,
location: Location,
location_tok: std.zig.Ast.TokenIndex,
-hash_tok: std.zig.Ast.TokenIndex,
+hash_tok: std.zig.Ast.OptionalTokenIndex,
name_tok: std.zig.Ast.TokenIndex,
lazy_status: LazyStatus,
parent_package_root: Cache.Path,
@@ -317,8 +317,8 @@ pub fn run(f: *Fetch) RunError!void {
f.location_tok,
try eb.addString("expected path relative to build root; found absolute path"),
);
- if (f.hash_tok != 0) return f.fail(
- f.hash_tok,
+ if (f.hash_tok.unwrap()) |hash_tok| return f.fail(
+ hash_tok,
try eb.addString("path-based dependencies are not hashed"),
);
// Packages fetched by URL may not use relative paths to escape outside the
@@ -555,17 +555,18 @@ fn runResource(
// job is done.
if (remote_hash) |declared_hash| {
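+ // A declared hash implies the manifest had a `hash` field, so the token is present.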
+ const hash_tok = f.hash_tok.unwrap().?;
if (declared_hash.isOld()) {
const actual_hex = Package.multiHashHexDigest(f.computed_hash.digest);
if (!std.mem.eql(u8, declared_hash.toSlice(), &actual_hex)) {
- return f.fail(f.hash_tok, try eb.printString(
+ return f.fail(hash_tok, try eb.printString(
"hash mismatch: manifest declares {s} but the fetched package has {s}",
.{ declared_hash.toSlice(), actual_hex },
));
}
} else {
if (!computed_package_hash.eql(&declared_hash)) {
- return f.fail(f.hash_tok, try eb.printString(
+ return f.fail(hash_tok, try eb.printString(
"hash mismatch: manifest declares {s} but the fetched package has {s}",
.{ declared_hash.toSlice(), computed_package_hash.toSlice() },
));
@@ -813,15 +814,14 @@ fn srcLoc(
) Allocator.Error!ErrorBundle.SourceLocationIndex {
const ast = f.parent_manifest_ast orelse return .none;
const eb = &f.error_bundle;
- const token_starts = ast.tokens.items(.start);
const start_loc = ast.tokenLocation(0, tok);
const src_path = try eb.printString("{}" ++ fs.path.sep_str ++ Manifest.basename, .{f.parent_package_root});
const msg_off = 0;
return eb.addSourceLocation(.{
.src_path = src_path,
- .span_start = token_starts[tok],
- .span_end = @intCast(token_starts[tok] + ast.tokenSlice(tok).len),
- .span_main = token_starts[tok] + msg_off,
+ .span_start = ast.tokenStart(tok),
+ .span_end = @intCast(ast.tokenStart(tok) + ast.tokenSlice(tok).len),
+ .span_main = ast.tokenStart(tok) + msg_off,
.line = @intCast(start_loc.line),
.column = @intCast(start_loc.column),
.source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@@ -2322,7 +2322,7 @@ const TestFetchBuilder = struct {
.arena = std.heap.ArenaAllocator.init(allocator),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
- .hash_tok = 0,
+ .hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = Cache.Path{ .root_dir = Cache.Directory{ .handle = cache_dir, .path = null } },
src/Package/Manifest.zig
@@ -17,8 +17,8 @@ pub const Dependency = struct {
location_tok: Ast.TokenIndex,
location_node: Ast.Node.Index,
hash: ?[]const u8,
- hash_tok: Ast.TokenIndex,
- hash_node: Ast.Node.Index,
+ hash_tok: Ast.OptionalTokenIndex,
+ hash_node: Ast.Node.OptionalIndex,
node: Ast.Node.Index,
name_tok: Ast.TokenIndex,
lazy: bool,
@@ -40,7 +40,7 @@ id: u32,
version: std.SemanticVersion,
version_node: Ast.Node.Index,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
-dependencies_node: Ast.Node.Index,
+dependencies_node: Ast.Node.OptionalIndex,
paths: std.StringArrayHashMapUnmanaged(void),
minimum_zig_version: ?std.SemanticVersion,
@@ -58,10 +58,7 @@ pub const ParseOptions = struct {
pub const Error = Allocator.Error;
pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
- const node_tags = ast.nodes.items(.tag);
- const node_datas = ast.nodes.items(.data);
- assert(node_tags[0] == .root);
- const main_node_index = node_datas[0].lhs;
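+ // In ZON mode, the root node's data is the single top-level expression node.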
+ const main_node_index = ast.nodeData(.root).node;
var arena_instance = std.heap.ArenaAllocator.init(gpa);
errdefer arena_instance.deinit();
@@ -75,9 +72,9 @@ pub fn parse(gpa: Allocator, ast: Ast, options: ParseOptions) Error!Manifest {
.name = undefined,
.id = 0,
.version = undefined,
- .version_node = 0,
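+ // Set when the `version` field is parsed; a missing version is reported separately.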
+ .version_node = undefined,
.dependencies = .{},
- .dependencies_node = 0,
+ .dependencies_node = .none,
.paths = .{},
.allow_missing_paths_field = options.allow_missing_paths_field,
.allow_name_string = options.allow_name_string,
@@ -121,8 +118,6 @@ pub fn copyErrorsIntoBundle(
src_path: u32,
eb: *std.zig.ErrorBundle.Wip,
) Allocator.Error!void {
- const token_starts = ast.tokens.items(.start);
-
for (man.errors) |msg| {
const start_loc = ast.tokenLocation(0, msg.tok);
@@ -130,9 +125,9 @@ pub fn copyErrorsIntoBundle(
.msg = try eb.addString(msg.msg),
.src_loc = try eb.addSourceLocation(.{
.src_path = src_path,
- .span_start = token_starts[msg.tok],
- .span_end = @intCast(token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
- .span_main = token_starts[msg.tok] + msg.off,
+ .span_start = ast.tokenStart(msg.tok),
+ .span_end = @intCast(ast.tokenStart(msg.tok) + ast.tokenSlice(msg.tok).len),
+ .span_main = ast.tokenStart(msg.tok) + msg.off,
.line = @intCast(start_loc.line),
.column = @intCast(start_loc.column),
.source_line = try eb.addString(ast.source[start_loc.line_start..start_loc.line_end]),
@@ -153,7 +148,7 @@ const Parse = struct {
version: std.SemanticVersion,
version_node: Ast.Node.Index,
dependencies: std.StringArrayHashMapUnmanaged(Dependency),
- dependencies_node: Ast.Node.Index,
+ dependencies_node: Ast.Node.OptionalIndex,
paths: std.StringArrayHashMapUnmanaged(void),
allow_missing_paths_field: bool,
allow_name_string: bool,
@@ -164,8 +159,7 @@ const Parse = struct {
fn parseRoot(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
- const main_tokens = ast.nodes.items(.main_token);
- const main_token = main_tokens[node];
+ const main_token = ast.nodeMainToken(node);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
@@ -184,7 +178,7 @@ const Parse = struct {
// things manually provides an opportunity to do any additional verification
// that is desirable on a per-field basis.
if (mem.eql(u8, field_name, "dependencies")) {
- p.dependencies_node = field_init;
+ p.dependencies_node = field_init.toOptional();
try parseDependencies(p, field_init);
} else if (mem.eql(u8, field_name, "paths")) {
have_included_paths = true;
@@ -198,17 +192,17 @@ const Parse = struct {
p.version_node = field_init;
const version_text = try parseString(p, field_init);
if (version_text.len > max_version_len) {
- try appendError(p, main_tokens[field_init], "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
+ try appendError(p, ast.nodeMainToken(field_init), "version string length {d} exceeds maximum of {d}", .{ version_text.len, max_version_len });
}
p.version = std.SemanticVersion.parse(version_text) catch |err| v: {
- try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
+ try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
break :v undefined;
};
have_version = true;
} else if (mem.eql(u8, field_name, "minimum_zig_version")) {
const version_text = try parseString(p, field_init);
p.minimum_zig_version = std.SemanticVersion.parse(version_text) catch |err| v: {
- try appendError(p, main_tokens[field_init], "unable to parse semantic version: {s}", .{@errorName(err)});
+ try appendError(p, ast.nodeMainToken(field_init), "unable to parse semantic version: {s}", .{@errorName(err)});
break :v null;
};
} else {
@@ -251,11 +245,10 @@ const Parse = struct {
fn parseDependencies(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
- const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
- const tok = main_tokens[node];
+ const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected dependencies expression to be a struct", .{});
};
@@ -269,23 +262,22 @@ const Parse = struct {
fn parseDependency(p: *Parse, node: Ast.Node.Index) !Dependency {
const ast = p.ast;
- const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const struct_init = ast.fullStructInit(&buf, node) orelse {
- const tok = main_tokens[node];
+ const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected dependency expression to be a struct", .{});
};
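+ // Fields without an optional form start `undefined`; they are set during parsing or an error is recorded.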
var dep: Dependency = .{
.location = undefined,
- .location_tok = 0,
+ .location_tok = undefined,
.location_node = undefined,
.hash = null,
- .hash_tok = 0,
- .hash_node = undefined,
+ .hash_tok = .none,
+ .hash_node = .none,
.node = node,
- .name_tok = 0,
+ .name_tok = undefined,
.lazy = false,
};
var has_location = false;
@@ -299,7 +291,7 @@ const Parse = struct {
// that is desirable on a per-field basis.
if (mem.eql(u8, field_name, "url")) {
if (has_location) {
- return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
+ return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
}
dep.location = .{
.url = parseString(p, field_init) catch |err| switch (err) {
@@ -308,11 +300,11 @@ const Parse = struct {
},
};
has_location = true;
- dep.location_tok = main_tokens[field_init];
+ dep.location_tok = ast.nodeMainToken(field_init);
dep.location_node = field_init;
} else if (mem.eql(u8, field_name, "path")) {
if (has_location) {
- return fail(p, main_tokens[field_init], "dependency should specify only one of 'url' and 'path' fields.", .{});
+ return fail(p, ast.nodeMainToken(field_init), "dependency should specify only one of 'url' and 'path' fields.", .{});
}
dep.location = .{
.path = parseString(p, field_init) catch |err| switch (err) {
@@ -321,15 +313,15 @@ const Parse = struct {
},
};
has_location = true;
- dep.location_tok = main_tokens[field_init];
+ dep.location_tok = ast.nodeMainToken(field_init);
dep.location_node = field_init;
} else if (mem.eql(u8, field_name, "hash")) {
dep.hash = parseHash(p, field_init) catch |err| switch (err) {
error.ParseFailure => continue,
else => |e| return e,
};
- dep.hash_tok = main_tokens[field_init];
- dep.hash_node = field_init;
+ dep.hash_tok = .fromToken(ast.nodeMainToken(field_init));
+ dep.hash_node = field_init.toOptional();
} else if (mem.eql(u8, field_name, "lazy")) {
dep.lazy = parseBool(p, field_init) catch |err| switch (err) {
error.ParseFailure => continue,
@@ -342,7 +334,7 @@ const Parse = struct {
}
if (!has_location) {
- try appendError(p, main_tokens[node], "dependency requires location field, one of 'url' or 'path'.", .{});
+ try appendError(p, ast.nodeMainToken(node), "dependency requires location field, one of 'url' or 'path'.", .{});
}
return dep;
@@ -350,11 +342,10 @@ const Parse = struct {
fn parseIncludedPaths(p: *Parse, node: Ast.Node.Index) !void {
const ast = p.ast;
- const main_tokens = ast.nodes.items(.main_token);
var buf: [2]Ast.Node.Index = undefined;
const array_init = ast.fullArrayInit(&buf, node) orelse {
- const tok = main_tokens[node];
+ const tok = ast.nodeMainToken(node);
return fail(p, tok, "expected paths expression to be a list of strings", .{});
};
@@ -369,12 +360,10 @@ const Parse = struct {
fn parseBool(p: *Parse, node: Ast.Node.Index) !bool {
const ast = p.ast;
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- if (node_tags[node] != .identifier) {
- return fail(p, main_tokens[node], "expected identifier", .{});
+ if (ast.nodeTag(node) != .identifier) {
+ return fail(p, ast.nodeMainToken(node), "expected identifier", .{});
}
- const ident_token = main_tokens[node];
+ const ident_token = ast.nodeMainToken(node);
const token_bytes = ast.tokenSlice(ident_token);
if (mem.eql(u8, token_bytes, "true")) {
return true;
@@ -387,10 +376,8 @@ const Parse = struct {
fn parseFingerprint(p: *Parse, node: Ast.Node.Index) !Package.Fingerprint {
const ast = p.ast;
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- const main_token = main_tokens[node];
- if (node_tags[node] != .number_literal) {
+ const main_token = ast.nodeMainToken(node);
+ if (ast.nodeTag(node) != .number_literal) {
return fail(p, main_token, "expected integer literal", .{});
}
const token_bytes = ast.tokenSlice(main_token);
@@ -406,11 +393,9 @@ const Parse = struct {
fn parseName(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- const main_token = main_tokens[node];
+ const main_token = ast.nodeMainToken(node);
- if (p.allow_name_string and node_tags[node] == .string_literal) {
+ if (p.allow_name_string and ast.nodeTag(node) == .string_literal) {
const name = try parseString(p, node);
if (!std.zig.isValidId(name))
return fail(p, main_token, "name must be a valid bare zig identifier (hint: switch from string to enum literal)", .{});
@@ -423,7 +408,7 @@ const Parse = struct {
return name;
}
- if (node_tags[node] != .enum_literal)
+ if (ast.nodeTag(node) != .enum_literal)
return fail(p, main_token, "expected enum literal", .{});
const ident_name = ast.tokenSlice(main_token);
@@ -440,12 +425,10 @@ const Parse = struct {
fn parseString(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
- const node_tags = ast.nodes.items(.tag);
- const main_tokens = ast.nodes.items(.main_token);
- if (node_tags[node] != .string_literal) {
- return fail(p, main_tokens[node], "expected string literal", .{});
+ if (ast.nodeTag(node) != .string_literal) {
+ return fail(p, ast.nodeMainToken(node), "expected string literal", .{});
}
- const str_lit_token = main_tokens[node];
+ const str_lit_token = ast.nodeMainToken(node);
const token_bytes = ast.tokenSlice(str_lit_token);
p.buf.clearRetainingCapacity();
try parseStrLit(p, str_lit_token, &p.buf, token_bytes, 0);
@@ -455,8 +438,7 @@ const Parse = struct {
fn parseHash(p: *Parse, node: Ast.Node.Index) ![]const u8 {
const ast = p.ast;
- const main_tokens = ast.nodes.items(.main_token);
- const tok = main_tokens[node];
+ const tok = ast.nodeMainToken(node);
const h = try parseString(p, node);
if (h.len > Package.Hash.max_len) {
@@ -469,8 +451,7 @@ const Parse = struct {
/// TODO: try to DRY this with AstGen.identifierTokenString
fn identifierTokenString(p: *Parse, token: Ast.TokenIndex) InnerError![]const u8 {
const ast = p.ast;
- const token_tags = ast.tokens.items(.tag);
- assert(token_tags[token] == .identifier);
+ assert(ast.tokenTag(token) == .identifier);
const ident_name = ast.tokenSlice(token);
if (!mem.startsWith(u8, ident_name, "@")) {
return ident_name;
src/Zcu/PerThread.zig
@@ -841,7 +841,7 @@ fn analyzeComptimeUnit(pt: Zcu.PerThread, cu_id: InternPool.ComptimeUnit.Id) Zcu
.comptime_reason = .{ .reason = .{
.src = .{
.base_node_inst = comptime_unit.zir_index,
- .offset = .{ .token_offset = 0 },
+ .offset = .{ .token_offset = .zero },
},
.r = .{ .simple = .comptime_keyword },
} },
@@ -1042,11 +1042,11 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
const zir_decl = zir.getDeclaration(inst_resolved.inst);
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
- const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
- const init_src = block.src(.{ .node_offset_var_decl_init = 0 });
- const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
- const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
- const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
+ const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
+ const init_src = block.src(.{ .node_offset_var_decl_init = .zero });
+ const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
+ const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
+ const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
block.comptime_reason = .{ .reason = .{
.src = init_src,
@@ -1135,7 +1135,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
break :l zir.nullTerminatedString(zir_decl.lib_name);
} else null;
if (lib_name) |l| {
- const lib_name_src = block.src(.{ .node_offset_lib_name = 0 });
+ const lib_name_src = block.src(.{ .node_offset_lib_name = .zero });
try sema.handleExternLibName(&block, lib_name_src, l);
}
break :val .fromInterned(try pt.getExtern(.{
@@ -1233,7 +1233,7 @@ fn analyzeNavVal(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileErr
}
if (zir_decl.linkage == .@"export") {
- const export_src = block.src(.{ .token_offset = @intFromBool(zir_decl.is_pub) });
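+ // Skip the `pub` token, if present, so the location points at the `export` keyword.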
+ const export_src = block.src(.{ .token_offset = @enumFromInt(@intFromBool(zir_decl.is_pub)) });
const name_slice = zir.nullTerminatedString(zir_decl.name);
const name_ip = try ip.getOrPutString(gpa, pt.tid, name_slice, .no_embedded_nulls);
try sema.analyzeExport(&block, export_src, .{ .name = name_ip }, nav_id);
@@ -1414,7 +1414,7 @@ fn analyzeNavType(pt: Zcu.PerThread, nav_id: InternPool.Nav.Index) Zcu.CompileEr
const zir_decl = zir.getDeclaration(inst_resolved.inst);
assert(old_nav.is_usingnamespace == (zir_decl.kind == .@"usingnamespace"));
- const ty_src = block.src(.{ .node_offset_var_decl_ty = 0 });
+ const ty_src = block.src(.{ .node_offset_var_decl_ty = .zero });
block.comptime_reason = .{ .reason = .{
.src = ty_src,
@@ -2743,7 +2743,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
if (sema.fn_ret_ty_ies) |ies| {
sema.resolveInferredErrorSetPtr(&inner_block, .{
.base_node_inst = inner_block.src_base_inst,
- .offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
+ .offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
}, ies) catch |err| switch (err) {
error.ComptimeReturn => unreachable,
error.ComptimeBreak => unreachable,
@@ -2762,7 +2762,7 @@ fn analyzeFnBodyInner(pt: Zcu.PerThread, func_index: InternPool.Index) Zcu.SemaE
// result in circular dependency errors.
// TODO: this can go away once we fix backends having to resolve `StackTrace`.
// The codegen timing guarantees that the parameter types will be populated.
- sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(0)) catch |err| switch (err) {
+ sema.resolveFnTypes(fn_ty, inner_block.nodeOffset(.zero)) catch |err| switch (err) {
error.ComptimeReturn => unreachable,
error.ComptimeBreak => unreachable,
else => |e| return e,
src/main.zig
@@ -5224,7 +5224,7 @@ fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .relative_path = build_mod.root },
.location_tok = 0,
- .hash_tok = 0,
+ .hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = build_mod.root,
@@ -6285,8 +6285,10 @@ fn cmdAstCheck(
file.tree.?.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
const tree_bytes = @sizeOf(Ast) + file.tree.?.nodes.len *
(@sizeOf(Ast.Node.Tag) +
- @sizeOf(Ast.Node.Data) +
- @sizeOf(Ast.TokenIndex));
+ @sizeOf(Ast.TokenIndex) +
+ // Here we don't use @sizeOf(Ast.Node.Data) because it would include
+ // the debug safety tag, but we want to measure release size.
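+ // Untagged `Data` is two 4-byte words, hence 8 bytes per node.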
+ 8);
const instruction_bytes = file.zir.?.instructions.len *
// Here we don't use @sizeOf(Zir.Inst.Data) because it would include
// the debug safety tag but we want to measure release size.
@@ -7126,7 +7128,7 @@ fn cmdFetch(
.arena = std.heap.ArenaAllocator.init(gpa),
.location = .{ .path_or_url = path_or_url },
.location_tok = 0,
- .hash_tok = 0,
+ .hash_tok = .none,
.name_tok = 0,
.lazy_status = .eager,
.parent_package_root = undefined,
@@ -7282,8 +7284,8 @@ fn cmdFetch(
warn("overwriting existing dependency named '{s}'", .{name});
try fixups.replace_nodes_with_string.put(gpa, dep.location_node, location_replace);
- if (dep.hash_node != 0) {
- try fixups.replace_nodes_with_string.put(gpa, dep.hash_node, hash_replace);
+ if (dep.hash_node.unwrap()) |hash_node| {
+ try fixups.replace_nodes_with_string.put(gpa, hash_node, hash_replace);
} else {
// https://github.com/ziglang/zig/issues/21690
}
@@ -7292,9 +7294,9 @@ fn cmdFetch(
const deps = manifest.dependencies.values();
const last_dep_node = deps[deps.len - 1].node;
try fixups.append_string_after_node.put(gpa, last_dep_node, new_node_text);
- } else if (manifest.dependencies_node != 0) {
+ } else if (manifest.dependencies_node.unwrap()) |dependencies_node| {
// Add fixup for replacing the entire dependencies struct.
- try fixups.replace_nodes_with_string.put(gpa, manifest.dependencies_node, dependencies_init);
+ try fixups.replace_nodes_with_string.put(gpa, dependencies_node, dependencies_init);
} else {
// Add fixup for adding dependencies struct.
try fixups.append_string_after_node.put(gpa, manifest.version_node, dependencies_text);
src/print_zir.zig
@@ -24,7 +24,7 @@ pub fn renderAsTextToFile(
.file = scope_file,
.code = scope_file.zir.?,
.indent = 0,
- .parent_decl_node = 0,
+ .parent_decl_node = .root,
.recurse_decls = true,
.recurse_blocks = true,
};
@@ -185,10 +185,6 @@ const Writer = struct {
}
} = .{},
- fn relativeToNodeIndex(self: *Writer, offset: i32) Ast.Node.Index {
- return @bitCast(offset + @as(i32, @bitCast(self.parent_decl_node)));
- }
-
fn writeInstToStream(
self: *Writer,
stream: anytype,
@@ -595,7 +591,7 @@ const Writer = struct {
const prev_parent_decl_node = self.parent_decl_node;
self.parent_decl_node = inst_data.node;
defer self.parent_decl_node = prev_parent_decl_node;
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
},
.builtin_extern,
@@ -631,7 +627,8 @@ const Writer = struct {
fn writeExtNode(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
try stream.writeAll(")) ");
- try self.writeSrcNode(stream, @bitCast(extended.operand));
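+ // `extended.operand` packs a signed node offset; reinterpret the u32 bits as i32.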
+ const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ try self.writeSrcNode(stream, src_node);
}
fn writeArrayInitElemType(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
@@ -1579,7 +1576,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
}
fn writeUnionDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@@ -1659,7 +1656,7 @@ const Writer = struct {
if (fields_len == 0) {
try stream.writeAll("}) ");
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
return;
}
try stream.writeAll(", ");
@@ -1730,7 +1727,7 @@ const Writer = struct {
self.indent -= 2;
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
}
fn writeEnumDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@@ -1849,7 +1846,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
}
fn writeOpaqueDecl(
@@ -1893,7 +1890,7 @@ const Writer = struct {
try stream.writeByteNTimes(' ', self.indent);
try stream.writeAll("}) ");
}
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
}
fn writeTupleDecl(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@@ -2539,7 +2536,7 @@ const Writer = struct {
ret_ty_body: []const Zir.Inst.Index,
ret_ty_is_generic: bool,
body: []const Zir.Inst.Index,
- src_node: i32,
+ src_node: Ast.Node.Offset,
src_locs: Zir.Inst.Func.SrcLocs,
noalias_bits: u32,
) !void {
@@ -2647,18 +2644,20 @@ const Writer = struct {
}
try stream.writeAll(") ");
- try self.writeSrcNode(stream, 0);
+ try self.writeSrcNode(stream, .zero);
}
fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
try stream.print("{d})) ", .{extended.small});
- try self.writeSrcNode(stream, @bitCast(extended.operand));
+ const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ try self.writeSrcNode(stream, src_node);
}
fn writeBuiltinValue(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
const val: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
try stream.print("{s})) ", .{@tagName(val)});
- try self.writeSrcNode(stream, @bitCast(extended.operand));
+ const src_node: Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ try self.writeSrcNode(stream, src_node);
}
fn writeInplaceArithResultTy(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
@@ -2760,9 +2759,9 @@ const Writer = struct {
try stream.writeAll(name);
}
- fn writeSrcNode(self: *Writer, stream: anytype, src_node: i32) !void {
+ fn writeSrcNode(self: *Writer, stream: anytype, src_node: Ast.Node.Offset) !void {
const tree = self.file.tree orelse return;
- const abs_node = self.relativeToNodeIndex(src_node);
+ const abs_node = src_node.toAbsolute(self.parent_decl_node);
const src_span = tree.nodeToSpan(abs_node);
const start = self.line_col_cursor.find(tree.source, src_span.start);
const end = self.line_col_cursor.find(tree.source, src_span.end);
@@ -2772,10 +2771,10 @@ const Writer = struct {
});
}
- fn writeSrcTok(self: *Writer, stream: anytype, src_tok: u32) !void {
+ fn writeSrcTok(self: *Writer, stream: anytype, src_tok: Ast.TokenOffset) !void {
const tree = self.file.tree orelse return;
- const abs_tok = tree.firstToken(self.parent_decl_node) + src_tok;
- const span_start = tree.tokens.items(.start)[abs_tok];
+ const abs_tok = src_tok.toAbsolute(tree.firstToken(self.parent_decl_node));
+ const span_start = tree.tokenStart(abs_tok);
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(abs_tok).len));
const start = self.line_col_cursor.find(tree.source, span_start);
const end = self.line_col_cursor.find(tree.source, span_end);
@@ -2785,9 +2784,9 @@ const Writer = struct {
});
}
- fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: u32) !void {
+ fn writeSrcTokAbs(self: *Writer, stream: anytype, src_tok: Ast.TokenIndex) !void {
const tree = self.file.tree orelse return;
- const span_start = tree.tokens.items(.start)[src_tok];
+ const span_start = tree.tokenStart(src_tok);
const span_end = span_start + @as(u32, @intCast(tree.tokenSlice(src_tok).len));
const start = self.line_col_cursor.find(tree.source, span_start);
const end = self.line_col_cursor.find(tree.source, span_end);
src/Sema.zig
@@ -407,18 +407,18 @@ pub const Block = struct {
return block.comptime_reason != null;
}
- fn builtinCallArgSrc(block: *Block, builtin_call_node: i32, arg_index: u32) LazySrcLoc {
+ fn builtinCallArgSrc(block: *Block, builtin_call_node: std.zig.Ast.Node.Offset, arg_index: u32) LazySrcLoc {
return block.src(.{ .node_offset_builtin_call_arg = .{
.builtin_call_node = builtin_call_node,
.arg_index = arg_index,
} });
}
- pub fn nodeOffset(block: Block, node_offset: i32) LazySrcLoc {
+ pub fn nodeOffset(block: Block, node_offset: std.zig.Ast.Node.Offset) LazySrcLoc {
return block.src(LazySrcLoc.Offset.nodeOffset(node_offset));
}
- fn tokenOffset(block: Block, tok_offset: u32) LazySrcLoc {
+ fn tokenOffset(block: Block, tok_offset: std.zig.Ast.TokenOffset) LazySrcLoc {
return block.src(.{ .token_offset = tok_offset });
}
@@ -1860,7 +1860,7 @@ fn analyzeBodyInner(
if (!block.isComptime()) break :blk try sema.zirTry(block, inst);
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
- const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
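+ // `try` has no binary lhs; use the dedicated try-operand source location.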
+ const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
const err_union = try sema.resolveInst(extra.data.operand);
@@ -1883,7 +1883,7 @@ fn analyzeBodyInner(
if (!block.isComptime()) break :blk try sema.zirTryPtr(block, inst);
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.nodeOffset(inst_data.src_node);
- const operand_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
+ const operand_src = block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const inline_body = sema.code.bodySlice(extra.end, extra.data.body_len);
const operand = try sema.resolveInst(extra.data.operand);
@@ -2166,7 +2166,7 @@ pub fn setupErrorReturnTrace(sema: *Sema, block: *Block, last_arg_index: usize)
const addrs_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(addr_arr_ty));
// var st: StackTrace = undefined;
- const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+ const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const st_ptr = try err_trace_block.addTy(.alloc, try pt.singleMutPtrType(stack_trace_ty));
@@ -2901,7 +2901,7 @@ fn zirStructDecl(
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
- .offset = LazySrcLoc.Offset.nodeOffset(0),
+ .offset = LazySrcLoc.Offset.nodeOffset(.zero),
};
var extra_index = extra.end;
@@ -3114,7 +3114,7 @@ fn zirEnumDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
- const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+ const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
const tag_type_ref = if (small.has_tag_type) blk: {
const tag_type_ref: Zir.Inst.Ref = @enumFromInt(sema.code.extra[extra_index]);
@@ -3277,7 +3277,7 @@ fn zirUnionDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
- const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+ const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
extra_index += @intFromBool(small.has_tag_type);
const captures_len = if (small.has_captures_len) blk: {
@@ -3402,7 +3402,7 @@ fn zirOpaqueDecl(
var extra_index: usize = extra.end;
const tracked_inst = try block.trackZir(inst);
- const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+ const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
const captures_len = if (small.has_captures_len) blk: {
const captures_len = sema.code.extra[extra_index];
@@ -3835,7 +3835,7 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
if (try elem_ty.comptimeOnlySema(pt)) {
// The value was initialized through RLS, so we didn't detect the runtime condition earlier.
// TODO: source location of runtime control flow
- const init_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
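+ // The offending expression is the variable's initializer, not a binary rhs.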
+ const init_src = block.src(.{ .node_offset_var_decl_init = inst_data.src_node });
return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(pt)});
}
@@ -6690,8 +6690,8 @@ fn zirBreak(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) CompileError
if (block.label) |label| {
if (label.zir_block == zir_block) {
const br_ref = try start_block.addBr(label.merges.block_inst, operand);
- const src_loc = if (extra.operand_src_node != Zir.Inst.Break.no_src_node)
- start_block.nodeOffset(extra.operand_src_node)
+ const src_loc = if (extra.operand_src_node.unwrap()) |operand_src_node|
+ start_block.nodeOffset(operand_src_node)
else
null;
try label.merges.src_locs.append(sema.gpa, src_loc);
@@ -6715,8 +6715,7 @@ fn zirSwitchContinue(sema: *Sema, start_block: *Block, inst: Zir.Inst.Index) Com
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].@"break";
const extra = sema.code.extraData(Zir.Inst.Break, inst_data.payload_index).data;
- assert(extra.operand_src_node != Zir.Inst.Break.no_src_node);
- const operand_src = start_block.nodeOffset(extra.operand_src_node);
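+ // `switch_continue` always records an operand source node (formerly asserted above).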
+ const operand_src = start_block.nodeOffset(extra.operand_src_node.unwrap().?);
const uncoerced_operand = try sema.resolveInst(inst_data.operand);
const switch_inst = extra.block_inst;
@@ -7048,7 +7047,7 @@ pub fn analyzeSaveErrRetIndex(sema: *Sema, block: *Block) SemaError!Air.Inst.Ref
if (!block.ownerModule().error_tracing) return .none;
- const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+ const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const field_name = try zcu.intern_pool.getOrPutString(gpa, pt.tid, "index", .no_embedded_nulls);
const field_index = sema.structFieldIndex(block, stack_trace_ty, field_name, LazySrcLoc.unneeded) catch |err| switch (err) {
@@ -7346,7 +7345,7 @@ fn checkCallArgumentCount(
if (maybe_func_inst) |func_inst| {
try sema.errNote(.{
.base_node_inst = func_inst,
- .offset = LazySrcLoc.Offset.nodeOffset(0),
+ .offset = LazySrcLoc.Offset.nodeOffset(.zero),
}, msg, "function declared here", .{});
}
break :msg msg;
@@ -7418,7 +7417,7 @@ const CallArgsInfo = union(enum) {
/// The list of resolved (but uncoerced) arguments is known ahead of time, but
/// originated from a usage of the @call builtin at the given node offset.
call_builtin: struct {
- call_node_offset: i32,
+ call_node_offset: std.zig.Ast.Node.Offset,
args: []const Air.Inst.Ref,
},
@@ -7436,7 +7435,7 @@ const CallArgsInfo = union(enum) {
/// analyzing arguments.
call_inst: Zir.Inst.Index,
/// The node offset of `call_inst`.
- call_node_offset: i32,
+ call_node_offset: std.zig.Ast.Node.Offset,
/// The number of arguments to this call, not including `bound_arg`.
num_args: u32,
/// The ZIR corresponding to all function arguments (other than `bound_arg`, if it
@@ -7599,7 +7598,7 @@ fn analyzeCall(
const maybe_func_inst = try sema.funcDeclSrcInst(callee);
const func_ret_ty_src: LazySrcLoc = if (maybe_func_inst) |fn_decl_inst| .{
.base_node_inst = fn_decl_inst,
- .offset = .{ .node_offset_fn_type_ret_ty = 0 },
+ .offset = .{ .node_offset_fn_type_ret_ty = .zero },
} else func_src;
const func_ty_info = zcu.typeToFunc(func_ty).?;
@@ -7613,7 +7612,7 @@ fn analyzeCall(
errdefer msg.destroy(gpa);
if (maybe_func_inst) |func_inst| try sema.errNote(.{
.base_node_inst = func_inst,
- .offset = .nodeOffset(0),
+ .offset = .nodeOffset(.zero),
}, msg, "function declared here", .{});
break :msg msg;
});
@@ -9574,7 +9573,7 @@ const Section = union(enum) {
fn funcCommon(
sema: *Sema,
block: *Block,
- src_node_offset: i32,
+ src_node_offset: std.zig.Ast.Node.Offset,
func_inst: Zir.Inst.Index,
cc: std.builtin.CallingConvention,
/// this might be Type.generic_poison
@@ -9948,7 +9947,7 @@ fn finishFunc(
if (!is_generic and sema.wantErrorReturnTracing(return_type)) {
// Make sure that StackTrace's fields are resolved so that the backend can
// lower this fn type.
- const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+ const unresolved_stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try unresolved_stack_trace_ty.resolveFields(pt);
}
@@ -12599,7 +12598,7 @@ fn analyzeSwitchRuntimeBlock(
union_originally: bool,
maybe_union_ty: Type,
err_set: bool,
- switch_node_offset: i32,
+ switch_node_offset: std.zig.Ast.Node.Offset,
special_prong_src: LazySrcLoc,
seen_enum_fields: []?LazySrcLoc,
seen_errors: SwitchErrorSet,
@@ -13219,7 +13218,7 @@ fn resolveSwitchComptimeLoop(
maybe_ptr_operand_ty: Type,
cond_ty: Type,
init_cond_val: Value,
- switch_node_offset: i32,
+ switch_node_offset: std.zig.Ast.Node.Offset,
special: SpecialProng,
case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
scalar_cases_len: u32,
@@ -13255,7 +13254,7 @@ fn resolveSwitchComptimeLoop(
const extra = sema.code.extraData(Zir.Inst.Break, break_inst.data.@"break".payload_index).data;
if (extra.block_inst != spa.switch_block_inst) return error.ComptimeBreak;
// This is a `switch_continue` targeting this block. Change the operand and start over.
- const src = child_block.nodeOffset(extra.operand_src_node);
+ const src = child_block.nodeOffset(extra.operand_src_node.unwrap().?);
const new_operand_uncoerced = try sema.resolveInst(break_inst.data.@"break".operand);
const new_operand = try sema.coerce(child_block, maybe_ptr_operand_ty, new_operand_uncoerced, src);
@@ -13287,7 +13286,7 @@ fn resolveSwitchComptime(
cond_operand: Air.Inst.Ref,
operand_val: Value,
operand_ty: Type,
- switch_node_offset: i32,
+ switch_node_offset: std.zig.Ast.Node.Offset,
special: SpecialProng,
case_vals: std.ArrayListUnmanaged(Air.Inst.Ref),
scalar_cases_len: u32,
@@ -13837,7 +13836,7 @@ fn validateSwitchNoRange(
block: *Block,
ranges_len: u32,
operand_ty: Type,
- src_node_offset: i32,
+ src_node_offset: std.zig.Ast.Node.Offset,
) CompileError!void {
if (ranges_len == 0)
return;
@@ -14158,14 +14157,24 @@ fn zirShl(
const pt = sema.pt;
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
- const src = block.nodeOffset(inst_data.src_node);
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
const lhs_ty = sema.typeOf(lhs);
const rhs_ty = sema.typeOf(rhs);
+
+ const src = block.nodeOffset(inst_data.src_node);
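+ // `shl_exact` comes from the `@shlExact` builtin, so its operands are call arguments 0 and 1.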
+ const lhs_src = switch (air_tag) {
+ .shl, .shl_sat => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
+ .shl_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
+ else => unreachable,
+ };
+ const rhs_src = switch (air_tag) {
+ .shl, .shl_sat => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
+ .shl_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
+ else => unreachable,
+ };
+
try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);
const scalar_ty = lhs_ty.scalarType(zcu);
@@ -14329,14 +14338,24 @@ fn zirShr(
const pt = sema.pt;
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
- const src = block.nodeOffset(inst_data.src_node);
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
const lhs_ty = sema.typeOf(lhs);
const rhs_ty = sema.typeOf(rhs);
+
+ const src = block.nodeOffset(inst_data.src_node);
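+ // `shr_exact` likewise comes from `@shrExact`; point diagnostics at its call arguments.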
+ const lhs_src = switch (air_tag) {
+ .shr => block.src(.{ .node_offset_bin_lhs = inst_data.src_node }),
+ .shr_exact => block.builtinCallArgSrc(inst_data.src_node, 0),
+ else => unreachable,
+ };
+ const rhs_src = switch (air_tag) {
+ .shr => block.src(.{ .node_offset_bin_rhs = inst_data.src_node }),
+ .shr_exact => block.builtinCallArgSrc(inst_data.src_node, 1),
+ else => unreachable,
+ };
+
try sema.checkVectorizableBinaryOperands(block, src, lhs_ty, rhs_ty, lhs_src, rhs_src);
const scalar_ty = lhs_ty.scalarType(zcu);
@@ -14560,7 +14579,7 @@ fn zirBitNot(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
fn analyzeTupleCat(
sema: *Sema,
block: *Block,
- src_node: i32,
+ src_node: std.zig.Ast.Node.Offset,
lhs: Air.Inst.Ref,
rhs: Air.Inst.Ref,
) CompileError!Air.Inst.Ref {
@@ -15005,7 +15024,7 @@ fn getArrayCatInfo(sema: *Sema, block: *Block, src: LazySrcLoc, operand: Air.Ins
fn analyzeTupleMul(
sema: *Sema,
block: *Block,
- src_node: i32,
+ src_node: std.zig.Ast.Node.Offset,
operand: Air.Inst.Ref,
factor: usize,
) CompileError!Air.Inst.Ref {
@@ -15494,8 +15513,8 @@ fn zirDivExact(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
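+ // `@divExact` is a builtin call; its operands are arguments 0 and 1.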
+ const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@@ -15660,8 +15679,8 @@ fn zirDivFloor(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+ const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@@ -15771,8 +15790,8 @@ fn zirDivTrunc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+ const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@@ -16201,8 +16220,8 @@ fn zirMod(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+ const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@@ -16297,8 +16316,8 @@ fn zirRem(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
const zcu = pt.zcu;
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+ const lhs_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const rhs_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
const lhs = try sema.resolveInst(extra.lhs);
const rhs = try sema.resolveInst(extra.rhs);
@@ -17867,7 +17886,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
const ip = &zcu.intern_pool;
const captures = Type.fromInterned(zcu.namespacePtr(block.namespace).owner_type).getCaptures(zcu);
- const src_node: i32 = @bitCast(extended.operand);
+ const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
const src = block.nodeOffset(src_node);
const capture_ty = switch (captures.get(ip)[extended.small].unwrap()) {
@@ -17891,8 +17910,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
});
break :name null;
};
- const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
- const token = tree.nodes.items(.main_token)[node];
+ const node = src_node.toAbsolute(src_base_node);
+ const token = tree.nodeMainToken(node);
break :name tree.tokenSlice(token);
};
@@ -17919,8 +17938,8 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
});
break :name null;
};
- const node: std.zig.Ast.Node.Index = @bitCast(src_node + @as(i32, @bitCast(src_base_node)));
- const token = tree.nodes.items(.main_token)[node];
+ const node = src_node.toAbsolute(src_base_node);
+ const token = tree.nodeMainToken(node);
break :name tree.tokenSlice(token);
};
@@ -17930,7 +17949,7 @@ fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstDat
try sema.errMsg(src, "variable not accessible from inner function", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(block.nodeOffset(0), msg, "crossed function definition here", .{});
+ try sema.errNote(block.nodeOffset(.zero), msg, "crossed function definition here", .{});
// TODO add "declared here" note
break :msg msg;
@@ -17962,7 +17981,8 @@ fn zirFrameAddress(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
- const src = block.nodeOffset(@bitCast(extended.operand));
+ const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ const src = block.nodeOffset(src_node);
try sema.requireRuntimeBlock(block, src, null);
return try block.addNoOp(.frame_addr);
}
@@ -18059,7 +18079,7 @@ fn zirBuiltinSrc(
} });
};
- const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(0), .SourceLocation);
+ const src_loc_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .SourceLocation);
const fields = .{
// module: [:0]const u8,
module_name_val,
@@ -19528,7 +19548,7 @@ fn zirCondbr(
fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = parent_block.nodeOffset(inst_data.src_node);
- const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
+ const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
const err_union = try sema.resolveInst(extra.data.operand);
@@ -19587,7 +19607,7 @@ fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!
fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = parent_block.nodeOffset(inst_data.src_node);
- const operand_src = parent_block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
+ const operand_src = parent_block.src(.{ .node_offset_try_operand = inst_data.src_node });
const extra = sema.code.extraData(Zir.Inst.Try, inst_data.payload_index);
const body = sema.code.bodySlice(extra.end, extra.data.body_len);
const operand = try sema.resolveInst(extra.data.operand);
@@ -19790,7 +19810,7 @@ fn zirRetImplicit(
}
const operand = try sema.resolveInst(inst_data.operand);
- const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = 0 });
+ const ret_ty_src = block.src(.{ .node_offset_fn_type_ret_ty = .zero });
const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu);
if (base_tag == .noreturn) {
const msg = msg: {
@@ -21277,7 +21297,7 @@ fn getErrorReturnTrace(sema: *Sema, block: *Block) CompileError!Air.Inst.Ref {
const pt = sema.pt;
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
- const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(0), .StackTrace);
+ const stack_trace_ty = try sema.getBuiltinType(block.nodeOffset(.zero), .StackTrace);
try stack_trace_ty.resolveFields(pt);
const ptr_stack_trace_ty = try pt.singleMutPtrType(stack_trace_ty);
const opt_ptr_stack_trace_ty = try pt.optionalType(ptr_stack_trace_ty.toIntern());
@@ -21299,7 +21319,8 @@ fn zirFrame(
block: *Block,
extended: Zir.Inst.Extended.InstData,
) CompileError!Air.Inst.Ref {
- const src = block.nodeOffset(@bitCast(extended.operand));
+ const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ const src = block.nodeOffset(src_node);
return sema.failWithUseOfAsync(block, src);
}
@@ -21553,13 +21574,13 @@ fn zirReify(
const tracked_inst = try block.trackZir(inst);
const src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
- .offset = LazySrcLoc.Offset.nodeOffset(0),
+ .offset = LazySrcLoc.Offset.nodeOffset(.zero),
};
const operand_src: LazySrcLoc = .{
.base_node_inst = tracked_inst,
.offset = .{
.node_offset_builtin_call_arg = .{
- .builtin_call_node = 0, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
+ .builtin_call_node = .zero, // `tracked_inst` is precisely the `reify` instruction, so offset is 0
.arg_index = 0,
},
},
@@ -22867,7 +22888,8 @@ fn zirCVaEnd(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C
}
fn zirCVaStart(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
- const src = block.nodeOffset(@bitCast(extended.operand));
+ const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ const src = block.nodeOffset(src_node);
const va_list_ty = try sema.getBuiltinType(src, .VaList);
try sema.requireRuntimeBlock(block, src, null);
@@ -24272,12 +24294,12 @@ fn zirOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u64 {
const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].pl_node;
const src = block.src(.{ .node_offset_bin_op = inst_data.src_node });
- const lhs_src = block.src(.{ .node_offset_bin_lhs = inst_data.src_node });
- const rhs_src = block.src(.{ .node_offset_bin_rhs = inst_data.src_node });
+ const ty_src = block.builtinCallArgSrc(inst_data.src_node, 0);
+ const field_name_src = block.builtinCallArgSrc(inst_data.src_node, 1);
const extra = sema.code.extraData(Zir.Inst.Bin, inst_data.payload_index).data;
- const ty = try sema.resolveType(block, lhs_src, extra.lhs);
- const field_name = try sema.resolveConstStringIntern(block, rhs_src, extra.rhs, .{ .simple = .field_name });
+ const ty = try sema.resolveType(block, ty_src, extra.lhs);
+ const field_name = try sema.resolveConstStringIntern(block, field_name_src, extra.rhs, .{ .simple = .field_name });
const pt = sema.pt;
const zcu = pt.zcu;
@@ -24285,15 +24307,15 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6
try ty.resolveLayout(pt);
switch (ty.zigTypeTag(zcu)) {
.@"struct" => {},
- else => return sema.fail(block, lhs_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, ty_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
}
const field_index = if (ty.isTuple(zcu)) blk: {
if (field_name.eqlSlice("len", ip)) {
return sema.fail(block, src, "no offset available for 'len' field of tuple", .{});
}
- break :blk try sema.tupleFieldIndex(block, ty, field_name, rhs_src);
- } else try sema.structFieldIndex(block, ty, field_name, rhs_src);
+ break :blk try sema.tupleFieldIndex(block, ty, field_name, field_name_src);
+ } else try sema.structFieldIndex(block, ty, field_name, field_name_src);
if (ty.structFieldIsComptime(field_index, zcu)) {
return sema.fail(block, src, "no offset available for comptime field", .{});
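
With this hunk, `bitOffsetOf` stops describing its operands as the LHS/RHS of a generic binary node and points diagnostics at the concrete builtin-call arguments instead. A toy model of the idea behind `builtinCallArgSrc`; the helper body is hypothetical, inferred from the `node_offset_builtin_call_arg` variant that appears later in this diff:

const std = @import("std");

// Toy model: a diagnostic source that names the n-th argument of a
// builtin call, rather than a generic binary LHS or RHS.
const NodeOffset = enum(i32) { zero = 0, _ };

const SrcOffset = union(enum) {
    node_offset_bin_op: NodeOffset,
    node_offset_builtin_call_arg: struct {
        builtin_call_node: NodeOffset,
        arg_index: u32,
    },
};

// Hypothetical helper mirroring how `builtinCallArgSrc` is called above.
fn builtinCallArgSrc(node: NodeOffset, arg_index: u32) SrcOffset {
    return .{ .node_offset_builtin_call_arg = .{
        .builtin_call_node = node,
        .arg_index = arg_index,
    } };
}

test "the source carries the call node and the argument index" {
    const field_name_src = builtinCallArgSrc(.zero, 1);
    try std.testing.expectEqual(@as(u32, 1), field_name_src.node_offset_builtin_call_arg.arg_index);
}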
@@ -25077,7 +25099,7 @@ fn zirShuffle(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
fn analyzeShuffle(
sema: *Sema,
block: *Block,
- src_node: i32,
+ src_node: std.zig.Ast.Node.Offset,
elem_ty: Type,
a_arg: Air.Inst.Ref,
b_arg: Air.Inst.Ref,
@@ -27004,7 +27026,8 @@ fn zirBuiltinValue(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstD
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
- const src = block.nodeOffset(@bitCast(extended.operand));
+ const src_node: std.zig.Ast.Node.Offset = @enumFromInt(@as(i32, @bitCast(extended.operand)));
+ const src = block.nodeOffset(src_node);
const value: Zir.Inst.BuiltinValue = @enumFromInt(extended.small);
const ty = switch (value) {
@@ -29479,7 +29502,7 @@ const CoerceOpts = struct {
return .{
.base_node_inst = func_inst,
.offset = .{ .fn_proto_param_type = .{
- .fn_proto_node_offset = 0,
+ .fn_proto_node_offset = .zero,
.param_index = info.param_i,
} },
};
@@ -30084,7 +30107,7 @@ fn coerceExtra(
const ret_ty_src: LazySrcLoc = .{
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
- .offset = .{ .node_offset_fn_type_ret_ty = 0 },
+ .offset = .{ .node_offset_fn_type_ret_ty = .zero },
};
try sema.errNote(ret_ty_src, msg, "'noreturn' declared here", .{});
break :msg msg;
@@ -30124,7 +30147,7 @@ fn coerceExtra(
{
const ret_ty_src: LazySrcLoc = .{
.base_node_inst = ip.getNav(zcu.funcInfo(sema.func_index).owner_nav).srcInst(ip),
- .offset = .{ .node_offset_fn_type_ret_ty = 0 },
+ .offset = .{ .node_offset_fn_type_ret_ty = .zero },
};
if (inst_ty.isError(zcu) and !dest_ty.isError(zcu)) {
try sema.errNote(ret_ty_src, msg, "function cannot return an error", .{});
@@ -32325,7 +32348,7 @@ pub fn ensureNavResolved(sema: *Sema, src: LazySrcLoc, nav_index: InternPool.Nav
if (zcu.analysis_in_progress.contains(anal_unit)) {
return sema.failWithOwnedErrorMsg(null, try sema.errMsg(.{
.base_node_inst = nav.analysis.?.zir_index,
- .offset = LazySrcLoc.Offset.nodeOffset(0),
+ .offset = LazySrcLoc.Offset.nodeOffset(.zero),
}, "dependency loop detected", .{}));
}
@@ -33942,7 +33965,7 @@ const PeerTypeCandidateSrc = union(enum) {
/// index i in this slice
override: []const ?LazySrcLoc,
/// resolvePeerTypes originates from a @TypeOf(...) call
- typeof_builtin_call_node_offset: i32,
+ typeof_builtin_call_node_offset: std.zig.Ast.Node.Offset,
pub fn resolve(
self: PeerTypeCandidateSrc,
@@ -35545,7 +35568,7 @@ fn backingIntType(
const backing_int_src: LazySrcLoc = .{
.base_node_inst = struct_type.zir_index,
- .offset = .{ .node_offset_container_tag = 0 },
+ .offset = .{ .node_offset_container_tag = .zero },
};
block.comptime_reason = .{ .reason = .{
.src = backing_int_src,
@@ -35566,7 +35589,7 @@ fn backingIntType(
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
} else {
if (fields_bit_sum > std.math.maxInt(u16)) {
- return sema.fail(&block, block.nodeOffset(0), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
+ return sema.fail(&block, block.nodeOffset(.zero), "size of packed struct '{d}' exceeds maximum bit width of 65535", .{fields_bit_sum});
}
const backing_int_ty = try pt.intType(.unsigned, @intCast(fields_bit_sum));
struct_type.setBackingIntType(ip, backing_int_ty.toIntern());
@@ -36167,7 +36190,7 @@ fn structFields(
.comptime_reason = .{ .reason = .{
.src = .{
.base_node_inst = struct_type.zir_index,
- .offset = .nodeOffset(0),
+ .offset = .nodeOffset(.zero),
},
.r = .{ .simple = .struct_fields },
} },
@@ -36508,7 +36531,7 @@ fn unionFields(
const src: LazySrcLoc = .{
.base_node_inst = union_type.zir_index,
- .offset = .nodeOffset(0),
+ .offset = .nodeOffset(.zero),
};
var block_scope: Block = .{
@@ -36537,7 +36560,7 @@ fn unionFields(
if (tag_type_ref != .none) {
const tag_ty_src: LazySrcLoc = .{
.base_node_inst = union_type.zir_index,
- .offset = .{ .node_offset_container_tag = 0 },
+ .offset = .{ .node_offset_container_tag = .zero },
};
const provided_ty = try sema.resolveType(&block_scope, tag_ty_src, tag_type_ref);
if (small.auto_enum_tag) {
@@ -38512,7 +38535,7 @@ pub fn resolveDeclaredEnum(
const zcu = pt.zcu;
const gpa = zcu.gpa;
- const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(0) };
+ const src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = LazySrcLoc.Offset.nodeOffset(.zero) };
var arena: std.heap.ArenaAllocator = .init(gpa);
defer arena.deinit();
@@ -38599,7 +38622,7 @@ fn resolveDeclaredEnumInner(
const bit_bags_count = std.math.divCeil(usize, fields_len, 32) catch unreachable;
- const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = 0 } };
+ const tag_ty_src: LazySrcLoc = .{ .base_node_inst = tracked_inst, .offset = .{ .node_offset_container_tag = .zero } };
const int_tag_ty = ty: {
if (body.len != 0) {
@@ -38752,9 +38775,9 @@ pub fn resolveNavPtrModifiers(
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
- const align_src = block.src(.{ .node_offset_var_decl_align = 0 });
- const section_src = block.src(.{ .node_offset_var_decl_section = 0 });
- const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = 0 });
+ const align_src = block.src(.{ .node_offset_var_decl_align = .zero });
+ const section_src = block.src(.{ .node_offset_var_decl_section = .zero });
+ const addrspace_src = block.src(.{ .node_offset_var_decl_addrspace = .zero });
const alignment: InternPool.Alignment = a: {
const align_body = zir_decl.align_body orelse break :a .none;
@@ -38827,7 +38850,7 @@ pub fn analyzeMemoizedState(sema: *Sema, block: *Block, simple_src: LazySrcLoc,
const src: LazySrcLoc = .{
.base_node_inst = ip.getNav(nav).srcInst(ip),
- .offset = .nodeOffset(0),
+ .offset = .nodeOffset(.zero),
};
const result = try sema.analyzeNavVal(block, src, nav);
src/Type.zig
@@ -3505,7 +3505,7 @@ pub fn srcLocOrNull(ty: Type, zcu: *Zcu) ?Zcu.LazySrcLoc {
},
else => return null,
},
- .offset = Zcu.LazySrcLoc.Offset.nodeOffset(0),
+ .offset = Zcu.LazySrcLoc.Offset.nodeOffset(.zero),
};
}
src/Zcu.zig
@@ -134,7 +134,7 @@ failed_types: std.AutoArrayHashMapUnmanaged(InternPool.Index, *ErrorMsg) = .empt
/// The value is the source location of the `@compileLog` call, convertible to a `LazySrcLoc`.
compile_log_sources: std.AutoArrayHashMapUnmanaged(AnalUnit, extern struct {
base_node_inst: InternPool.TrackedInst.Index,
- node_offset: i32,
+ node_offset: Ast.Node.Offset,
pub fn src(self: @This()) LazySrcLoc {
return .{
.base_node_inst = self.base_node_inst,
@@ -1031,10 +1031,6 @@ pub const SrcLoc = struct {
return tree.firstToken(src_loc.base_node);
}
- pub fn relativeToNodeIndex(src_loc: SrcLoc, offset: i32) Ast.Node.Index {
- return @bitCast(offset + @as(i32, @bitCast(src_loc.base_node)));
- }
-
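
The deleted helper did its arithmetic on bare `i32`/`u32` values; every call site below replaces it with `node_off.toAbsolute(src_loc.base_node)`, which keeps the same wrap-around addition but hides it behind the offset type. A self-contained sketch of what such a method plausibly looks like (the real `std.zig.Ast` definition may differ in detail):

const std = @import("std");

// Absolute node indices and relative offsets as distinct enums, so the
// compiler rejects any attempt to mix them up.
const Index = enum(u32) { root = 0, _ };

const Offset = enum(i32) {
    zero = 0,
    _,

    // The same computation as the removed `relativeToNodeIndex`:
    // offset + base, via bit-level reinterpretation of the base index.
    fn toAbsolute(off: Offset, base: Index) Index {
        const result = @intFromEnum(off) +% @as(i32, @bitCast(@intFromEnum(base)));
        return @enumFromInt(@as(u32, @bitCast(result)));
    }
};

test "an offset resolves relative to its base node" {
    const base: Index = @enumFromInt(10);
    const off: Offset = @enumFromInt(-3);
    try std.testing.expectEqual(@as(u32, 7), @intFromEnum(off.toAbsolute(base)));
}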
pub const Span = Ast.Span;
pub fn span(src_loc: SrcLoc, gpa: Allocator) !Span {
@@ -1046,7 +1042,7 @@ pub const SrcLoc = struct {
.token_abs => |tok_index| {
const tree = try src_loc.file_scope.getTree(gpa);
- const start = tree.tokens.items(.start)[tok_index];
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
@@ -1057,133 +1053,137 @@ pub const SrcLoc = struct {
.byte_offset => |byte_off| {
const tree = try src_loc.file_scope.getTree(gpa);
const tok_index = src_loc.baseSrcToken();
- const start = tree.tokens.items(.start)[tok_index] + byte_off;
+ const start = tree.tokenStart(tok_index) + byte_off;
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.token_offset => |tok_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const tok_index = src_loc.baseSrcToken() + tok_off;
- const start = tree.tokens.items(.start)[tok_index];
+ const tok_index = tok_off.toAbsolute(src_loc.baseSrcToken());
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.node_offset => |traced_off| {
const node_off = traced_off.x;
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_main_token => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const main_token = tree.nodes.items(.main_token)[node];
+ const node = node_off.toAbsolute(src_loc.base_node);
+ const main_token = tree.nodeMainToken(node);
return tree.tokensToSpan(main_token, main_token, main_token);
},
.node_offset_bin_op => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_initializer => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
return tree.tokensToSpan(
tree.firstToken(node) - 3,
tree.lastToken(node),
- tree.nodes.items(.main_token)[node] - 2,
+ tree.nodeMainToken(node) - 2,
);
},
.node_offset_var_decl_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_tags = tree.nodes.items(.tag);
- const full = switch (node_tags[node]) {
+ const node = node_off.toAbsolute(src_loc.base_node);
+ const full = switch (tree.nodeTag(node)) {
.global_var_decl,
.local_var_decl,
.simple_var_decl,
.aligned_var_decl,
=> tree.fullVarDecl(node).?,
.@"usingnamespace" => {
- const node_data = tree.nodes.items(.data);
- return tree.nodeToSpan(node_data[node].lhs);
+ return tree.nodeToSpan(tree.nodeData(node).node);
},
else => unreachable,
};
- if (full.ast.type_node != 0) {
- return tree.nodeToSpan(full.ast.type_node);
+ if (full.ast.type_node.unwrap()) |type_node| {
+ return tree.nodeToSpan(type_node);
}
const tok_index = full.ast.mut_token + 1; // the name token
- const start = tree.tokens.items(.start)[tok_index];
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
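
The `full.ast.type_node.unwrap()` call above is the new optional-node convention that recurs through the rest of this file: where the old code compared a node index against `0` to mean "absent", the reworked `Ast` uses a dedicated `OptionalIndex` with a `.none` sentinel. A self-contained model of the convention; the concrete sentinel value is an assumption, only the `.none`/`unwrap()` shape is taken from this diff:

const std = @import("std");

const Index = enum(u32) { root = 0, _ };

// With the root node occupying index 0, zero can no longer mean
// "absent", so absence gets an explicit sentinel and callers unwrap.
const OptionalIndex = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn unwrap(opt: OptionalIndex) ?Index {
        return if (opt == .none) null else @enumFromInt(@intFromEnum(opt));
    }
};

test "unwrap distinguishes an absent node from node 0" {
    const absent: OptionalIndex = .none;
    const root: OptionalIndex = @enumFromInt(0);
    try std.testing.expect(absent.unwrap() == null);
    try std.testing.expectEqual(Index.root, root.unwrap().?);
}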
.node_offset_var_decl_align => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const align_node = if (tree.fullVarDecl(node)) |v|
- v.ast.align_node
+ v.ast.align_node.unwrap().?
else if (tree.fullFnProto(&buf, node)) |f|
- f.ast.align_expr
+ f.ast.align_expr.unwrap().?
else
unreachable;
return tree.nodeToSpan(align_node);
},
.node_offset_var_decl_section => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const section_node = if (tree.fullVarDecl(node)) |v|
- v.ast.section_node
+ v.ast.section_node.unwrap().?
else if (tree.fullFnProto(&buf, node)) |f|
- f.ast.section_expr
+ f.ast.section_expr.unwrap().?
else
unreachable;
return tree.nodeToSpan(section_node);
},
.node_offset_var_decl_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const addrspace_node = if (tree.fullVarDecl(node)) |v|
- v.ast.addrspace_node
+ v.ast.addrspace_node.unwrap().?
else if (tree.fullFnProto(&buf, node)) |f|
- f.ast.addrspace_expr
+ f.ast.addrspace_expr.unwrap().?
else
unreachable;
return tree.nodeToSpan(addrspace_node);
},
.node_offset_var_decl_init => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const full = tree.fullVarDecl(node).?;
- return tree.nodeToSpan(full.ast.init_node);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ const init_node = switch (tree.nodeTag(node)) {
+ .global_var_decl,
+ .local_var_decl,
+ .aligned_var_decl,
+ .simple_var_decl,
+ => tree.fullVarDecl(node).?.ast.init_node.unwrap().?,
+ .assign_destructure => tree.assignDestructure(node).ast.value_expr,
+ else => unreachable,
+ };
+ return tree.nodeToSpan(init_node);
},
.node_offset_builtin_call_arg => |builtin_arg| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(builtin_arg.builtin_call_node);
+ const node = builtin_arg.builtin_call_node.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
const params = tree.builtinCallParams(&buf, node).?;
return tree.nodeToSpan(params[builtin_arg.arg_index]);
},
.node_offset_ptrcast_operand => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const main_tokens = tree.nodes.items(.main_token);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- var node = src_loc.relativeToNodeIndex(node_off);
+ var node = node_off.toAbsolute(src_loc.base_node);
while (true) {
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.builtin_call_two, .builtin_call_two_comma => {},
else => break,
}
- if (node_datas[node].lhs == 0) break; // 0 args
- if (node_datas[node].rhs != 0) break; // 2 args
+ const first_arg, const second_arg = tree.nodeData(node).opt_node_and_opt_node;
+ if (first_arg == .none) break; // 0 args
+ if (second_arg != .none) break; // 2 args
- const builtin_token = main_tokens[node];
+ const builtin_token = tree.nodeMainToken(node);
const builtin_name = tree.tokenSlice(builtin_token);
const info = BuiltinFn.list.get(builtin_name) orelse break;
@@ -1197,16 +1197,15 @@ pub const SrcLoc = struct {
=> {},
}
- node = node_datas[node].lhs;
+ node = first_arg.unwrap().?;
}
return tree.nodeToSpan(node);
},
.node_offset_array_access_index => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_datas = tree.nodes.items(.data);
- const node = src_loc.relativeToNodeIndex(node_off);
- return tree.nodeToSpan(node_datas[node].rhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ return tree.nodeToSpan(tree.nodeData(node).node_and_node[1]);
},
.node_offset_slice_ptr,
.node_offset_slice_start,
@@ -1214,32 +1213,30 @@ pub const SrcLoc = struct {
.node_offset_slice_sentinel,
=> |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullSlice(node).?;
const part_node = switch (src_loc.lazy) {
.node_offset_slice_ptr => full.ast.sliced,
.node_offset_slice_start => full.ast.start,
- .node_offset_slice_end => full.ast.end,
- .node_offset_slice_sentinel => full.ast.sentinel,
+ .node_offset_slice_end => full.ast.end.unwrap().?,
+ .node_offset_slice_sentinel => full.ast.sentinel.unwrap().?,
else => unreachable,
};
return tree.nodeToSpan(part_node);
},
.node_offset_call_func => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullCall(&buf, node).?;
return tree.nodeToSpan(full.ast.fn_expr);
},
.node_offset_field_name => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
- const tok_index = switch (node_tags[node]) {
- .field_access => node_datas[node].rhs,
+ const tok_index = switch (tree.nodeTag(node)) {
+ .field_access => tree.nodeData(node).node_and_token[1],
.call_one,
.call_one_comma,
.async_call_one,
@@ -1254,43 +1251,41 @@ pub const SrcLoc = struct {
},
else => tree.firstToken(node) - 2,
};
- const start = tree.tokens.items(.start)[tok_index];
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_field_name_init => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
const tok_index = tree.firstToken(node) - 2;
- const start = tree.tokens.items(.start)[tok_index];
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_deref_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
return tree.nodeToSpan(node);
},
.node_offset_asm_source => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullAsm(node).?;
return tree.nodeToSpan(full.ast.template);
},
.node_offset_asm_ret_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullAsm(node).?;
const asm_output = full.outputs[0];
- const node_datas = tree.nodes.items(.data);
- return tree.nodeToSpan(node_datas[asm_output].lhs);
+ return tree.nodeToSpan(tree.nodeData(asm_output).opt_node_and_token[0].unwrap().?);
},
.node_offset_if_cond => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_tags = tree.nodes.items(.tag);
- const src_node = switch (node_tags[node]) {
+ const node = node_off.toAbsolute(src_loc.base_node);
+ const src_node = switch (tree.nodeTag(node)) {
.if_simple,
.@"if",
=> tree.fullIf(node).?.ast.cond_expr,
@@ -1317,20 +1312,19 @@ pub const SrcLoc = struct {
},
.for_input => |for_input| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(for_input.for_node_offset);
+ const node = for_input.for_node_offset.toAbsolute(src_loc.base_node);
const for_full = tree.fullFor(node).?;
const src_node = for_full.ast.inputs[for_input.input_index];
return tree.nodeToSpan(src_node);
},
.for_capture_from_input => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const token_tags = tree.tokens.items(.tag);
- const input_node = src_loc.relativeToNodeIndex(node_off);
+ const input_node = node_off.toAbsolute(src_loc.base_node);
// We have to actually linear scan the whole AST to find the for loop
// that contains this input.
const node_tags = tree.nodes.items(.tag);
for (node_tags, 0..) |node_tag, node_usize| {
- const node = @as(Ast.Node.Index, @intCast(node_usize));
+ const node: Ast.Node.Index = @enumFromInt(node_usize);
switch (node_tag) {
.for_simple, .@"for" => {
const for_full = tree.fullFor(node).?;
@@ -1339,7 +1333,7 @@ pub const SrcLoc = struct {
var count = input_index;
var tok = for_full.payload_token;
while (true) {
- switch (token_tags[tok]) {
+ switch (tree.tokenTag(tok)) {
.comma => {
count -= 1;
tok += 1;
@@ -1366,13 +1360,12 @@ pub const SrcLoc = struct {
},
.call_arg => |call_arg| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(call_arg.call_node_offset);
+ const node = call_arg.call_node_offset.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
const call_full = tree.fullCall(buf[0..1], node) orelse {
- const node_tags = tree.nodes.items(.tag);
- assert(node_tags[node] == .builtin_call);
- const call_args_node = tree.extra_data[tree.nodes.items(.data)[node].rhs - 1];
- switch (node_tags[call_args_node]) {
+ assert(tree.nodeTag(node) == .builtin_call);
+ const call_args_node: Ast.Node.Index = @enumFromInt(tree.extra_data[@intFromEnum(tree.nodeData(node).extra_range.end) - 1]);
+ switch (tree.nodeTag(call_args_node)) {
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
@@ -1404,7 +1397,7 @@ pub const SrcLoc = struct {
},
.fn_proto_param, .fn_proto_param_type => |fn_proto_param| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(fn_proto_param.fn_proto_node_offset);
+ const node = fn_proto_param.fn_proto_node_offset.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
var it = full.iterate(tree);
@@ -1416,14 +1409,14 @@ pub const SrcLoc = struct {
.fn_proto_param_type => if (param.anytype_ellipsis3) |tok| {
return tree.tokenToSpan(tok);
} else {
- return tree.nodeToSpan(param.type_expr);
+ return tree.nodeToSpan(param.type_expr.?);
},
.fn_proto_param => if (param.anytype_ellipsis3) |tok| {
const first = param.comptime_noalias orelse param.name_token orelse tok;
return tree.tokensToSpan(first, tok, first);
} else {
- const first = param.comptime_noalias orelse param.name_token orelse tree.firstToken(param.type_expr);
- return tree.tokensToSpan(first, tree.lastToken(param.type_expr), first);
+ const first = param.comptime_noalias orelse param.name_token orelse tree.firstToken(param.type_expr.?);
+ return tree.tokensToSpan(first, tree.lastToken(param.type_expr.?), first);
},
else => unreachable,
}
@@ -1432,28 +1425,24 @@ pub const SrcLoc = struct {
},
.node_offset_bin_lhs => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_datas = tree.nodes.items(.data);
- return tree.nodeToSpan(node_datas[node].lhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ return tree.nodeToSpan(tree.nodeData(node).node_and_node[0]);
},
.node_offset_bin_rhs => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_datas = tree.nodes.items(.data);
- return tree.nodeToSpan(node_datas[node].rhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ return tree.nodeToSpan(tree.nodeData(node).node_and_node[1]);
},
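
The two cases above show the other recurring rewrite in this switch: `tree.nodes.items(.data)[node].lhs`/`.rhs` becomes `tree.nodeData(node)` followed by a named payload such as `.node`, `.node_and_node`, or `.node_and_extra`. Instead of two untyped `u32` slots per node, the payload shape is now spelled out per tag; this is also why the `node_offset_try_operand` case added just below reads a single `.node`, since a `try` expression carries one operand and the old `node_offset_bin_lhs` description never really fit it. A compact self-contained model (the payload names here are the ones this diff uses; the exact set in `std.zig.Ast` is larger):

const std = @import("std");

const Index = enum(u32) { root = 0, _ };
const OptionalIndex = enum(u32) { none = std.math.maxInt(u32), _ };

// Each AST tag picks one of these named shapes, so call sites
// destructure by meaning instead of guessing what lhs/rhs hold.
const Data = union {
    node: Index, // unary ops, `try`: a single operand
    node_and_node: [2]Index, // binary ops: lhs and rhs
    opt_node_and_opt_node: [2]OptionalIndex, // builtin_call_two: 0-2 args
};

test "payloads are destructured by shape" {
    const bin: Data = .{ .node_and_node = .{ @enumFromInt(1), @enumFromInt(2) } };
    const lhs, const rhs = bin.node_and_node;
    try std.testing.expectEqual(@as(u32, 1), @intFromEnum(lhs));
    try std.testing.expectEqual(@as(u32, 2), @intFromEnum(rhs));
}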
.array_cat_lhs, .array_cat_rhs => |cat| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(cat.array_cat_offset);
- const node_datas = tree.nodes.items(.data);
+ const node = cat.array_cat_offset.toAbsolute(src_loc.base_node);
const arr_node = if (src_loc.lazy == .array_cat_lhs)
- node_datas[node].lhs
+ tree.nodeData(node).node_and_node[0]
else
- node_datas[node].rhs;
+ tree.nodeData(node).node_and_node[1];
- const node_tags = tree.nodes.items(.tag);
var buf: [2]Ast.Node.Index = undefined;
- switch (node_tags[arr_node]) {
+ switch (tree.nodeTag(arr_node)) {
.array_init_one,
.array_init_one_comma,
.array_init_dot_two,
@@ -1470,27 +1459,30 @@ pub const SrcLoc = struct {
}
},
+ .node_offset_try_operand => |node_off| {
+ const tree = try src_loc.file_scope.getTree(gpa);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ return tree.nodeToSpan(tree.nodeData(node).node);
+ },
+
.node_offset_switch_operand => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_datas = tree.nodes.items(.data);
- return tree.nodeToSpan(node_datas[node].lhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ const condition, _ = tree.nodeData(node).node_and_extra;
+ return tree.nodeToSpan(condition);
},
.node_offset_switch_special_prong => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const switch_node = src_loc.relativeToNodeIndex(node_off);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
- const case_nodes = tree.extra_data[extra.start..extra.end];
+ const switch_node = node_off.toAbsolute(src_loc.base_node);
+ _, const extra_index = tree.nodeData(switch_node).node_and_extra;
+ const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
const is_special = (case.ast.values.len == 0) or
(case.ast.values.len == 1 and
- node_tags[case.ast.values[0]] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
+ tree.nodeTag(case.ast.values[0]) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(case.ast.values[0])), "_"));
if (!is_special) continue;
return tree.nodeToSpan(case_node);
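
Switch prongs live out-of-line in the tree's flat `extra_data` array. The old code sliced it by hand (`tree.extra_data[extra.start..extra.end]`) and got back bare `u32`s; `extraDataSlice` performs the same slice but re-types the elements at the edge. A self-contained sketch of that re-typing, assuming the target element type has the same size as `u32` so a slice `@ptrCast` is valid:

const std = @import("std");

const Index = enum(u32) { root = 0, _ };
const SubRange = struct { start: u32, end: u32 };

// The side array stays a flat []u32; typing happens only here, when a
// SubRange is reinterpreted as a slice of node indices.
fn extraDataSlice(extra: []const u32, range: SubRange, comptime T: type) []const T {
    return @ptrCast(extra[range.start..range.end]);
}

test "a SubRange re-types part of the extra array" {
    const extra = [_]u32{ 99, 5, 6, 7 };
    const case_nodes = extraDataSlice(&extra, .{ .start = 1, .end = 4 }, Index);
    try std.testing.expectEqual(@as(usize, 3), case_nodes.len);
    try std.testing.expectEqual(@as(u32, 5), @intFromEnum(case_nodes[0]));
}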
@@ -1499,22 +1491,19 @@ pub const SrcLoc = struct {
.node_offset_switch_range => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const switch_node = src_loc.relativeToNodeIndex(node_off);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
- const case_nodes = tree.extra_data[extra.start..extra.end];
+ const switch_node = node_off.toAbsolute(src_loc.base_node);
+ _, const extra_index = tree.nodeData(switch_node).node_and_extra;
+ const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
for (case_nodes) |case_node| {
const case = tree.fullSwitchCase(case_node).?;
const is_special = (case.ast.values.len == 0) or
(case.ast.values.len == 1 and
- node_tags[case.ast.values[0]] == .identifier and
- mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_"));
+ tree.nodeTag(case.ast.values[0]) == .identifier and
+ mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(case.ast.values[0])), "_"));
if (is_special) continue;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] == .switch_range) {
+ if (tree.nodeTag(item_node) == .switch_range) {
return tree.nodeToSpan(item_node);
}
}
@@ -1522,47 +1511,46 @@ pub const SrcLoc = struct {
},
.node_offset_fn_type_align => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- return tree.nodeToSpan(full.ast.align_expr);
+ return tree.nodeToSpan(full.ast.align_expr.unwrap().?);
},
.node_offset_fn_type_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- return tree.nodeToSpan(full.ast.addrspace_expr);
+ return tree.nodeToSpan(full.ast.addrspace_expr.unwrap().?);
},
.node_offset_fn_type_section => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- return tree.nodeToSpan(full.ast.section_expr);
+ return tree.nodeToSpan(full.ast.section_expr.unwrap().?);
},
.node_offset_fn_type_cc => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- return tree.nodeToSpan(full.ast.callconv_expr);
+ return tree.nodeToSpan(full.ast.callconv_expr.unwrap().?);
},
.node_offset_fn_type_ret_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, node).?;
- return tree.nodeToSpan(full.ast.return_type);
+ return tree.nodeToSpan(full.ast.return_type.unwrap().?);
},
.node_offset_param => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const token_tags = tree.tokens.items(.tag);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
var first_tok = tree.firstToken(node);
- while (true) switch (token_tags[first_tok - 1]) {
+ while (true) switch (tree.tokenTag(first_tok - 1)) {
.colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
else => break,
};
@@ -1574,12 +1562,11 @@ pub const SrcLoc = struct {
},
.token_offset_param => |token_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const token_tags = tree.tokens.items(.tag);
- const main_token = tree.nodes.items(.main_token)[src_loc.base_node];
- const tok_index = @as(Ast.TokenIndex, @bitCast(token_off + @as(i32, @bitCast(main_token))));
+ const main_token = tree.nodeMainToken(src_loc.base_node);
+ const tok_index = token_off.toAbsolute(main_token);
var first_tok = tok_index;
- while (true) switch (token_tags[first_tok - 1]) {
+ while (true) switch (tree.tokenTag(first_tok - 1)) {
.colon, .identifier, .keyword_comptime, .keyword_noalias => first_tok -= 1,
else => break,
};
@@ -1592,109 +1579,108 @@ pub const SrcLoc = struct {
.node_offset_anyframe_type => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_datas = tree.nodes.items(.data);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
- return tree.nodeToSpan(node_datas[parent_node].rhs);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
+ _, const child_type = tree.nodeData(parent_node).token_and_node;
+ return tree.nodeToSpan(child_type);
},
.node_offset_lib_name => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
var buf: [1]Ast.Node.Index = undefined;
const full = tree.fullFnProto(&buf, parent_node).?;
const tok_index = full.lib_name.?;
- const start = tree.tokens.items(.start)[tok_index];
+ const start = tree.tokenStart(tok_index);
const end = start + @as(u32, @intCast(tree.tokenSlice(tok_index).len));
return Span{ .start = start, .end = end, .main = start };
},
.node_offset_array_type_len => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
return tree.nodeToSpan(full.ast.elem_count);
},
.node_offset_array_type_sentinel => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
- return tree.nodeToSpan(full.ast.sentinel);
+ return tree.nodeToSpan(full.ast.sentinel.unwrap().?);
},
.node_offset_array_type_elem => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullArrayType(parent_node).?;
return tree.nodeToSpan(full.ast.elem_type);
},
.node_offset_un_op => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_datas = tree.nodes.items(.data);
- const node = src_loc.relativeToNodeIndex(node_off);
-
- return tree.nodeToSpan(node_datas[node].lhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ return tree.nodeToSpan(tree.nodeData(node).node);
},
.node_offset_ptr_elem => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
return tree.nodeToSpan(full.ast.child_type);
},
.node_offset_ptr_sentinel => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
- return tree.nodeToSpan(full.ast.sentinel);
+ return tree.nodeToSpan(full.ast.sentinel.unwrap().?);
},
.node_offset_ptr_align => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
- return tree.nodeToSpan(full.ast.align_node);
+ return tree.nodeToSpan(full.ast.align_node.unwrap().?);
},
.node_offset_ptr_addrspace => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
- return tree.nodeToSpan(full.ast.addrspace_node);
+ return tree.nodeToSpan(full.ast.addrspace_node.unwrap().?);
},
.node_offset_ptr_bitoffset => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
- return tree.nodeToSpan(full.ast.bit_range_start);
+ return tree.nodeToSpan(full.ast.bit_range_start.unwrap().?);
},
.node_offset_ptr_hostsize => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
const full = tree.fullPtrType(parent_node).?;
- return tree.nodeToSpan(full.ast.bit_range_end);
+ return tree.nodeToSpan(full.ast.bit_range_end.unwrap().?);
},
.node_offset_container_tag => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_tags = tree.nodes.items(.tag);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
- switch (node_tags[parent_node]) {
+ switch (tree.nodeTag(parent_node)) {
.container_decl_arg, .container_decl_arg_trailing => {
const full = tree.containerDeclArg(parent_node);
- return tree.nodeToSpan(full.ast.arg);
+ const arg_node = full.ast.arg.unwrap().?;
+ return tree.nodeToSpan(arg_node);
},
.tagged_union_enum_tag, .tagged_union_enum_tag_trailing => {
const full = tree.taggedUnionEnumTag(parent_node);
+ const arg_node = full.ast.arg.unwrap().?;
return tree.tokensToSpan(
- tree.firstToken(full.ast.arg) - 2,
- tree.lastToken(full.ast.arg) + 1,
- tree.nodes.items(.main_token)[full.ast.arg],
+ tree.firstToken(arg_node) - 2,
+ tree.lastToken(arg_node) + 1,
+ tree.nodeMainToken(arg_node),
);
},
else => unreachable,
@@ -1702,60 +1688,55 @@ pub const SrcLoc = struct {
},
.node_offset_field_default => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_tags = tree.nodes.items(.tag);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
- const full: Ast.full.ContainerField = switch (node_tags[parent_node]) {
+ const full: Ast.full.ContainerField = switch (tree.nodeTag(parent_node)) {
.container_field => tree.containerField(parent_node),
.container_field_init => tree.containerFieldInit(parent_node),
else => unreachable,
};
- return tree.nodeToSpan(full.ast.value_expr);
+ return tree.nodeToSpan(full.ast.value_expr.unwrap().?);
},
.node_offset_init_ty => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const parent_node = src_loc.relativeToNodeIndex(node_off);
+ const parent_node = node_off.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
const type_expr = if (tree.fullArrayInit(&buf, parent_node)) |array_init|
- array_init.ast.type_expr
+ array_init.ast.type_expr.unwrap().?
else
- tree.fullStructInit(&buf, parent_node).?.ast.type_expr;
+ tree.fullStructInit(&buf, parent_node).?.ast.type_expr.unwrap().?;
return tree.nodeToSpan(type_expr);
},
.node_offset_store_ptr => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.assign => {
- return tree.nodeToSpan(node_datas[node].lhs);
+ return tree.nodeToSpan(tree.nodeData(node).node_and_node[0]);
},
else => return tree.nodeToSpan(node),
}
},
.node_offset_store_operand => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- const node = src_loc.relativeToNodeIndex(node_off);
+ const node = node_off.toAbsolute(src_loc.base_node);
- switch (node_tags[node]) {
+ switch (tree.nodeTag(node)) {
.assign => {
- return tree.nodeToSpan(node_datas[node].rhs);
+ return tree.nodeToSpan(tree.nodeData(node).node_and_node[1]);
},
else => return tree.nodeToSpan(node),
}
},
.node_offset_return_operand => |node_off| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(node_off);
- const node_tags = tree.nodes.items(.tag);
- const node_datas = tree.nodes.items(.data);
- if (node_tags[node] == .@"return" and node_datas[node].lhs != 0) {
- return tree.nodeToSpan(node_datas[node].lhs);
+ const node = node_off.toAbsolute(src_loc.base_node);
+ if (tree.nodeTag(node) == .@"return") {
+ if (tree.nodeData(node).opt_node.unwrap()) |lhs| {
+ return tree.nodeToSpan(lhs);
+ }
}
return tree.nodeToSpan(node);
},
@@ -1765,7 +1746,7 @@ pub const SrcLoc = struct {
.container_field_align,
=> |field_idx| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(0);
+ const node = src_loc.base_node;
var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node) orelse
return tree.nodeToSpan(node);
@@ -1778,36 +1759,36 @@ pub const SrcLoc = struct {
continue;
}
const field_component_node = switch (src_loc.lazy) {
- .container_field_name => 0,
+ .container_field_name => .none,
.container_field_value => field.ast.value_expr,
.container_field_type => field.ast.type_expr,
.container_field_align => field.ast.align_expr,
else => unreachable,
};
- if (field_component_node == 0) {
- return tree.tokenToSpan(field.ast.main_token);
+ if (field_component_node.unwrap()) |component_node| {
+ return tree.nodeToSpan(component_node);
} else {
- return tree.nodeToSpan(field_component_node);
+ return tree.tokenToSpan(field.ast.main_token);
}
} else unreachable;
},
.tuple_field_type, .tuple_field_init => |field_info| {
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(0);
+ const node = src_loc.base_node;
var buf: [2]Ast.Node.Index = undefined;
const container_decl = tree.fullContainerDecl(&buf, node) orelse
return tree.nodeToSpan(node);
const field = tree.fullContainerField(container_decl.ast.members[field_info.elem_index]).?;
return tree.nodeToSpan(switch (src_loc.lazy) {
- .tuple_field_type => field.ast.type_expr,
- .tuple_field_init => field.ast.value_expr,
+ .tuple_field_type => field.ast.type_expr.unwrap().?,
+ .tuple_field_init => field.ast.value_expr.unwrap().?,
else => unreachable,
});
},
.init_elem => |init_elem| {
const tree = try src_loc.file_scope.getTree(gpa);
- const init_node = src_loc.relativeToNodeIndex(init_elem.init_node_offset);
+ const init_node = init_elem.init_node_offset.toAbsolute(src_loc.base_node);
var buf: [2]Ast.Node.Index = undefined;
if (tree.fullArrayInit(&buf, init_node)) |full| {
const elem_node = full.ast.elements[init_elem.elem_index];
@@ -1817,7 +1798,7 @@ pub const SrcLoc = struct {
return tree.tokensToSpan(
tree.firstToken(field_node) - 3,
tree.lastToken(field_node),
- tree.nodes.items(.main_token)[field_node] - 2,
+ tree.nodeMainToken(field_node) - 2,
);
} else unreachable;
},
@@ -1846,7 +1827,7 @@ pub const SrcLoc = struct {
else => unreachable,
};
const tree = try src_loc.file_scope.getTree(gpa);
- const node = src_loc.relativeToNodeIndex(builtin_call_node);
+ const node = builtin_call_node.toAbsolute(src_loc.base_node);
var builtin_buf: [2]Ast.Node.Index = undefined;
const args = tree.builtinCallParams(&builtin_buf, node).?;
const arg_node = args[1];
@@ -1861,7 +1842,7 @@ pub const SrcLoc = struct {
return tree.tokensToSpan(
name_token - 1,
tree.lastToken(field_node),
- tree.nodes.items(.main_token)[field_node] - 2,
+ tree.nodeMainToken(field_node) - 2,
);
}
}
@@ -1885,12 +1866,9 @@ pub const SrcLoc = struct {
};
const tree = try src_loc.file_scope.getTree(gpa);
- const node_datas = tree.nodes.items(.data);
- const node_tags = tree.nodes.items(.tag);
- const main_tokens = tree.nodes.items(.main_token);
- const switch_node = src_loc.relativeToNodeIndex(switch_node_offset);
- const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
- const case_nodes = tree.extra_data[extra.start..extra.end];
+ const switch_node = switch_node_offset.toAbsolute(src_loc.base_node);
+ _, const extra_index = tree.nodeData(switch_node).node_and_extra;
+ const case_nodes = tree.extraDataSlice(tree.extraData(extra_index, Ast.Node.SubRange), Ast.Node.Index);
var multi_i: u32 = 0;
var scalar_i: u32 = 0;
@@ -1898,8 +1876,8 @@ pub const SrcLoc = struct {
const case = tree.fullSwitchCase(case_node).?;
const is_special = special: {
if (case.ast.values.len == 0) break :special true;
- if (case.ast.values.len == 1 and node_tags[case.ast.values[0]] == .identifier) {
- break :special mem.eql(u8, tree.tokenSlice(main_tokens[case.ast.values[0]]), "_");
+ if (case.ast.values.len == 1 and tree.nodeTag(case.ast.values[0]) == .identifier) {
+ break :special mem.eql(u8, tree.tokenSlice(tree.nodeMainToken(case.ast.values[0])), "_");
}
break :special false;
};
@@ -1911,7 +1889,7 @@ pub const SrcLoc = struct {
}
const is_multi = case.ast.values.len != 1 or
- node_tags[case.ast.values[0]] == .switch_range;
+ tree.nodeTag(case.ast.values[0]) == .switch_range;
switch (want_case_idx.kind) {
.scalar => if (!is_multi and want_case_idx.index == scalar_i) break case,
@@ -1931,18 +1909,17 @@ pub const SrcLoc = struct {
.switch_case_item_range_last,
=> |x| x.item_idx,
.switch_capture, .switch_tag_capture => {
- const token_tags = tree.tokens.items(.tag);
const start = switch (src_loc.lazy) {
.switch_capture => case.payload_token.?,
.switch_tag_capture => tok: {
var tok = case.payload_token.?;
- if (token_tags[tok] == .asterisk) tok += 1;
- tok += 2; // skip over comma
+ if (tree.tokenTag(tok) == .asterisk) tok += 1;
+ tok = tok + 2; // skip over comma
break :tok tok;
},
else => unreachable,
};
- const end = switch (token_tags[start]) {
+ const end = switch (tree.tokenTag(start)) {
.asterisk => start + 1,
else => start,
};
@@ -1955,7 +1932,7 @@ pub const SrcLoc = struct {
.single => {
var item_i: u32 = 0;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] == .switch_range) continue;
+ if (tree.nodeTag(item_node) == .switch_range) continue;
if (item_i != want_item.index) {
item_i += 1;
continue;
@@ -1966,15 +1943,16 @@ pub const SrcLoc = struct {
.range => {
var range_i: u32 = 0;
for (case.ast.values) |item_node| {
- if (node_tags[item_node] != .switch_range) continue;
+ if (tree.nodeTag(item_node) != .switch_range) continue;
if (range_i != want_item.index) {
range_i += 1;
continue;
}
+ const first, const last = tree.nodeData(item_node).node_and_node;
return switch (src_loc.lazy) {
.switch_case_item => tree.nodeToSpan(item_node),
- .switch_case_item_range_first => tree.nodeToSpan(node_datas[item_node].lhs),
- .switch_case_item_range_last => tree.nodeToSpan(node_datas[item_node].rhs),
+ .switch_case_item_range_first => tree.nodeToSpan(first),
+ .switch_case_item_range_last => tree.nodeToSpan(last),
else => unreachable,
};
} else unreachable;
@@ -1997,7 +1975,7 @@ pub const SrcLoc = struct {
var param_it = full.iterate(tree);
for (0..param_idx) |_| assert(param_it.next() != null);
const param = param_it.next().?;
- return tree.nodeToSpan(param.type_expr);
+ return tree.nodeToSpan(param.type_expr.?);
},
}
}
@@ -2028,212 +2006,217 @@ pub const LazySrcLoc = struct {
byte_abs: u32,
/// The source location points to a token within a source file,
/// offset from 0. The source file is determined contextually.
- token_abs: u32,
+ token_abs: Ast.TokenIndex,
/// The source location points to an AST node within a source file,
/// offset from 0. The source file is determined contextually.
- node_abs: u32,
+ node_abs: Ast.Node.Index,
/// The source location points to a byte offset within a source file,
/// offset from the byte offset of the base node within the file.
byte_offset: u32,
/// This data is the offset into the token list from the base node's first token.
- token_offset: u32,
+ token_offset: Ast.TokenOffset,
/// The source location points to an AST node, which is this value offset
/// from its containing base node AST index.
node_offset: TracedOffset,
/// The source location points to the main token of an AST node, found
/// by taking this AST node index offset from the containing base node.
- node_offset_main_token: i32,
+ node_offset_main_token: Ast.Node.Offset,
/// The source location points to the beginning of a struct initializer.
- node_offset_initializer: i32,
+ node_offset_initializer: Ast.Node.Offset,
/// The source location points to a variable declaration type expression,
/// found by taking this AST node index offset from the containing
/// base node, which points to a variable declaration AST node. Next, navigate
/// to the type expression.
- node_offset_var_decl_ty: i32,
+ node_offset_var_decl_ty: Ast.Node.Offset,
/// The source location points to the alignment expression of a var decl.
- node_offset_var_decl_align: i32,
+ node_offset_var_decl_align: Ast.Node.Offset,
/// The source location points to the linksection expression of a var decl.
- node_offset_var_decl_section: i32,
+ node_offset_var_decl_section: Ast.Node.Offset,
/// The source location points to the addrspace expression of a var decl.
- node_offset_var_decl_addrspace: i32,
+ node_offset_var_decl_addrspace: Ast.Node.Offset,
/// The source location points to the initializer of a var decl.
- node_offset_var_decl_init: i32,
+ node_offset_var_decl_init: Ast.Node.Offset,
/// The source location points to the given argument of a builtin function call.
/// `builtin_call_node` points to the builtin call.
/// `arg_index` is the index of the argument that the source location refers to.
node_offset_builtin_call_arg: struct {
- builtin_call_node: i32,
+ builtin_call_node: Ast.Node.Offset,
arg_index: u32,
},
/// Like `node_offset_builtin_call_arg` but recurses through arbitrarily many calls
/// to pointer cast builtins (taking the first argument of the most nested).
- node_offset_ptrcast_operand: i32,
+ node_offset_ptrcast_operand: Ast.Node.Offset,
/// The source location points to the index expression of an array access
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an array access AST node. Next, navigate
/// to the index expression.
- node_offset_array_access_index: i32,
+ node_offset_array_access_index: Ast.Node.Offset,
/// The source location points to the LHS of a slice
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a slice AST node. Next, navigate
/// to the sliced expression.
- node_offset_slice_ptr: i32,
+ node_offset_slice_ptr: Ast.Node.Offset,
/// The source location points to the start expression of a slice
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a slice AST node. Next, navigate
/// to the start expression.
- node_offset_slice_start: i32,
+ node_offset_slice_start: Ast.Node.Offset,
/// The source location points to the end expression of a slice
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a slice AST node. Next, navigate
/// to the end expression.
- node_offset_slice_end: i32,
+ node_offset_slice_end: Ast.Node.Offset,
/// The source location points to the sentinel expression of a slice
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a slice AST node. Next, navigate
/// to the sentinel expression.
- node_offset_slice_sentinel: i32,
+ node_offset_slice_sentinel: Ast.Node.Offset,
/// The source location points to the callee expression of a function
/// call expression, found by taking this AST node index offset from the containing
/// base node, which points to a function call AST node. Next, navigate
/// to the callee expression.
- node_offset_call_func: i32,
+ node_offset_call_func: Ast.Node.Offset,
/// The payload is offset from the containing base node.
/// The source location points to the field name of:
/// * a field access expression (`a.b`), or
/// * the callee of a method call (`a.b()`)
- node_offset_field_name: i32,
+ node_offset_field_name: Ast.Node.Offset,
/// The payload is offset from the containing base node.
/// The source location points to the field name of the operand ("b" node)
/// of a field initialization expression (`.a = b`)
- node_offset_field_name_init: i32,
+ node_offset_field_name_init: Ast.Node.Offset,
/// The source location points to the pointer of a pointer deref expression,
/// found by taking this AST node index offset from the containing
/// base node, which points to a pointer deref AST node. Next, navigate
/// to the pointer expression.
- node_offset_deref_ptr: i32,
+ node_offset_deref_ptr: Ast.Node.Offset,
/// The source location points to the assembly source code of an inline assembly
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an inline assembly AST node. Next, navigate
/// to the asm template source code.
- node_offset_asm_source: i32,
+ node_offset_asm_source: Ast.Node.Offset,
/// The source location points to the return type of an inline assembly
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an inline assembly AST node. Next, navigate
/// to the return type expression.
- node_offset_asm_ret_ty: i32,
+ node_offset_asm_ret_ty: Ast.Node.Offset,
/// The source location points to the condition expression of an if
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an if expression AST node. Next, navigate
/// to the condition expression.
- node_offset_if_cond: i32,
+ node_offset_if_cond: Ast.Node.Offset,
/// The source location points to a binary expression, such as `a + b`, found
/// by taking this AST node index offset from the containing base node.
- node_offset_bin_op: i32,
+ node_offset_bin_op: Ast.Node.Offset,
/// The source location points to the LHS of a binary expression, found
/// by taking this AST node index offset from the containing base node,
/// which points to a binary expression AST node. Next, navigate to the LHS.
- node_offset_bin_lhs: i32,
+ node_offset_bin_lhs: Ast.Node.Offset,
/// The source location points to the RHS of a binary expression, found
/// by taking this AST node index offset from the containing base node,
/// which points to a binary expression AST node. Next, navigate to the RHS.
- node_offset_bin_rhs: i32,
+ node_offset_bin_rhs: Ast.Node.Offset,
+ /// The source location points to the operand of a try expression, found
+ /// by taking this AST node index offset from the containing base node,
+ /// which points to a try expression AST node. Next, navigate to the
+ /// operand expression.
+ node_offset_try_operand: Ast.Node.Offset,
/// The source location points to the operand of a switch expression, found
/// by taking this AST node index offset from the containing base node,
/// which points to a switch expression AST node. Next, navigate to the operand.
- node_offset_switch_operand: i32,
+ node_offset_switch_operand: Ast.Node.Offset,
/// The source location points to the else/`_` prong of a switch expression, found
/// by taking this AST node index offset from the containing base node,
/// which points to a switch expression AST node. Next, navigate to the else/`_` prong.
- node_offset_switch_special_prong: i32,
+ node_offset_switch_special_prong: Ast.Node.Offset,
/// The source location points to all the ranges of a switch expression, found
/// by taking this AST node index offset from the containing base node,
/// which points to a switch expression AST node. Next, navigate to any of the
/// range nodes. The error applies to all of them.
- node_offset_switch_range: i32,
+ node_offset_switch_range: Ast.Node.Offset,
/// The source location points to the align expr of a function type
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a function type AST node. Next, navigate to
/// the align expression node.
- node_offset_fn_type_align: i32,
+ node_offset_fn_type_align: Ast.Node.Offset,
/// The source location points to the addrspace expr of a function type
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a function type AST node. Next, navigate to
/// the addrspace expression node.
- node_offset_fn_type_addrspace: i32,
+ node_offset_fn_type_addrspace: Ast.Node.Offset,
/// The source location points to the linksection expr of a function type
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a function type AST node. Next, navigate to
/// the linksection expression node.
- node_offset_fn_type_section: i32,
+ node_offset_fn_type_section: Ast.Node.Offset,
/// The source location points to the calling convention of a function type
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a function type AST node. Next, navigate to
/// the calling convention node.
- node_offset_fn_type_cc: i32,
+ node_offset_fn_type_cc: Ast.Node.Offset,
/// The source location points to the return type of a function type
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to a function type AST node. Next, navigate to
/// the return type node.
- node_offset_fn_type_ret_ty: i32,
- node_offset_param: i32,
- token_offset_param: i32,
+ node_offset_fn_type_ret_ty: Ast.Node.Offset,
+ node_offset_param: Ast.Node.Offset,
+ token_offset_param: Ast.TokenOffset,
/// The source location points to the type expression of an `anyframe->T`
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an `anyframe->T` expression AST node. Next, navigate
/// to the type expression.
- node_offset_anyframe_type: i32,
+ node_offset_anyframe_type: Ast.Node.Offset,
/// The source location points to the string literal of `extern "foo"`, found
/// by taking this AST node index offset from the containing
/// base node, which points to a function prototype or variable declaration
/// expression AST node. Next, navigate to the string literal of the `extern "foo"`.
- node_offset_lib_name: i32,
+ node_offset_lib_name: Ast.Node.Offset,
/// The source location points to the len expression of an `[N:S]T`
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an `[N:S]T` expression AST node. Next, navigate
/// to the len expression.
- node_offset_array_type_len: i32,
+ node_offset_array_type_len: Ast.Node.Offset,
/// The source location points to the sentinel expression of an `[N:S]T`
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an `[N:S]T` expression AST node. Next, navigate
/// to the sentinel expression.
- node_offset_array_type_sentinel: i32,
+ node_offset_array_type_sentinel: Ast.Node.Offset,
/// The source location points to the elem expression of an `[N:S]T`
/// expression, found by taking this AST node index offset from the containing
/// base node, which points to an `[N:S]T` expression AST node. Next, navigate
/// to the elem expression.
- node_offset_array_type_elem: i32,
+ node_offset_array_type_elem: Ast.Node.Offset,
/// The source location points to the operand of a unary expression.
- node_offset_un_op: i32,
+ node_offset_un_op: Ast.Node.Offset,
/// The source location points to the elem type of a pointer.
- node_offset_ptr_elem: i32,
+ node_offset_ptr_elem: Ast.Node.Offset,
/// The source location points to the sentinel of a pointer.
- node_offset_ptr_sentinel: i32,
+ node_offset_ptr_sentinel: Ast.Node.Offset,
/// The source location points to the align expr of a pointer.
- node_offset_ptr_align: i32,
+ node_offset_ptr_align: Ast.Node.Offset,
/// The source location points to the addrspace expr of a pointer.
- node_offset_ptr_addrspace: i32,
+ node_offset_ptr_addrspace: Ast.Node.Offset,
/// The source location points to the bit-offset of a pointer.
- node_offset_ptr_bitoffset: i32,
+ node_offset_ptr_bitoffset: Ast.Node.Offset,
/// The source location points to the host size of a pointer.
- node_offset_ptr_hostsize: i32,
+ node_offset_ptr_hostsize: Ast.Node.Offset,
/// The source location points to the tag type of a union or an enum.
- node_offset_container_tag: i32,
+ node_offset_container_tag: Ast.Node.Offset,
/// The source location points to the default value of a field.
- node_offset_field_default: i32,
+ node_offset_field_default: Ast.Node.Offset,
/// The source location points to the type of an array or struct initializer.
- node_offset_init_ty: i32,
+ node_offset_init_ty: Ast.Node.Offset,
/// The source location points to the LHS of an assignment.
- node_offset_store_ptr: i32,
+ node_offset_store_ptr: Ast.Node.Offset,
/// The source location points to the RHS of an assignment.
- node_offset_store_operand: i32,
+ node_offset_store_operand: Ast.Node.Offset,
/// The source location points to the operand of a `return` statement, or
/// the `return` itself if there is no explicit operand.
- node_offset_return_operand: i32,
+ node_offset_return_operand: Ast.Node.Offset,
/// The source location points to a for loop input.
for_input: struct {
/// Points to the for loop AST node.
- for_node_offset: i32,
+ for_node_offset: Ast.Node.Offset,
/// Picks one of the inputs from the condition.
input_index: u32,
},
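
Every variant in this union migrates from a raw `i32` to the typed `Ast.Node.Offset`. Judging from the `.zero` and `.root` literals used later in this patch, the new index types are presumably non-exhaustive enums over the same integers, so node indices, token indices, and relative offsets can no longer be mixed up silently. A minimal sketch of that shape (illustrative, not the verbatim `std.zig.Ast` definitions):

    pub const Index = enum(u32) {
        /// The root node of a file; replaces the old bare `0`.
        root = 0,
        _,
    };

    pub const Offset = enum(i32) {
        /// "The base node itself"; replaces the old `nodeOffset(0)`.
        zero = 0,
        _,

        /// Resolve a relative offset against the absolute index of its base node.
        pub fn toAbsolute(offset: Offset, base: Index) Index {
            const abs = @as(i64, @intFromEnum(base)) + @intFromEnum(offset);
            return @enumFromInt(@as(u32, @intCast(abs)));
        }
    };
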
@@ -2241,11 +2224,11 @@ pub const LazySrcLoc = struct {
/// by taking this AST node index offset from the containing
/// base node, which points to one of the input nodes of a for loop.
/// Next, navigate to the corresponding capture.
- for_capture_from_input: i32,
+ for_capture_from_input: Ast.Node.Offset,
/// The source location points to the argument node of a function call.
call_arg: struct {
/// Points to the function call AST node.
- call_node_offset: i32,
+ call_node_offset: Ast.Node.Offset,
/// The index of the argument the source location points to.
arg_index: u32,
},
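
After the migration, building one of these struct-payload variants reads as before, just with a typed offset in place of the raw integer. A hypothetical call site (`call_node_off` is an assumed `Ast.Node.Offset` computed by the caller):

    // Point the source location at the second argument of a call expression.
    const offset: LazySrcLoc.Offset = .{ .call_arg = .{
        .call_node_offset = call_node_off,
        .arg_index = 1, // zero-based, so this is the second argument
    } };
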
@@ -2272,25 +2255,25 @@ pub const LazySrcLoc = struct {
/// array initialization expression.
init_elem: struct {
/// Points to the AST node of the initialization expression.
- init_node_offset: i32,
+ init_node_offset: Ast.Node.Offset,
/// The index of the field/element the source location points to.
elem_index: u32,
},
// The following source locations are like `init_elem`, but refer to a
// field with a specific name. If such a field is not given, the entire
// initialization expression is used instead.
- // The `i32` points to the AST node of a builtin call, whose *second*
+ // The `Ast.Node.Offset` points to the AST node of a builtin call, whose *second*
// argument is the init expression.
- init_field_name: i32,
- init_field_linkage: i32,
- init_field_section: i32,
- init_field_visibility: i32,
- init_field_rw: i32,
- init_field_locality: i32,
- init_field_cache: i32,
- init_field_library: i32,
- init_field_thread_local: i32,
- init_field_dll_import: i32,
+ init_field_name: Ast.Node.Offset,
+ init_field_linkage: Ast.Node.Offset,
+ init_field_section: Ast.Node.Offset,
+ init_field_visibility: Ast.Node.Offset,
+ init_field_rw: Ast.Node.Offset,
+ init_field_locality: Ast.Node.Offset,
+ init_field_cache: Ast.Node.Offset,
+ init_field_library: Ast.Node.Offset,
+ init_field_thread_local: Ast.Node.Offset,
+ init_field_dll_import: Ast.Node.Offset,
/// The source location points to the value of an item in a specific
/// case of a `switch`.
switch_case_item: SwitchItem,
@@ -2315,14 +2298,14 @@ pub const LazySrcLoc = struct {
pub const FnProtoParam = struct {
/// The offset of the function prototype AST node.
- fn_proto_node_offset: i32,
+ fn_proto_node_offset: Ast.Node.Offset,
/// The index of the parameter the source location points to.
param_index: u32,
};
pub const SwitchItem = struct {
/// The offset of the switch AST node.
- switch_node_offset: i32,
+ switch_node_offset: Ast.Node.Offset,
/// The index of the case to point to within this switch.
case_idx: SwitchCaseIndex,
/// The index of the item to point to within this case.
@@ -2331,7 +2314,7 @@ pub const LazySrcLoc = struct {
pub const SwitchCapture = struct {
/// The offset of the switch AST node.
- switch_node_offset: i32,
+ switch_node_offset: Ast.Node.Offset,
/// The index of the case whose capture to point to.
case_idx: SwitchCaseIndex,
};
@@ -2353,34 +2336,34 @@ pub const LazySrcLoc = struct {
pub const ArrayCat = struct {
/// Points to the array concat AST node.
- array_cat_offset: i32,
+ array_cat_offset: Ast.Node.Offset,
/// The index of the element the source location points to.
elem_index: u32,
};
pub const TupleField = struct {
/// Points to the AST node of the tuple type declaration.
- tuple_decl_node_offset: i32,
+ tuple_decl_node_offset: Ast.Node.Offset,
/// The index of the tuple field the source location points to.
elem_index: u32,
};
pub const nodeOffset = if (TracedOffset.want_tracing) nodeOffsetDebug else nodeOffsetRelease;
- noinline fn nodeOffsetDebug(node_offset: i32) Offset {
+ noinline fn nodeOffsetDebug(node_offset: Ast.Node.Offset) Offset {
var result: Offset = .{ .node_offset = .{ .x = node_offset } };
result.node_offset.trace.addAddr(@returnAddress(), "init");
return result;
}
- fn nodeOffsetRelease(node_offset: i32) Offset {
+ fn nodeOffsetRelease(node_offset: Ast.Node.Offset) Offset {
return .{ .node_offset = .{ .x = node_offset } };
}
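
`nodeOffset` above is bound at comptime to one of the two implementations, so release builds never reference the tracing variant and it is never analyzed or emitted. The same selection pattern in miniature (a generic illustration, not code from this commit):

    const std = @import("std");
    const builtin = @import("builtin");

    // Comptime function selection: the unreferenced branch is lazily skipped.
    pub const log = if (builtin.mode == .Debug) logVerbose else logQuiet;

    fn logVerbose(msg: []const u8) void {
        std.debug.print("[debug] {s}\n", .{msg});
    }

    fn logQuiet(_: []const u8) void {}
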
/// This wraps a node offset in debug builds so that later on we can find
/// out where in semantic analysis the value got set.
pub const TracedOffset = struct {
- x: i32,
+ x: Ast.Node.Offset,
trace: std.debug.Trace = std.debug.Trace.init,
const want_tracing = false;
@@ -2405,7 +2388,7 @@ pub const LazySrcLoc = struct {
// If we're relative to .main_struct_inst, we know the AST node is the root and don't need to resolve the ZIR,
// which may not exist e.g. in the case of errors in ZON files.
- if (zir_inst == .main_struct_inst) return .{ file, 0 };
+ if (zir_inst == .main_struct_inst) return .{ file, .root };
// Otherwise, make sure ZIR is loaded.
const zir = file.zir.?;
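
The payoff of the typed indices is visible here: `.root` names what the old code spelled as a bare `0`, and confusing a node index with a token index or a relative offset is now a compile error instead of a silent bug. For instance (illustrative):

    const node: Ast.Node.Index = .root; // self-documenting; previously `0`
    const tok: Ast.TokenIndex = 0; // token indices remain plain integers
    // const oops: Ast.Node.Index = tok; // error: expected 'Ast.Node.Index', found 'u32'
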
@@ -2438,7 +2421,7 @@ pub const LazySrcLoc = struct {
pub fn upgradeOrLost(lazy: LazySrcLoc, zcu: *Zcu) ?SrcLoc {
const file, const base_node: Ast.Node.Index = if (lazy.offset == .entire_file) .{
zcu.fileByIndex(lazy.base_node_inst.resolveFile(&zcu.intern_pool)),
- 0,
+ .root,
} else resolveBaseNode(lazy.base_node_inst, zcu) orelse return null;
return .{
.file_scope = file,
@@ -4007,7 +3990,7 @@ pub fn navSrcLoc(zcu: *const Zcu, nav_index: InternPool.Nav.Index) LazySrcLoc {
const ip = &zcu.intern_pool;
return .{
.base_node_inst = ip.getNav(nav_index).srcInst(ip),
- .offset = LazySrcLoc.Offset.nodeOffset(0),
+ .offset = LazySrcLoc.Offset.nodeOffset(.zero),
};
}
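
With `nodeOffset(.zero)` the location of a declaration is the declaration's own source node: a zero offset from the base. A usage sketch (the `zcu` and `nav` bindings are assumed context):

    // The LazySrcLoc stays cheap until a diagnostic is actually emitted;
    // only then is it upgraded to a concrete file + absolute node pair.
    const lazy = zcu.navSrcLoc(nav);
    if (lazy.upgradeOrLost(zcu)) |src_loc| {
        // src_loc is a concrete SrcLoc: a file scope plus an absolute base node.
        _ = src_loc;
    }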