Commit 3940a1be18

Andrew Kelley <andrew@ziglang.org>
2021-08-31 04:22:04
rename std.zig.ast to std.zig.Ast; use top-level fields
1 parent e41e75a
lib/std/zig/Ast.zig
@@ -0,0 +1,2979 @@
+//! Abstract Syntax Tree for Zig source code.
+
+/// Reference to externally-owned data.
+source: [:0]const u8,
+
+/// Token tags and starting byte offsets into `source`; see `TokenList`.
+tokens: TokenList.Slice,
+/// The root AST node is assumed to be index 0. Since there can be no
+/// references to the root node, this means 0 is available to indicate null.
+nodes: NodeList.Slice,
+/// Flat storage for variable-length node payloads; decoded with `extraData`
+/// and sliced directly by `rootDecls`.
+extra_data: []Node.Index,
+
+/// Parse errors; rendered to text by `renderError`.
+errors: []const Error,
+
+const std = @import("../std.zig");
+const assert = std.debug.assert;
+const testing = std.testing;
+const mem = std.mem;
+const Token = std.zig.Token;
+const Tree = @This();
+
+/// Index into the token list.
+pub const TokenIndex = u32;
+/// Byte offset into `source`.
+pub const ByteOffset = u32;
+
+/// Struct-of-arrays storage for tokens. Only the tag and start offset are
+/// stored; token end positions are recovered by re-tokenizing (see `tokenSlice`).
+pub const TokenList = std.MultiArrayList(struct {
+    tag: Token.Tag,
+    start: ByteOffset,
+});
+/// Struct-of-arrays storage for AST nodes.
+pub const NodeList = std.MultiArrayList(Node);
+
+/// Line/column position of a token, as computed by `tokenLocation`.
+pub const Location = struct {
+    /// 0-based line number.
+    line: usize,
+    /// 0-based column, counted in bytes (not unicode codepoints).
+    column: usize,
+    /// Byte offset of the start of the line containing the token.
+    line_start: usize,
+    /// Byte offset of the end of that line (the '\n', or end of source).
+    line_end: usize,
+};
+
+/// Frees all memory owned by the tree. `gpa` must be the allocator that
+/// was used to allocate `tokens`, `nodes`, `extra_data`, and `errors`.
+pub fn deinit(tree: *Tree, gpa: *mem.Allocator) void {
+    tree.tokens.deinit(gpa);
+    tree.nodes.deinit(gpa);
+    gpa.free(tree.extra_data);
+    gpa.free(tree.errors);
+    // Poison the value so accidental use-after-deinit is detectable.
+    tree.* = undefined;
+}
+
+/// Error set returned by `render` and `renderToArrayList`.
+pub const RenderError = error{
+    /// Ran out of memory allocating call stack frames to complete rendering, or
+    /// ran out of memory allocating space in the output buffer.
+    OutOfMemory,
+};
+
+/// `gpa` is used for allocating the resulting formatted source code, as well as
+/// for allocating extra stack memory if needed, because this function utilizes recursion.
+/// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006.
+/// Caller owns the returned slice of bytes, allocated with `gpa`.
+pub fn render(tree: Tree, gpa: *mem.Allocator) RenderError![]u8 {
+    var buffer = std.ArrayList(u8).init(gpa);
+    // On success, toOwnedSlice() empties the list so this deinit frees nothing;
+    // on error it releases whatever had been appended so far.
+    defer buffer.deinit();
+
+    try tree.renderToArrayList(&buffer);
+    return buffer.toOwnedSlice();
+}
+
+/// Renders the tree as formatted Zig source code, appending to `buffer`.
+/// The implementation lives in render.zig.
+pub fn renderToArrayList(tree: Tree, buffer: *std.ArrayList(u8)) RenderError!void {
+    return @import("./render.zig").renderTree(buffer, tree);
+}
+
+/// Computes the line/column `Location` of `token_index`, scanning forward
+/// from `start_offset`. `start_offset` should be the byte offset of a line
+/// start (typically 0); otherwise the reported column is relative to it.
+pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location {
+    var loc = Location{
+        .line = 0,
+        .column = 0,
+        .line_start = start_offset,
+        .line_end = self.source.len,
+    };
+    const token_start = self.tokens.items(.start)[token_index];
+    for (self.source[start_offset..]) |c, i| {
+        if (i + start_offset == token_start) {
+            // Found the token; extend line_end to the end of the current line.
+            loc.line_end = i + start_offset;
+            while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') {
+                loc.line_end += 1;
+            }
+            return loc;
+        }
+        if (c == '\n') {
+            loc.line += 1;
+            loc.column = 0;
+            // `i` is relative to start_offset, but line_start is an absolute
+            // byte offset (consistent with its initializer and with line_end).
+            // Previously this stored `i + 1`, which was wrong whenever
+            // start_offset != 0.
+            loc.line_start = i + start_offset + 1;
+        } else {
+            // Columns count bytes, not unicode codepoints.
+            loc.column += 1;
+        }
+    }
+    return loc;
+}
+
+/// Returns the source bytes of the given token.
+/// Fixed-text tokens (keywords, operators) come straight from the tag;
+/// variable-length tokens are re-tokenized from their start offset, since
+/// token end positions are not stored in `tokens`.
+pub fn tokenSlice(tree: Tree, token_index: TokenIndex) []const u8 {
+    const token_starts = tree.tokens.items(.start);
+    const token_tags = tree.tokens.items(.tag);
+    const token_tag = token_tags[token_index];
+
+    // Many tokens can be determined entirely by their tag.
+    if (token_tag.lexeme()) |lexeme| {
+        return lexeme;
+    }
+
+    // For some tokens, re-tokenization is needed to find the end.
+    var tokenizer: std.zig.Tokenizer = .{
+        .buffer = tree.source,
+        .index = token_starts[token_index],
+        .pending_invalid_token = null,
+    };
+    const token = tokenizer.next();
+    // Sanity check: re-tokenizing must reproduce the original token.
+    assert(token.tag == token_tag);
+    return tree.source[token.loc.start..token.loc.end];
+}
+
+/// Decodes a value of type `T` from `extra_data` starting at `index`.
+/// `T` must be a struct whose fields are all `Node.Index` (enforced at
+/// comptime); each field consumes one element, in declaration order.
+pub fn extraData(tree: Tree, index: usize, comptime T: type) T {
+    const fields = std.meta.fields(T);
+    var result: T = undefined;
+    inline for (fields) |field, i| {
+        comptime assert(field.field_type == Node.Index);
+        @field(result, field.name) = tree.extra_data[index + i];
+    }
+    return result;
+}
+
+/// Returns the top-level declarations of the file: the root node's
+/// lhs/rhs delimit a range in `extra_data`.
+pub fn rootDecls(tree: Tree) []const Node.Index {
+    // Root is always index 0.
+    const nodes_data = tree.nodes.items(.data);
+    return tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];
+}
+
+/// Writes a human-readable message for `parse_error` to `stream`.
+/// Only the message text is written — no source location information.
+pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void {
+    const token_tags = tree.tokens.items(.tag);
+    switch (parse_error.tag) {
+        .asterisk_after_ptr_deref => {
+            // Note that the token will point at the `.*` but ideally the source
+            // location would point to the `*` after the `.*`.
+            return stream.writeAll("'.*' cannot be followed by '*'. Are you missing a space?");
+        },
+        .decl_between_fields => {
+            return stream.writeAll("declarations are not allowed between container fields");
+        },
+        .expected_block => {
+            // Fixed: this previously printed "expected block or field", a
+            // copy-paste of the separate .expected_block_or_field arm below.
+            return stream.print("expected block, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_block_or_assignment => {
+            return stream.print("expected block or assignment, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_block_or_expr => {
+            return stream.print("expected block or expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_block_or_field => {
+            return stream.print("expected block or field, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_container_members => {
+            return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_expr => {
+            return stream.print("expected expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_expr_or_assignment => {
+            return stream.print("expected expression or assignment, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_fn => {
+            return stream.print("expected function, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_inlinable => {
+            return stream.print("expected 'while' or 'for', found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_labelable => {
+            return stream.print("expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_param_list => {
+            return stream.print("expected parameter list, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_prefix_expr => {
+            return stream.print("expected prefix expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_primary_type_expr => {
+            return stream.print("expected primary type expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_pub_item => {
+            return stream.writeAll("expected function or variable declaration after pub");
+        },
+        .expected_return_type => {
+            return stream.print("expected return type expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_semi_or_else => {
+            return stream.print("expected ';' or 'else', found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_semi_or_lbrace => {
+            return stream.print("expected ';' or '{{', found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_statement => {
+            return stream.print("expected statement, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_string_literal => {
+            return stream.print("expected string literal, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_suffix_op => {
+            return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_type_expr => {
+            return stream.print("expected type expression, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_var_decl => {
+            return stream.print("expected variable declaration, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_var_decl_or_fn => {
+            return stream.print("expected variable declaration or function, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_loop_payload => {
+            return stream.print("expected loop payload, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .expected_container => {
+            return stream.print("expected a struct, enum or union, found '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .extra_align_qualifier => {
+            return stream.writeAll("extra align qualifier");
+        },
+        .extra_allowzero_qualifier => {
+            return stream.writeAll("extra allowzero qualifier");
+        },
+        .extra_const_qualifier => {
+            return stream.writeAll("extra const qualifier");
+        },
+        .extra_volatile_qualifier => {
+            return stream.writeAll("extra volatile qualifier");
+        },
+        .ptr_mod_on_array_child_type => {
+            return stream.print("pointer modifier '{s}' not allowed on array child type", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .invalid_bit_range => {
+            return stream.writeAll("bit range not allowed on slices and arrays");
+        },
+        .invalid_token => {
+            return stream.print("invalid token: '{s}'", .{
+                token_tags[parse_error.token].symbol(),
+            });
+        },
+        .same_line_doc_comment => {
+            return stream.writeAll("same line documentation comment");
+        },
+        .unattached_doc_comment => {
+            return stream.writeAll("unattached documentation comment");
+        },
+        .varargs_nonfinal => {
+            return stream.writeAll("function prototype has parameter after varargs");
+        },
+
+        .expected_token => {
+            const found_tag = token_tags[parse_error.token];
+            const expected_symbol = parse_error.extra.expected_tag.symbol();
+            switch (found_tag) {
+                .invalid => return stream.print("expected '{s}', found invalid bytes", .{
+                    expected_symbol,
+                }),
+                else => return stream.print("expected '{s}', found '{s}'", .{
+                    expected_symbol, found_tag.symbol(),
+                }),
+            }
+        },
+    }
+}
+
+/// Returns the index of the first token that belongs to `node`, including
+/// leading modifier tokens (`pub`, `extern`, `async`, labels, etc.).
+/// Walks leftward through the tree iteratively (no recursion).
+pub fn firstToken(tree: Tree, node: Node.Index) TokenIndex {
+    const tags = tree.nodes.items(.tag);
+    const datas = tree.nodes.items(.data);
+    const main_tokens = tree.nodes.items(.main_token);
+    const token_tags = tree.tokens.items(.tag);
+    // Accumulated count of extra tokens that precede the current node
+    // (e.g. an `async` keyword); subtracted from the result on return.
+    var end_offset: TokenIndex = 0;
+    var n = node;
+    while (true) switch (tags[n]) {
+        // The root node starts at the first token.
+        .root => return 0,
+
+        // Nodes whose main token is already their leftmost token.
+        .test_decl,
+        .@"errdefer",
+        .@"defer",
+        .bool_not,
+        .negation,
+        .bit_not,
+        .negation_wrap,
+        .address_of,
+        .@"try",
+        .@"await",
+        .optional_type,
+        .@"switch",
+        .switch_comma,
+        .if_simple,
+        .@"if",
+        .@"suspend",
+        .@"resume",
+        .@"continue",
+        .@"break",
+        .@"return",
+        .anyframe_type,
+        .identifier,
+        .anyframe_literal,
+        .char_literal,
+        .integer_literal,
+        .float_literal,
+        .unreachable_literal,
+        .string_literal,
+        .multiline_string_literal,
+        .grouped_expression,
+        .builtin_call_two,
+        .builtin_call_two_comma,
+        .builtin_call,
+        .builtin_call_comma,
+        .error_set_decl,
+        .@"anytype",
+        .@"comptime",
+        .@"nosuspend",
+        .asm_simple,
+        .@"asm",
+        .array_type,
+        .array_type_sentinel,
+        .error_value,
+        => return main_tokens[n] - end_offset,
+
+        // These begin one token before the main token (the leading '.').
+        .array_init_dot,
+        .array_init_dot_comma,
+        .array_init_dot_two,
+        .array_init_dot_two_comma,
+        .struct_init_dot,
+        .struct_init_dot_comma,
+        .struct_init_dot_two,
+        .struct_init_dot_two_comma,
+        .enum_literal,
+        => return main_tokens[n] - 1 - end_offset,
+
+        // Infix/postfix forms: the leftmost token is inside the lhs operand,
+        // so continue the walk there.
+        .@"catch",
+        .field_access,
+        .unwrap_optional,
+        .equal_equal,
+        .bang_equal,
+        .less_than,
+        .greater_than,
+        .less_or_equal,
+        .greater_or_equal,
+        .assign_mul,
+        .assign_div,
+        .assign_mod,
+        .assign_add,
+        .assign_sub,
+        .assign_bit_shift_left,
+        .assign_bit_shift_right,
+        .assign_bit_and,
+        .assign_bit_xor,
+        .assign_bit_or,
+        .assign_mul_wrap,
+        .assign_add_wrap,
+        .assign_sub_wrap,
+        .assign,
+        .merge_error_sets,
+        .mul,
+        .div,
+        .mod,
+        .array_mult,
+        .mul_wrap,
+        .add,
+        .sub,
+        .array_cat,
+        .add_wrap,
+        .sub_wrap,
+        .bit_shift_left,
+        .bit_shift_right,
+        .bit_and,
+        .bit_xor,
+        .bit_or,
+        .@"orelse",
+        .bool_and,
+        .bool_or,
+        .slice_open,
+        .slice,
+        .slice_sentinel,
+        .deref,
+        .array_access,
+        .array_init_one,
+        .array_init_one_comma,
+        .array_init,
+        .array_init_comma,
+        .struct_init_one,
+        .struct_init_one_comma,
+        .struct_init,
+        .struct_init_comma,
+        .call_one,
+        .call_one_comma,
+        .call,
+        .call_comma,
+        .switch_range,
+        .error_union,
+        => n = datas[n].lhs,
+
+        // Scan backwards over any fn qualifiers (pub/extern/etc.).
+        .fn_decl,
+        .fn_proto_simple,
+        .fn_proto_multi,
+        .fn_proto_one,
+        .fn_proto,
+        => {
+            var i = main_tokens[n]; // fn token
+            while (i > 0) {
+                i -= 1;
+                switch (token_tags[i]) {
+                    .keyword_extern,
+                    .keyword_export,
+                    .keyword_pub,
+                    .keyword_inline,
+                    .keyword_noinline,
+                    .string_literal,
+                    => continue,
+
+                    else => return i + 1 - end_offset,
+                }
+            }
+            return i - end_offset;
+        },
+
+        .@"usingnamespace" => {
+            const main_token = main_tokens[n];
+            if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
+                end_offset += 1;
+            }
+            return main_token - end_offset;
+        },
+
+        .async_call_one,
+        .async_call_one_comma,
+        .async_call,
+        .async_call_comma,
+        => {
+            end_offset += 1; // async token
+            n = datas[n].lhs;
+        },
+
+        .container_field_init,
+        .container_field_align,
+        .container_field,
+        => {
+            const name_token = main_tokens[n];
+            if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
+                end_offset += 1;
+            }
+            return name_token - end_offset;
+        },
+
+        // Scan backwards over any var decl qualifiers.
+        .global_var_decl,
+        .local_var_decl,
+        .simple_var_decl,
+        .aligned_var_decl,
+        => {
+            var i = main_tokens[n]; // mut token
+            while (i > 0) {
+                i -= 1;
+                switch (token_tags[i]) {
+                    .keyword_extern,
+                    .keyword_export,
+                    .keyword_comptime,
+                    .keyword_pub,
+                    .keyword_threadlocal,
+                    .string_literal,
+                    => continue,
+
+                    else => return i + 1 - end_offset,
+                }
+            }
+            return i - end_offset;
+        },
+
+        .block,
+        .block_semicolon,
+        .block_two,
+        .block_two_semicolon,
+        => {
+            // Look for a label.
+            const lbrace = main_tokens[n];
+            if (token_tags[lbrace - 1] == .colon and
+                token_tags[lbrace - 2] == .identifier)
+            {
+                end_offset += 2;
+            }
+            return lbrace - end_offset;
+        },
+
+        .container_decl,
+        .container_decl_trailing,
+        .container_decl_two,
+        .container_decl_two_trailing,
+        .container_decl_arg,
+        .container_decl_arg_trailing,
+        .tagged_union,
+        .tagged_union_trailing,
+        .tagged_union_two,
+        .tagged_union_two_trailing,
+        .tagged_union_enum_tag,
+        .tagged_union_enum_tag_trailing,
+        => {
+            // Account for a `packed`/`extern` qualifier before the container keyword.
+            const main_token = main_tokens[n];
+            switch (token_tags[main_token - 1]) {
+                .keyword_packed, .keyword_extern => end_offset += 1,
+                else => {},
+            }
+            return main_token - end_offset;
+        },
+
+        .ptr_type_aligned,
+        .ptr_type_sentinel,
+        .ptr_type,
+        .ptr_type_bit_range,
+        => {
+            const main_token = main_tokens[n];
+            return switch (token_tags[main_token]) {
+                .asterisk,
+                .asterisk_asterisk,
+                => switch (token_tags[main_token - 1]) {
+                    .l_bracket => main_token - 1,
+                    else => main_token,
+                },
+                .l_bracket => main_token,
+                else => unreachable,
+            } - end_offset;
+        },
+
+        .switch_case_one => {
+            if (datas[n].lhs == 0) {
+                return main_tokens[n] - 1 - end_offset; // else token
+            } else {
+                n = datas[n].lhs;
+            }
+        },
+        .switch_case => {
+            const extra = tree.extraData(datas[n].lhs, Node.SubRange);
+            assert(extra.end - extra.start > 0);
+            // Continue from the first case value.
+            n = tree.extra_data[extra.start];
+        },
+
+        .asm_output, .asm_input => {
+            assert(token_tags[main_tokens[n] - 1] == .l_bracket);
+            return main_tokens[n] - 1 - end_offset;
+        },
+
+        .while_simple,
+        .while_cont,
+        .@"while",
+        .for_simple,
+        .@"for",
+        => {
+            // Look for a label and inline.
+            const main_token = main_tokens[n];
+            var result = main_token;
+            if (token_tags[result - 1] == .keyword_inline) {
+                result -= 1;
+            }
+            if (token_tags[result - 1] == .colon) {
+                result -= 2;
+            }
+            return result - end_offset;
+        },
+    };
+}
+
+pub fn lastToken(tree: Tree, node: Node.Index) TokenIndex {
+    const tags = tree.nodes.items(.tag);
+    const datas = tree.nodes.items(.data);
+    const main_tokens = tree.nodes.items(.main_token);
+    const token_starts = tree.tokens.items(.start);
+    const token_tags = tree.tokens.items(.tag);
+    var n = node;
+    var end_offset: TokenIndex = 0;
+    while (true) switch (tags[n]) {
+        .root => return @intCast(TokenIndex, tree.tokens.len - 1),
+
+        .@"usingnamespace",
+        .bool_not,
+        .negation,
+        .bit_not,
+        .negation_wrap,
+        .address_of,
+        .@"try",
+        .@"await",
+        .optional_type,
+        .@"resume",
+        .@"nosuspend",
+        .@"comptime",
+        => n = datas[n].lhs,
+
+        .test_decl,
+        .@"errdefer",
+        .@"defer",
+        .@"catch",
+        .equal_equal,
+        .bang_equal,
+        .less_than,
+        .greater_than,
+        .less_or_equal,
+        .greater_or_equal,
+        .assign_mul,
+        .assign_div,
+        .assign_mod,
+        .assign_add,
+        .assign_sub,
+        .assign_bit_shift_left,
+        .assign_bit_shift_right,
+        .assign_bit_and,
+        .assign_bit_xor,
+        .assign_bit_or,
+        .assign_mul_wrap,
+        .assign_add_wrap,
+        .assign_sub_wrap,
+        .assign,
+        .merge_error_sets,
+        .mul,
+        .div,
+        .mod,
+        .array_mult,
+        .mul_wrap,
+        .add,
+        .sub,
+        .array_cat,
+        .add_wrap,
+        .sub_wrap,
+        .bit_shift_left,
+        .bit_shift_right,
+        .bit_and,
+        .bit_xor,
+        .bit_or,
+        .@"orelse",
+        .bool_and,
+        .bool_or,
+        .anyframe_type,
+        .error_union,
+        .if_simple,
+        .while_simple,
+        .for_simple,
+        .fn_proto_simple,
+        .fn_proto_multi,
+        .ptr_type_aligned,
+        .ptr_type_sentinel,
+        .ptr_type,
+        .ptr_type_bit_range,
+        .array_type,
+        .switch_case_one,
+        .switch_case,
+        .switch_range,
+        => n = datas[n].rhs,
+
+        .field_access,
+        .unwrap_optional,
+        .grouped_expression,
+        .multiline_string_literal,
+        .error_set_decl,
+        .asm_simple,
+        .asm_output,
+        .asm_input,
+        .error_value,
+        => return datas[n].rhs + end_offset,
+
+        .@"anytype",
+        .anyframe_literal,
+        .char_literal,
+        .integer_literal,
+        .float_literal,
+        .unreachable_literal,
+        .identifier,
+        .deref,
+        .enum_literal,
+        .string_literal,
+        => return main_tokens[n] + end_offset,
+
+        .@"return" => if (datas[n].lhs != 0) {
+            n = datas[n].lhs;
+        } else {
+            return main_tokens[n] + end_offset;
+        },
+
+        .call, .async_call => {
+            end_offset += 1; // for the rparen
+            const params = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (params.end - params.start == 0) {
+                return main_tokens[n] + end_offset;
+            }
+            n = tree.extra_data[params.end - 1]; // last parameter
+        },
+        .tagged_union_enum_tag => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (members.end - members.start == 0) {
+                end_offset += 4; // for the rparen + rparen + lbrace + rbrace
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[members.end - 1]; // last parameter
+            }
+        },
+        .call_comma,
+        .async_call_comma,
+        .tagged_union_enum_tag_trailing,
+        => {
+            end_offset += 2; // for the comma/semicolon + rparen/rbrace
+            const params = tree.extraData(datas[n].rhs, Node.SubRange);
+            assert(params.end > params.start);
+            n = tree.extra_data[params.end - 1]; // last parameter
+        },
+        .@"switch" => {
+            const cases = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (cases.end - cases.start == 0) {
+                end_offset += 3; // rparen, lbrace, rbrace
+                n = datas[n].lhs; // condition expression
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[cases.end - 1]; // last case
+            }
+        },
+        .container_decl_arg => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            if (members.end - members.start == 0) {
+                end_offset += 3; // for the rparen + lbrace + rbrace
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // for the rbrace
+                n = tree.extra_data[members.end - 1]; // last parameter
+            }
+        },
+        .@"asm" => {
+            const extra = tree.extraData(datas[n].rhs, Node.Asm);
+            return extra.rparen + end_offset;
+        },
+        .array_init,
+        .struct_init,
+        => {
+            const elements = tree.extraData(datas[n].rhs, Node.SubRange);
+            assert(elements.end - elements.start > 0);
+            end_offset += 1; // for the rbrace
+            n = tree.extra_data[elements.end - 1]; // last element
+        },
+        .array_init_comma,
+        .struct_init_comma,
+        .container_decl_arg_trailing,
+        .switch_comma,
+        => {
+            const members = tree.extraData(datas[n].rhs, Node.SubRange);
+            assert(members.end - members.start > 0);
+            end_offset += 2; // for the comma + rbrace
+            n = tree.extra_data[members.end - 1]; // last parameter
+        },
+        .array_init_dot,
+        .struct_init_dot,
+        .block,
+        .container_decl,
+        .tagged_union,
+        .builtin_call,
+        => {
+            assert(datas[n].rhs - datas[n].lhs > 0);
+            end_offset += 1; // for the rbrace
+            n = tree.extra_data[datas[n].rhs - 1]; // last statement
+        },
+        .array_init_dot_comma,
+        .struct_init_dot_comma,
+        .block_semicolon,
+        .container_decl_trailing,
+        .tagged_union_trailing,
+        .builtin_call_comma,
+        => {
+            assert(datas[n].rhs - datas[n].lhs > 0);
+            end_offset += 2; // for the comma/semicolon + rbrace/rparen
+            n = tree.extra_data[datas[n].rhs - 1]; // last member
+        },
+        .call_one,
+        .async_call_one,
+        .array_access,
+        => {
+            end_offset += 1; // for the rparen/rbracket
+            if (datas[n].rhs == 0) {
+                return main_tokens[n] + end_offset;
+            }
+            n = datas[n].rhs;
+        },
+        .array_init_dot_two,
+        .block_two,
+        .builtin_call_two,
+        .struct_init_dot_two,
+        .container_decl_two,
+        .tagged_union_two,
+        => {
+            if (datas[n].rhs != 0) {
+                end_offset += 1; // for the rparen/rbrace
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                end_offset += 1; // for the rparen/rbrace
+                n = datas[n].lhs;
+            } else {
+                switch (tags[n]) {
+                    .array_init_dot_two,
+                    .block_two,
+                    .struct_init_dot_two,
+                    => end_offset += 1, // rbrace
+                    .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
+                    .container_decl_two => {
+                        var i: u32 = 2; // lbrace + rbrace
+                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+                        end_offset += i;
+                    },
+                    .tagged_union_two => {
+                        var i: u32 = 5; // (enum) {}
+                        while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
+                        end_offset += i;
+                    },
+                    else => unreachable,
+                }
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .array_init_dot_two_comma,
+        .builtin_call_two_comma,
+        .block_two_semicolon,
+        .struct_init_dot_two_comma,
+        .container_decl_two_trailing,
+        .tagged_union_two_trailing,
+        => {
+            end_offset += 2; // for the comma/semicolon + rbrace/rparen
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                unreachable;
+            }
+        },
+        .simple_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // from mut token to name
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .aligned_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                end_offset += 1; // for the rparen
+                n = datas[n].lhs;
+            } else {
+                end_offset += 1; // from mut token to name
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .global_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
+                if (extra.section_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.section_node;
+                } else if (extra.align_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.align_node;
+                } else if (extra.type_node != 0) {
+                    n = extra.type_node;
+                } else {
+                    end_offset += 1; // from mut token to name
+                    return main_tokens[n] + end_offset;
+                }
+            }
+        },
+        .local_var_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
+                if (extra.align_node != 0) {
+                    end_offset += 1; // for the rparen
+                    n = extra.align_node;
+                } else if (extra.type_node != 0) {
+                    n = extra.type_node;
+                } else {
+                    end_offset += 1; // from mut token to name
+                    return main_tokens[n] + end_offset;
+                }
+            }
+        },
+        .container_field_init => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .container_field_align => {
+            if (datas[n].rhs != 0) {
+                end_offset += 1; // for the rparen
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .container_field => {
+            const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
+            if (extra.value_expr != 0) {
+                n = extra.value_expr;
+            } else if (extra.align_expr != 0) {
+                end_offset += 1; // for the rparen
+                n = extra.align_expr;
+            } else if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+
+        .array_init_one,
+        .struct_init_one,
+        => {
+            end_offset += 1; // rbrace
+            if (datas[n].rhs == 0) {
+                return main_tokens[n] + end_offset;
+            } else {
+                n = datas[n].rhs;
+            }
+        },
+        .slice_open,
+        .call_one_comma,
+        .async_call_one_comma,
+        .array_init_one_comma,
+        .struct_init_one_comma,
+        => {
+            end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
+            n = datas[n].rhs;
+            assert(n != 0);
+        },
+        .slice => {
+            const extra = tree.extraData(datas[n].rhs, Node.Slice);
+            assert(extra.end != 0); // should have used slice_open
+            end_offset += 1; // rbracket
+            n = extra.end;
+        },
+        .slice_sentinel => {
+            const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
+            assert(extra.sentinel != 0); // should have used slice
+            end_offset += 1; // rbracket
+            n = extra.sentinel;
+        },
+
+        .@"continue" => {
+            if (datas[n].lhs != 0) {
+                return datas[n].lhs + end_offset;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .@"break" => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else if (datas[n].lhs != 0) {
+                return datas[n].lhs + end_offset;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .fn_decl => {
+            if (datas[n].rhs != 0) {
+                n = datas[n].rhs;
+            } else {
+                n = datas[n].lhs;
+            }
+        },
+        .fn_proto_one => {
+            const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne);
+            // linksection, callconv, align can appear in any order, so we
+            // find the last one here.
+            var max_node: Node.Index = datas[n].rhs;
+            var max_start = token_starts[main_tokens[max_node]];
+            var max_offset: TokenIndex = 0;
+            if (extra.align_expr != 0) {
+                const start = token_starts[main_tokens[extra.align_expr]];
+                if (start > max_start) {
+                    max_node = extra.align_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.section_expr != 0) {
+                const start = token_starts[main_tokens[extra.section_expr]];
+                if (start > max_start) {
+                    max_node = extra.section_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.callconv_expr != 0) {
+                const start = token_starts[main_tokens[extra.callconv_expr]];
+                if (start > max_start) {
+                    max_node = extra.callconv_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            n = max_node;
+            end_offset += max_offset;
+        },
+        .fn_proto => {
+            const extra = tree.extraData(datas[n].lhs, Node.FnProto);
+            // linksection, callconv, align can appear in any order, so we
+            // find the last one here.
+            var max_node: Node.Index = datas[n].rhs;
+            var max_start = token_starts[main_tokens[max_node]];
+            var max_offset: TokenIndex = 0;
+            if (extra.align_expr != 0) {
+                const start = token_starts[main_tokens[extra.align_expr]];
+                if (start > max_start) {
+                    max_node = extra.align_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.section_expr != 0) {
+                const start = token_starts[main_tokens[extra.section_expr]];
+                if (start > max_start) {
+                    max_node = extra.section_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            if (extra.callconv_expr != 0) {
+                const start = token_starts[main_tokens[extra.callconv_expr]];
+                if (start > max_start) {
+                    max_node = extra.callconv_expr;
+                    max_start = start;
+                    max_offset = 1; // for the rparen
+                }
+            }
+            n = max_node;
+            end_offset += max_offset;
+        },
+        .while_cont => {
+            const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
+            assert(extra.then_expr != 0);
+            n = extra.then_expr;
+        },
+        .@"while" => {
+            const extra = tree.extraData(datas[n].rhs, Node.While);
+            assert(extra.else_expr != 0);
+            n = extra.else_expr;
+        },
+        .@"if", .@"for" => {
+            const extra = tree.extraData(datas[n].rhs, Node.If);
+            assert(extra.else_expr != 0);
+            n = extra.else_expr;
+        },
+        .@"suspend" => {
+            if (datas[n].lhs != 0) {
+                n = datas[n].lhs;
+            } else {
+                return main_tokens[n] + end_offset;
+            }
+        },
+        .array_type_sentinel => {
+            const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
+            n = extra.elem_type;
+        },
+    };
+}
+
+/// Returns true if no newline appears in the source between the start of
+/// `token1` and the start of `token2`.
+pub fn tokensOnSameLine(tree: Tree, token1: TokenIndex, token2: TokenIndex) bool {
+    const token_starts = tree.tokens.items(.start);
+    const between = tree.source[token_starts[token1]..token_starts[token2]];
+    for (between) |byte| {
+        if (byte == '\n') return false;
+    }
+    return true;
+}
+
+/// Returns the source bytes spanned by `node`: from the first byte of its
+/// first token through the last byte of its last token.
+pub fn getNodeSource(tree: Tree, node: Node.Index) []const u8 {
+    const token_starts = tree.tokens.items(.start);
+    const first = tree.firstToken(node);
+    const last = tree.lastToken(node);
+    const begin = token_starts[first];
+    const end = token_starts[last] + tree.tokenSlice(last).len;
+    return tree.source[begin..end];
+}
+
+/// Assembles the `full.VarDecl` view of a `global_var_decl` node, whose
+/// type/align/section nodes live in `extra_data`.
+pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+    assert(tree.nodes.items(.tag)[node] == .global_var_decl);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.GlobalVarDecl);
+    return tree.fullVarDecl(.{
+        .mut_token = tree.nodes.items(.main_token)[node],
+        .type_node = extra.type_node,
+        .align_node = extra.align_node,
+        .section_node = extra.section_node,
+        .init_node = d.rhs,
+    });
+}
+
+/// Assembles the `full.VarDecl` view of a `local_var_decl` node; this node
+/// kind stores no section node, so 0 is passed for it.
+pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+    assert(tree.nodes.items(.tag)[node] == .local_var_decl);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.LocalVarDecl);
+    return tree.fullVarDecl(.{
+        .mut_token = tree.nodes.items(.main_token)[node],
+        .type_node = extra.type_node,
+        .align_node = extra.align_node,
+        .section_node = 0,
+        .init_node = d.rhs,
+    });
+}
+
+/// Assembles the `full.VarDecl` view of a `simple_var_decl` node: an
+/// optional type in lhs, an optional initializer in rhs, nothing else.
+pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+    assert(tree.nodes.items(.tag)[node] == .simple_var_decl);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullVarDecl(.{
+        .mut_token = tree.nodes.items(.main_token)[node],
+        .type_node = d.lhs,
+        .align_node = 0,
+        .section_node = 0,
+        .init_node = d.rhs,
+    });
+}
+
+/// Assembles the `full.VarDecl` view of an `aligned_var_decl` node: the
+/// align expression in lhs, an optional initializer in rhs.
+pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
+    assert(tree.nodes.items(.tag)[node] == .aligned_var_decl);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullVarDecl(.{
+        .mut_token = tree.nodes.items(.main_token)[node],
+        .type_node = 0,
+        .align_node = d.lhs,
+        .section_node = 0,
+        .init_node = d.rhs,
+    });
+}
+
+/// Assembles the `full.If` view of an `if_simple` node (no else branch).
+pub fn ifSimple(tree: Tree, node: Node.Index) full.If {
+    assert(tree.nodes.items(.tag)[node] == .if_simple);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullIf(.{
+        .if_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .then_expr = d.rhs,
+        .else_expr = 0,
+    });
+}
+
+/// Assembles the `full.If` view of an `if` node; then/else expressions
+/// are stored in `extra_data`.
+pub fn ifFull(tree: Tree, node: Node.Index) full.If {
+    assert(tree.nodes.items(.tag)[node] == .@"if");
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.If);
+    return tree.fullIf(.{
+        .if_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .then_expr = extra.then_expr,
+        .else_expr = extra.else_expr,
+    });
+}
+
+/// Assembles the `full.ContainerField` view of a `container_field` node;
+/// its value and align expressions live in `extra_data`.
+pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField {
+    assert(tree.nodes.items(.tag)[node] == .container_field);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.ContainerField);
+    return tree.fullContainerField(.{
+        .name_token = tree.nodes.items(.main_token)[node],
+        .type_expr = d.lhs,
+        .value_expr = extra.value_expr,
+        .align_expr = extra.align_expr,
+    });
+}
+
+/// Assembles the `full.ContainerField` view of a `container_field_init`
+/// node: type in lhs, default value in rhs, no align expression.
+pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField {
+    assert(tree.nodes.items(.tag)[node] == .container_field_init);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullContainerField(.{
+        .name_token = tree.nodes.items(.main_token)[node],
+        .type_expr = d.lhs,
+        .value_expr = 0,
+        .align_expr = d.rhs,
+    });
+}
+
+/// Assembles the `full.ContainerField` view of a `container_field_align`
+/// node: type in lhs, align expression in rhs, no default value.
+pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField {
+    assert(tree.nodes.items(.tag)[node] == .container_field_align);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullContainerField(.{
+        .name_token = tree.nodes.items(.main_token)[node],
+        .type_expr = d.lhs,
+        .value_expr = 0,
+        .align_expr = d.rhs,
+    });
+}
+
+/// Assembles the `full.FnProto` view of a `fn_proto_simple` node: at most
+/// one parameter and no align/section/callconv expressions.
+/// `buffer` backs the returned params slice and must outlive the result.
+pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
+    assert(tree.nodes.items(.tag)[node] == .fn_proto_simple);
+    const d = tree.nodes.items(.data)[node];
+    buffer[0] = d.lhs;
+    const params = if (d.lhs != 0) buffer[0..1] else buffer[0..0];
+    return tree.fullFnProto(.{
+        .proto_node = node,
+        .fn_token = tree.nodes.items(.main_token)[node],
+        .return_type = d.rhs,
+        .params = params,
+        .align_expr = 0,
+        .section_expr = 0,
+        .callconv_expr = 0,
+    });
+}
+
+/// Assembles the `full.FnProto` view of a `fn_proto_multi` node; the
+/// parameter list is a `SubRange` into `extra_data`.
+pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto {
+    assert(tree.nodes.items(.tag)[node] == .fn_proto_multi);
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.lhs, Node.SubRange);
+    return tree.fullFnProto(.{
+        .proto_node = node,
+        .fn_token = tree.nodes.items(.main_token)[node],
+        .return_type = d.rhs,
+        .params = tree.extra_data[range.start..range.end],
+        .align_expr = 0,
+        .section_expr = 0,
+        .callconv_expr = 0,
+    });
+}
+
+/// Assembles the `full.FnProto` view of a `fn_proto_one` node: at most one
+/// parameter plus align/section/callconv expressions in `extra_data`.
+/// `buffer` backs the returned params slice and must outlive the result.
+pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
+    assert(tree.nodes.items(.tag)[node] == .fn_proto_one);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.FnProtoOne);
+    buffer[0] = extra.param;
+    const params = if (extra.param != 0) buffer[0..1] else buffer[0..0];
+    return tree.fullFnProto(.{
+        .proto_node = node,
+        .fn_token = tree.nodes.items(.main_token)[node],
+        .return_type = d.rhs,
+        .params = params,
+        .align_expr = extra.align_expr,
+        .section_expr = extra.section_expr,
+        .callconv_expr = extra.callconv_expr,
+    });
+}
+
+/// Assembles the `full.FnProto` view of a `fn_proto` node: a full
+/// parameter range plus align/section/callconv expressions in `extra_data`.
+pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto {
+    assert(tree.nodes.items(.tag)[node] == .fn_proto);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.FnProto);
+    return tree.fullFnProto(.{
+        .proto_node = node,
+        .fn_token = tree.nodes.items(.main_token)[node],
+        .return_type = d.rhs,
+        .params = tree.extra_data[extra.params_start..extra.params_end],
+        .align_expr = extra.align_expr,
+        .section_expr = extra.section_expr,
+        .callconv_expr = extra.callconv_expr,
+    });
+}
+
+/// Assembles the `full.StructInit` view of a `struct_init_one[_comma]`
+/// node. `buffer` backs the returned fields slice.
+pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .struct_init_one or tag == .struct_init_one_comma);
+    const d = tree.nodes.items(.data)[node];
+    buffer[0] = d.rhs;
+    const fields = if (d.rhs != 0) buffer[0..1] else buffer[0..0];
+    return tree.fullStructInit(.{
+        .lbrace = tree.nodes.items(.main_token)[node],
+        .fields = fields,
+        .type_expr = d.lhs,
+    });
+}
+
+/// Assembles the `full.StructInit` view of a `struct_init_dot_two[_comma]`
+/// node: an anonymous literal with up to two fields stored inline in
+/// lhs/rhs. `buffer` backs the returned fields slice.
+pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .struct_init_dot_two or tag == .struct_init_dot_two_comma);
+    const d = tree.nodes.items(.data)[node];
+    buffer.* = .{ d.lhs, d.rhs };
+    const len: usize = if (d.rhs != 0) 2 else if (d.lhs != 0) 1 else 0;
+    return tree.fullStructInit(.{
+        .lbrace = tree.nodes.items(.main_token)[node],
+        .fields = buffer[0..len],
+        .type_expr = 0,
+    });
+}
+
+/// Assembles the `full.StructInit` view of a `struct_init_dot[_comma]`
+/// node; fields occupy `extra_data[lhs..rhs]`.
+pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .struct_init_dot or tag == .struct_init_dot_comma);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullStructInit(.{
+        .lbrace = tree.nodes.items(.main_token)[node],
+        .fields = tree.extra_data[d.lhs..d.rhs],
+        .type_expr = 0,
+    });
+}
+
+/// Assembles the `full.StructInit` view of a `struct_init[_comma]` node;
+/// fields are a `SubRange` into `extra_data`, type expression in lhs.
+pub fn structInit(tree: Tree, node: Node.Index) full.StructInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .struct_init or tag == .struct_init_comma);
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.rhs, Node.SubRange);
+    return tree.fullStructInit(.{
+        .lbrace = tree.nodes.items(.main_token)[node],
+        .fields = tree.extra_data[range.start..range.end],
+        .type_expr = d.lhs,
+    });
+}
+
+/// Assembles the `full.ArrayInit` view of an `array_init_one[_comma]`
+/// node. `buffer` backs the returned elements slice.
+pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .array_init_one or tag == .array_init_one_comma);
+    const d = tree.nodes.items(.data)[node];
+    buffer[0] = d.rhs;
+    return .{
+        .ast = .{
+            .lbrace = tree.nodes.items(.main_token)[node],
+            .elements = if (d.rhs != 0) buffer[0..1] else buffer[0..0],
+            .type_expr = d.lhs,
+        },
+    };
+}
+
+/// Assembles the `full.ArrayInit` view of an `array_init_dot_two[_comma]`
+/// node: an anonymous literal with up to two elements stored inline in
+/// lhs/rhs. `buffer` backs the returned elements slice.
+pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .array_init_dot_two or tag == .array_init_dot_two_comma);
+    const d = tree.nodes.items(.data)[node];
+    buffer.* = .{ d.lhs, d.rhs };
+    const len: usize = if (d.rhs != 0) 2 else if (d.lhs != 0) 1 else 0;
+    return .{
+        .ast = .{
+            .lbrace = tree.nodes.items(.main_token)[node],
+            .elements = buffer[0..len],
+            .type_expr = 0,
+        },
+    };
+}
+
+/// Assembles the `full.ArrayInit` view of an `array_init_dot[_comma]`
+/// node; elements occupy `extra_data[lhs..rhs]`.
+pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .array_init_dot or tag == .array_init_dot_comma);
+    const d = tree.nodes.items(.data)[node];
+    return .{
+        .ast = .{
+            .lbrace = tree.nodes.items(.main_token)[node],
+            .elements = tree.extra_data[d.lhs..d.rhs],
+            .type_expr = 0,
+        },
+    };
+}
+
+/// Assembles the `full.ArrayInit` view of an `array_init[_comma]` node;
+/// elements are a `SubRange` into `extra_data`, type expression in lhs.
+pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .array_init or tag == .array_init_comma);
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.rhs, Node.SubRange);
+    return .{
+        .ast = .{
+            .lbrace = tree.nodes.items(.main_token)[node],
+            .elements = tree.extra_data[range.start..range.end],
+            .type_expr = d.lhs,
+        },
+    };
+}
+
+/// Assembles the `full.ArrayType` view of an `array_type` node
+/// (element count in lhs, element type in rhs, no sentinel).
+pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType {
+    assert(tree.nodes.items(.tag)[node] == .array_type);
+    const d = tree.nodes.items(.data)[node];
+    return .{
+        .ast = .{
+            .lbracket = tree.nodes.items(.main_token)[node],
+            .elem_count = d.lhs,
+            .sentinel = 0,
+            .elem_type = d.rhs,
+        },
+    };
+}
+
+/// Assembles the `full.ArrayType` view of an `array_type_sentinel` node;
+/// sentinel and element type live in `extra_data`.
+pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType {
+    assert(tree.nodes.items(.tag)[node] == .array_type_sentinel);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.ArrayTypeSentinel);
+    assert(extra.sentinel != 0);
+    return .{
+        .ast = .{
+            .lbracket = tree.nodes.items(.main_token)[node],
+            .elem_count = d.lhs,
+            .sentinel = extra.sentinel,
+            .elem_type = extra.elem_type,
+        },
+    };
+}
+
+/// Assembles the `full.PtrType` view of a `ptr_type_aligned` node
+/// (optional align expression in lhs, child type in rhs).
+pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
+    assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullPtrType(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .align_node = d.lhs,
+        .sentinel = 0,
+        .bit_range_start = 0,
+        .bit_range_end = 0,
+        .child_type = d.rhs,
+    });
+}
+
+/// Assembles the `full.PtrType` view of a `ptr_type_sentinel` node
+/// (optional sentinel in lhs, child type in rhs).
+pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
+    assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullPtrType(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .align_node = 0,
+        .sentinel = d.lhs,
+        .bit_range_start = 0,
+        .bit_range_end = 0,
+        .child_type = d.rhs,
+    });
+}
+
+/// Assembles the `full.PtrType` view of a `ptr_type` node; align and
+/// sentinel nodes live in `extra_data`.
+pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType {
+    assert(tree.nodes.items(.tag)[node] == .ptr_type);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.PtrType);
+    return tree.fullPtrType(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .align_node = extra.align_node,
+        .sentinel = extra.sentinel,
+        .bit_range_start = 0,
+        .bit_range_end = 0,
+        .child_type = d.rhs,
+    });
+}
+
+/// Assembles the `full.PtrType` view of a `ptr_type_bit_range` node;
+/// align, sentinel, and bit-range nodes live in `extra_data`.
+pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
+    assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.lhs, Node.PtrTypeBitRange);
+    return tree.fullPtrType(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .align_node = extra.align_node,
+        .sentinel = extra.sentinel,
+        .bit_range_start = extra.bit_range_start,
+        .bit_range_end = extra.bit_range_end,
+        .child_type = d.rhs,
+    });
+}
+
+/// Assembles the `full.Slice` view of a `slice_open` node: sliced operand
+/// in lhs, start expression in rhs, no end or sentinel.
+pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice {
+    assert(tree.nodes.items(.tag)[node] == .slice_open);
+    const d = tree.nodes.items(.data)[node];
+    return .{
+        .ast = .{
+            .sliced = d.lhs,
+            .lbracket = tree.nodes.items(.main_token)[node],
+            .start = d.rhs,
+            .end = 0,
+            .sentinel = 0,
+        },
+    };
+}
+
+/// Assembles the `full.Slice` view of a `slice` node; start and end
+/// expressions live in `extra_data`, no sentinel.
+pub fn slice(tree: Tree, node: Node.Index) full.Slice {
+    assert(tree.nodes.items(.tag)[node] == .slice);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.Slice);
+    return .{
+        .ast = .{
+            .sliced = d.lhs,
+            .lbracket = tree.nodes.items(.main_token)[node],
+            .start = extra.start,
+            .end = extra.end,
+            .sentinel = 0,
+        },
+    };
+}
+
+/// Assembles the `full.Slice` view of a `slice_sentinel` node; start,
+/// end, and sentinel expressions live in `extra_data`.
+pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice {
+    assert(tree.nodes.items(.tag)[node] == .slice_sentinel);
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.SliceSentinel);
+    return .{
+        .ast = .{
+            .sliced = d.lhs,
+            .lbracket = tree.nodes.items(.main_token)[node],
+            .start = extra.start,
+            .end = extra.end,
+            .sentinel = extra.sentinel,
+        },
+    };
+}
+
+/// Assembles the `full.ContainerDecl` view of a
+/// `container_decl_two[_trailing]` node, whose up-to-two members are
+/// stored inline in lhs/rhs. `buffer` backs the returned members slice.
+pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .container_decl_two or tag == .container_decl_two_trailing);
+    const d = tree.nodes.items(.data)[node];
+    buffer.* = .{ d.lhs, d.rhs };
+    const len: usize = if (d.rhs != 0) 2 else if (d.lhs != 0) 1 else 0;
+    return tree.fullContainerDecl(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .enum_token = null,
+        .members = buffer[0..len],
+        .arg = 0,
+    });
+}
+
+/// Assembles the `full.ContainerDecl` view of a
+/// `container_decl[_trailing]` node; members occupy `extra_data[lhs..rhs]`.
+pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .container_decl or tag == .container_decl_trailing);
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullContainerDecl(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .enum_token = null,
+        .members = tree.extra_data[d.lhs..d.rhs],
+        .arg = 0,
+    });
+}
+
+/// Assembles the `full.ContainerDecl` view of a
+/// `container_decl_arg[_trailing]` node: the container argument in lhs,
+/// members as a `SubRange` into `extra_data`.
+pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .container_decl_arg or tag == .container_decl_arg_trailing);
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.rhs, Node.SubRange);
+    return tree.fullContainerDecl(.{
+        .main_token = tree.nodes.items(.main_token)[node],
+        .enum_token = null,
+        .members = tree.extra_data[range.start..range.end],
+        .arg = d.lhs,
+    });
+}
+
+/// Assembles the `full.ContainerDecl` view of a
+/// `tagged_union_two[_trailing]` node with up to two inline members.
+/// `buffer` backs the returned members slice.
+pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .tagged_union_two or tag == .tagged_union_two_trailing);
+    const d = tree.nodes.items(.data)[node];
+    buffer.* = .{ d.lhs, d.rhs };
+    const len: usize = if (d.rhs != 0) 2 else if (d.lhs != 0) 1 else 0;
+    const main_token = tree.nodes.items(.main_token)[node];
+    return tree.fullContainerDecl(.{
+        .main_token = main_token,
+        .enum_token = main_token + 2, // union lparen enum
+        .members = buffer[0..len],
+        .arg = 0,
+    });
+}
+
+/// Assembles the `full.ContainerDecl` view of a `tagged_union[_trailing]`
+/// node; members occupy `extra_data[lhs..rhs]`.
+pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .tagged_union or tag == .tagged_union_trailing);
+    const d = tree.nodes.items(.data)[node];
+    const main_token = tree.nodes.items(.main_token)[node];
+    return tree.fullContainerDecl(.{
+        .main_token = main_token,
+        .enum_token = main_token + 2, // union lparen enum
+        .members = tree.extra_data[d.lhs..d.rhs],
+        .arg = 0,
+    });
+}
+
+/// Assembles the `full.ContainerDecl` view of a
+/// `tagged_union_enum_tag[_trailing]` node: the enum tag expression in
+/// lhs, members as a `SubRange` into `extra_data`.
+pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl {
+    const tag = tree.nodes.items(.tag)[node];
+    assert(tag == .tagged_union_enum_tag or tag == .tagged_union_enum_tag_trailing);
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.rhs, Node.SubRange);
+    const main_token = tree.nodes.items(.main_token)[node];
+    return tree.fullContainerDecl(.{
+        .main_token = main_token,
+        .enum_token = main_token + 2, // union lparen enum
+        .members = tree.extra_data[range.start..range.end],
+        .arg = d.lhs,
+    });
+}
+
+/// Assembles the `full.SwitchCase` view of a single-value switch case node.
+pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase {
+    // Take a pointer into the MultiArrayList storage so `lhs` itself can be
+    // exposed as a one-element slice of case values without copying.
+    const data = &tree.nodes.items(.data)[node];
+    const values: *[1]Node.Index = &data.lhs;
+    return tree.fullSwitchCase(.{
+        // lhs == 0 is the null sentinel (see `nodes` doc at top of file):
+        // the case carries no value expression, so the slice is empty.
+        .values = if (data.lhs == 0) values[0..0] else values[0..1],
+        .arrow_token = tree.nodes.items(.main_token)[node],
+        .target_expr = data.rhs,
+    });
+}
+
+/// Assembles the `full.SwitchCase` view of a multi-value switch case; the
+/// case values are a `SubRange` into `extra_data`.
+pub fn switchCase(tree: Tree, node: Node.Index) full.SwitchCase {
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.lhs, Node.SubRange);
+    return tree.fullSwitchCase(.{
+        .values = tree.extra_data[range.start..range.end],
+        .arrow_token = tree.nodes.items(.main_token)[node],
+        .target_expr = d.rhs,
+    });
+}
+
+/// Assembles the `full.Asm` view of an asm node that has no items;
+/// rhs holds the closing rparen token.
+pub fn asmSimple(tree: Tree, node: Node.Index) full.Asm {
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullAsm(.{
+        .asm_token = tree.nodes.items(.main_token)[node],
+        .template = d.lhs,
+        .items = &.{},
+        .rparen = d.rhs,
+    });
+}
+
+/// Assembles the `full.Asm` view of an asm node whose item list and
+/// rparen token are stored in `extra_data`.
+pub fn asmFull(tree: Tree, node: Node.Index) full.Asm {
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.Asm);
+    return tree.fullAsm(.{
+        .asm_token = tree.nodes.items(.main_token)[node],
+        .template = d.lhs,
+        .items = tree.extra_data[extra.items_start..extra.items_end],
+        .rparen = extra.rparen,
+    });
+}
+
+/// Assembles the `full.While` view of a `while_simple` node (no continue
+/// expression, no else branch).
+pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullWhile(.{
+        .while_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .cont_expr = 0,
+        .then_expr = d.rhs,
+        .else_expr = 0,
+    });
+}
+
+/// Assembles the `full.While` view of a `while_cont` node; the continue
+/// and then expressions live in `extra_data`, no else branch.
+pub fn whileCont(tree: Tree, node: Node.Index) full.While {
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.WhileCont);
+    return tree.fullWhile(.{
+        .while_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .cont_expr = extra.cont_expr,
+        .then_expr = extra.then_expr,
+        .else_expr = 0,
+    });
+}
+
+/// Assembles the `full.While` view of a `while` node with continue, then,
+/// and else expressions all stored in `extra_data`.
+pub fn whileFull(tree: Tree, node: Node.Index) full.While {
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.While);
+    return tree.fullWhile(.{
+        .while_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .cont_expr = extra.cont_expr,
+        .then_expr = extra.then_expr,
+        .else_expr = extra.else_expr,
+    });
+}
+
+/// Assembles the `full.While` view of a `for_simple` node; `for` loops
+/// reuse the While representation with no continue expression.
+pub fn forSimple(tree: Tree, node: Node.Index) full.While {
+    const d = tree.nodes.items(.data)[node];
+    return tree.fullWhile(.{
+        .while_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .cont_expr = 0,
+        .then_expr = d.rhs,
+        .else_expr = 0,
+    });
+}
+
+/// Assembles the `full.While` view of a `for` node; then/else expressions
+/// are stored in `extra_data` using the `Node.If` layout.
+pub fn forFull(tree: Tree, node: Node.Index) full.While {
+    const d = tree.nodes.items(.data)[node];
+    const extra = tree.extraData(d.rhs, Node.If);
+    return tree.fullWhile(.{
+        .while_token = tree.nodes.items(.main_token)[node],
+        .cond_expr = d.lhs,
+        .cont_expr = 0,
+        .then_expr = extra.then_expr,
+        .else_expr = extra.else_expr,
+    });
+}
+
+/// Assembles the `full.Call` view of a call node with at most one
+/// argument. `buffer` backs the returned params slice.
+pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call {
+    const d = tree.nodes.items(.data)[node];
+    buffer.* = .{d.rhs};
+    return tree.fullCall(.{
+        .lparen = tree.nodes.items(.main_token)[node],
+        .fn_expr = d.lhs,
+        .params = if (d.rhs != 0) buffer[0..1] else buffer[0..0],
+    });
+}
+
+/// Assembles the `full.Call` view of a call node whose arguments are a
+/// `SubRange` into `extra_data`.
+pub fn callFull(tree: Tree, node: Node.Index) full.Call {
+    const d = tree.nodes.items(.data)[node];
+    const range = tree.extraData(d.rhs, Node.SubRange);
+    return tree.fullCall(.{
+        .lparen = tree.nodes.items(.main_token)[node],
+        .fn_expr = d.lhs,
+        .params = tree.extra_data[range.start..range.end],
+    });
+}
+
+/// Shared implementation backing the four `*VarDecl` accessors: starting
+/// from the `const`/`var` token, scans backwards to pick up any prefix
+/// tokens (`pub`, `extern`/`export` with an optional lib-name string,
+/// `threadlocal`, `comptime`).
+fn fullVarDecl(tree: Tree, info: full.VarDecl.Components) full.VarDecl {
+    const token_tags = tree.tokens.items(.tag);
+    var result: full.VarDecl = .{
+        .ast = info,
+        .visib_token = null,
+        .extern_export_token = null,
+        .lib_name = null,
+        .threadlocal_token = null,
+        .comptime_token = null,
+    };
+    // Walk tokens preceding the mut token; each recognized modifier is
+    // recorded, and the first unrecognized token ends the scan.
+    var i = info.mut_token;
+    while (i > 0) {
+        i -= 1;
+        switch (token_tags[i]) {
+            .keyword_extern, .keyword_export => result.extern_export_token = i,
+            .keyword_comptime => result.comptime_token = i,
+            .keyword_pub => result.visib_token = i,
+            .keyword_threadlocal => result.threadlocal_token = i,
+            // A string literal in this position is the `extern "name"` lib name.
+            .string_literal => result.lib_name = i,
+            else => break,
+        }
+    }
+    return result;
+}
+
+/// Shared implementation backing `ifSimple`/`ifFull`: derives the payload
+/// capture, error capture, and `else` tokens from the component node
+/// indexes via fixed token-offset arithmetic.
+fn fullIf(tree: Tree, info: full.If.Components) full.If {
+    const token_tags = tree.tokens.items(.tag);
+    var result: full.If = .{
+        .ast = info,
+        .payload_token = null,
+        .error_token = null,
+        .else_token = undefined, // only set below when else_expr != 0
+    };
+    // if (cond_expr) |x|
+    //              ^ ^
+    const payload_pipe = tree.lastToken(info.cond_expr) + 2;
+    if (token_tags[payload_pipe] == .pipe) {
+        result.payload_token = payload_pipe + 1;
+    }
+    if (info.else_expr != 0) {
+        // then_expr else |x|
+        //           ^    ^
+        result.else_token = tree.lastToken(info.then_expr) + 1;
+        if (token_tags[result.else_token + 1] == .pipe) {
+            result.error_token = result.else_token + 2;
+        }
+    }
+    return result;
+}
+
+/// Shared implementation backing the `containerField*` accessors: detects
+/// an optional `comptime` keyword directly before the field name.
+fn fullContainerField(tree: Tree, info: full.ContainerField.Components) full.ContainerField {
+    const token_tags = tree.tokens.items(.tag);
+    var result: full.ContainerField = .{
+        .ast = info,
+        .comptime_token = null,
+    };
+    // comptime name: type = init,
+    // ^
+    if (info.name_token > 0) {
+        const prev = info.name_token - 1;
+        if (token_tags[prev] == .keyword_comptime) result.comptime_token = prev;
+    }
+    return result;
+}
+
+/// Shared implementation backing the four `fnProto*` accessors: scans
+/// backwards from the `fn` token for prefix tokens (`pub`, `extern` with
+/// an optional lib-name string, `export`, `inline`, `noinline`) and
+/// forwards for the optional function name and the parameter-list lparen.
+fn fullFnProto(tree: Tree, info: full.FnProto.Components) full.FnProto {
+    const token_tags = tree.tokens.items(.tag);
+    var result: full.FnProto = .{
+        .ast = info,
+        .visib_token = null,
+        .extern_export_inline_token = null,
+        .lib_name = null,
+        .name_token = null,
+        .lparen = undefined, // always assigned below
+    };
+    // Walk tokens preceding `fn`; the first unrecognized token ends the scan.
+    var i = info.fn_token;
+    while (i > 0) {
+        i -= 1;
+        switch (token_tags[i]) {
+            .keyword_extern,
+            .keyword_export,
+            .keyword_inline,
+            .keyword_noinline,
+            => result.extern_export_inline_token = i,
+            .keyword_pub => result.visib_token = i,
+            // A string literal in this position is the `extern "name"` lib name.
+            .string_literal => result.lib_name = i,
+            else => break,
+        }
+    }
+    // `fn name(` has an identifier after `fn`; anonymous protos go
+    // straight to the lparen.
+    const after_fn_token = info.fn_token + 1;
+    if (token_tags[after_fn_token] == .identifier) {
+        result.name_token = after_fn_token;
+        result.lparen = after_fn_token + 1;
+    } else {
+        result.lparen = after_fn_token;
+    }
+    assert(token_tags[result.lparen] == .l_paren);
+
+    return result;
+}
+
/// Assembles full information about a struct initialization expression.
/// `tree` is unused; it is accepted for API symmetry with the other
/// `full*` helpers.
fn fullStructInit(tree: Tree, info: full.StructInit.Components) full.StructInit {
    _ = tree;
    return .{ .ast = info };
}
+
/// Assembles full information about a pointer type expression.
/// Determines the pointer size from the main token (and its neighbors), then
/// scans the tokens between the sentinel/main token and the child type for
/// `allowzero`/`const`/`volatile` qualifiers.
fn fullPtrType(tree: Tree, info: full.PtrType.Components) full.PtrType {
    const token_tags = tree.tokens.items(.tag);
    // TODO: looks like stage1 isn't quite smart enough to handle enum
    // literals in some places here
    const Size = std.builtin.TypeInfo.Pointer.Size;
    const size: Size = switch (token_tags[info.main_token]) {
        .asterisk,
        .asterisk_asterisk,
        => switch (token_tags[info.main_token + 1]) {
            // `[*]T` / `[*:s]T`
            .r_bracket, .colon => .Many,
            // `[*c]T`: the `c` identifier only means a C pointer when the
            // asterisk is preceded by `[`.
            .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Size.C else .One,
            else => .One,
        },
        .l_bracket => Size.Slice,
        else => unreachable,
    };
    var result: full.PtrType = .{
        .size = size,
        .allowzero_token = null,
        .const_token = null,
        .volatile_token = null,
        .ast = info,
    };
    // We need to be careful that we don't iterate over any sub-expressions
    // here while looking for modifiers as that could result in false
    // positives. Therefore, start after a sentinel if there is one and
    // skip over any align node and bit range nodes.
    var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else info.main_token;
    const end = tree.firstToken(info.child_type);
    while (i < end) : (i += 1) {
        switch (token_tags[i]) {
            .keyword_allowzero => result.allowzero_token = i,
            .keyword_const => result.const_token = i,
            .keyword_volatile => result.volatile_token = i,
            .keyword_align => {
                // Jump past the align expression (and bit range, if present)
                // so its sub-tokens are not mistaken for qualifiers; the
                // loop's `i += 1` then steps over the closing `)`.
                assert(info.align_node != 0);
                if (info.bit_range_end != 0) {
                    assert(info.bit_range_start != 0);
                    i = tree.lastToken(info.bit_range_end) + 1;
                } else {
                    i = tree.lastToken(info.align_node) + 1;
                }
            },
            else => {},
        }
    }
    return result;
}
+
/// Assembles full information about a container declaration
/// (struct/enum/union/opaque), detecting an `extern` or `packed` layout
/// keyword directly before the container keyword.
fn fullContainerDecl(tree: Tree, info: full.ContainerDecl.Components) full.ContainerDecl {
    const token_tags = tree.tokens.items(.tag);
    var result: full.ContainerDecl = .{
        .ast = info,
        .layout_token = null,
    };
    // Guard against token index 0: `main_token - 1` would underflow the u32
    // index when the container keyword is the very first token. This mirrors
    // the `info.name_token > 0` check in fullContainerField.
    if (info.main_token > 0) {
        switch (token_tags[info.main_token - 1]) {
            .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
            else => {},
        }
    }
    return result;
}
+
/// Assembles full information about a switch case, detecting a payload
/// capture (`|x|`) directly after the `=>` token.
fn fullSwitchCase(tree: Tree, info: full.SwitchCase.Components) full.SwitchCase {
    const token_tags = tree.tokens.items(.tag);
    const has_payload = token_tags[info.arrow_token + 1] == .pipe;
    return .{
        .ast = info,
        .payload_token = if (has_payload) info.arrow_token + 2 else null,
    };
}
+
/// Assembles full information about an inline assembly expression: the
/// `volatile` keyword, the split of `items` into outputs/inputs, and the
/// token index of the first clobber string, if any.
fn fullAsm(tree: Tree, info: full.Asm.Components) full.Asm {
    const token_tags = tree.tokens.items(.tag);
    const node_tags = tree.nodes.items(.tag);
    var result: full.Asm = .{
        .ast = info,
        .volatile_token = null,
        .inputs = &.{},
        .outputs = &.{},
        .first_clobber = null,
    };
    if (token_tags[info.asm_token + 1] == .keyword_volatile) {
        result.volatile_token = info.asm_token + 1;
    }
    // Outputs always come first in the items list; the first node that is
    // not an asm_output marks the start of the inputs.
    const outputs_end: usize = for (info.items) |item, i| {
        switch (node_tags[item]) {
            .asm_output => continue,
            else => break i,
        }
    } else info.items.len;

    result.outputs = info.items[0..outputs_end];
    result.inputs = info.items[outputs_end..];

    // Locate the first clobber string. The number of `:` separators to skip
    // depends on whether there are any items, and whether inputs exist.
    if (info.items.len == 0) {
        // asm ("foo" ::: "a", "b");
        const template_token = tree.lastToken(info.template);
        if (token_tags[template_token + 1] == .colon and
            token_tags[template_token + 2] == .colon and
            token_tags[template_token + 3] == .colon and
            token_tags[template_token + 4] == .string_literal)
        {
            result.first_clobber = template_token + 4;
        }
    } else if (result.inputs.len != 0) {
        // asm ("foo" :: [_] "" (y) : "a", "b");
        const last_input = result.inputs[result.inputs.len - 1];
        const rparen = tree.lastToken(last_input);
        var i = rparen + 1;
        // Allow a (useless) comma right after the closing parenthesis.
        if (token_tags[i] == .comma) i += 1;
        if (token_tags[i] == .colon and
            token_tags[i + 1] == .string_literal)
        {
            result.first_clobber = i + 1;
        }
    } else {
        // asm ("foo" : [_] "" (x) :: "a", "b");
        const last_output = result.outputs[result.outputs.len - 1];
        const rparen = tree.lastToken(last_output);
        var i = rparen + 1;
        // Allow a (useless) comma right after the closing parenthesis.
        if (token_tags[i] == .comma) i += 1;
        if (token_tags[i] == .colon and
            token_tags[i + 1] == .colon and
            token_tags[i + 2] == .string_literal)
        {
            result.first_clobber = i + 2;
        }
    }

    return result;
}
+
/// Assembles full information about a while/for loop: `inline` keyword,
/// label, payload capture, and — when there is an else branch — the `else`
/// token and its error capture.
fn fullWhile(tree: Tree, info: full.While.Components) full.While {
    const token_tags = tree.tokens.items(.tag);
    var result: full.While = .{
        .ast = info,
        .inline_token = null,
        .label_token = null,
        .payload_token = null,
        .else_token = undefined,
        .error_token = null,
    };
    // Scan backwards from the loop keyword for `inline`, then `label:`.
    // NOTE(review): assumes while_token > 0; a loop keyword cannot be the
    // first token of a valid file, but this subtraction would underflow
    // otherwise — confirm against how callers obtain `info`.
    var tok_i = info.while_token - 1;
    if (token_tags[tok_i] == .keyword_inline) {
        result.inline_token = tok_i;
        tok_i -= 1;
    }
    if (token_tags[tok_i] == .colon and
        token_tags[tok_i - 1] == .identifier)
    {
        result.label_token = tok_i - 1;
    }
    // while (cond) |x|
    //              ^ last_cond_token + 1 is `)`, + 2 may be the payload `|`.
    const last_cond_token = tree.lastToken(info.cond_expr);
    if (token_tags[last_cond_token + 2] == .pipe) {
        result.payload_token = last_cond_token + 3;
    }
    if (info.else_expr != 0) {
        // then_expr else |x|
        //           ^    ^
        result.else_token = tree.lastToken(info.then_expr) + 1;
        if (token_tags[result.else_token + 1] == .pipe) {
            result.error_token = result.else_token + 2;
        }
    }
    return result;
}
+
/// Assembles full information about a call expression, detecting an `async`
/// keyword directly before the callee expression.
fn fullCall(tree: Tree, info: full.Call.Components) full.Call {
    const token_tags = tree.tokens.items(.tag);
    var result: full.Call = .{
        .ast = info,
        .async_token = null,
    };
    // Guard against token index 0: `first_token - 1` would underflow the
    // u32 index when the callee is the very first token of the file.
    const first_token = tree.firstToken(info.fn_expr);
    if (first_token != 0 and token_tags[first_token - 1] == .keyword_async) {
        result.async_token = first_token - 1;
    }
    return result;
}
+
+/// Fully assembled AST node information.
pub const full = struct {
    /// A variable declaration with all surrounding modifier tokens resolved.
    pub const VarDecl = struct {
        visib_token: ?TokenIndex,
        extern_export_token: ?TokenIndex,
        lib_name: ?TokenIndex,
        threadlocal_token: ?TokenIndex,
        comptime_token: ?TokenIndex,
        ast: Components,

        // A node index of 0 means the corresponding component is absent.
        pub const Components = struct {
            mut_token: TokenIndex,
            type_node: Node.Index,
            align_node: Node.Index,
            section_node: Node.Index,
            init_node: Node.Index,
        };
    };

    /// An if expression/statement with payload and else information resolved.
    pub const If = struct {
        /// Points to the first token after the `|`. Will either be an identifier or
        /// a `*` (with an identifier immediately after it).
        payload_token: ?TokenIndex,
        /// Points to the identifier after the `|`.
        error_token: ?TokenIndex,
        /// Populated only if else_expr != 0.
        else_token: TokenIndex,
        ast: Components,

        pub const Components = struct {
            if_token: TokenIndex,
            cond_expr: Node.Index,
            then_expr: Node.Index,
            else_expr: Node.Index,
        };
    };

    /// A while/for loop with inline/label/payload information resolved.
    pub const While = struct {
        ast: Components,
        inline_token: ?TokenIndex,
        label_token: ?TokenIndex,
        payload_token: ?TokenIndex,
        error_token: ?TokenIndex,
        /// Populated only if else_expr != 0.
        else_token: TokenIndex,

        pub const Components = struct {
            while_token: TokenIndex,
            cond_expr: Node.Index,
            cont_expr: Node.Index,
            then_expr: Node.Index,
            else_expr: Node.Index,
        };
    };

    /// A container field with its optional `comptime` keyword resolved.
    pub const ContainerField = struct {
        comptime_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            name_token: TokenIndex,
            type_expr: Node.Index,
            value_expr: Node.Index,
            align_expr: Node.Index,
        };
    };

    /// A function prototype with all surrounding modifier tokens resolved.
    pub const FnProto = struct {
        visib_token: ?TokenIndex,
        extern_export_inline_token: ?TokenIndex,
        lib_name: ?TokenIndex,
        name_token: ?TokenIndex,
        lparen: TokenIndex,
        ast: Components,

        pub const Components = struct {
            proto_node: Node.Index,
            fn_token: TokenIndex,
            return_type: Node.Index,
            params: []const Node.Index,
            align_expr: Node.Index,
            section_expr: Node.Index,
            callconv_expr: Node.Index,
        };

        pub const Param = struct {
            first_doc_comment: ?TokenIndex,
            name_token: ?TokenIndex,
            comptime_noalias: ?TokenIndex,
            /// Set (and type_expr is 0) for `anytype` and `...` parameters,
            /// which have no AST node.
            anytype_ellipsis3: ?TokenIndex,
            type_expr: Node.Index,
        };

        /// Abstracts over the fact that anytype and ... are not included
        /// in the params slice, since they are simple identifiers and
        /// not sub-expressions.
        pub const Iterator = struct {
            tree: *const Tree,
            fn_proto: *const FnProto,
            /// Index of the next parameter to consume from `fn_proto.ast.params`.
            param_i: usize,
            /// Token cursor used while scanning for anytype/... parameters.
            tok_i: TokenIndex,
            /// When true, scan tokens at `tok_i` for an anytype/`...`
            /// parameter before consuming the next AST parameter node;
            /// when false, consume the next node from `ast.params` directly.
            tok_flag: bool,

            pub fn next(it: *Iterator) ?Param {
                const token_tags = it.tree.tokens.items(.tag);
                while (true) {
                    var first_doc_comment: ?TokenIndex = null;
                    var comptime_noalias: ?TokenIndex = null;
                    var name_token: ?TokenIndex = null;
                    if (!it.tok_flag) {
                        if (it.param_i >= it.fn_proto.ast.params.len) {
                            return null;
                        }
                        const param_type = it.fn_proto.ast.params[it.param_i];
                        // Scan backwards from the type expression for the
                        // name, doc comments, and comptime/noalias keywords.
                        var tok_i = it.tree.firstToken(param_type) - 1;
                        while (true) : (tok_i -= 1) switch (token_tags[tok_i]) {
                            .colon => continue,
                            .identifier => name_token = tok_i,
                            .doc_comment => first_doc_comment = tok_i,
                            .keyword_comptime, .keyword_noalias => comptime_noalias = tok_i,
                            else => break,
                        };
                        it.param_i += 1;
                        it.tok_i = it.tree.lastToken(param_type) + 1;
                        // Look for anytype and ... params afterwards.
                        if (token_tags[it.tok_i] == .comma) {
                            it.tok_i += 1;
                        }
                        it.tok_flag = true;
                        return Param{
                            .first_doc_comment = first_doc_comment,
                            .comptime_noalias = comptime_noalias,
                            .name_token = name_token,
                            .anytype_ellipsis3 = null,
                            .type_expr = param_type,
                        };
                    }
                    if (token_tags[it.tok_i] == .comma) {
                        it.tok_i += 1;
                    }
                    if (token_tags[it.tok_i] == .r_paren) {
                        return null;
                    }
                    if (token_tags[it.tok_i] == .doc_comment) {
                        first_doc_comment = it.tok_i;
                        while (token_tags[it.tok_i] == .doc_comment) {
                            it.tok_i += 1;
                        }
                    }
                    switch (token_tags[it.tok_i]) {
                        .ellipsis3 => {
                            it.tok_flag = false; // Next iteration should return null.
                            return Param{
                                .first_doc_comment = first_doc_comment,
                                .comptime_noalias = null,
                                .name_token = null,
                                .anytype_ellipsis3 = it.tok_i,
                                .type_expr = 0,
                            };
                        },
                        .keyword_noalias, .keyword_comptime => {
                            comptime_noalias = it.tok_i;
                            it.tok_i += 1;
                        },
                        else => {},
                    }
                    if (token_tags[it.tok_i] == .identifier and
                        token_tags[it.tok_i + 1] == .colon)
                    {
                        name_token = it.tok_i;
                        it.tok_i += 2;
                    }
                    if (token_tags[it.tok_i] == .keyword_anytype) {
                        it.tok_i += 1;
                        return Param{
                            .first_doc_comment = first_doc_comment,
                            .comptime_noalias = comptime_noalias,
                            .name_token = name_token,
                            .anytype_ellipsis3 = it.tok_i - 1,
                            .type_expr = 0,
                        };
                    }
                    // Not a token-only parameter; consume the next AST node.
                    it.tok_flag = false;
                }
            }
        };

        /// Returns an iterator over the prototype's parameters, including
        /// anytype and `...` parameters that have no AST node.
        /// FIXME(review): this stores the addresses of the `fn_proto` and
        /// `tree` parameters, which are stack-local copies whose lifetime
        /// ends when this function returns, so the returned Iterator holds
        /// dangling pointers. The signature should take `*const` pointers
        /// instead; confirm all call sites before changing.
        pub fn iterate(fn_proto: FnProto, tree: Tree) Iterator {
            return .{
                .tree = &tree,
                .fn_proto = &fn_proto,
                .param_i = 0,
                .tok_i = fn_proto.lparen + 1,
                .tok_flag = true,
            };
        }
    };

    /// A struct initialization expression: `T{ ... }` or `.{ ... }`.
    pub const StructInit = struct {
        ast: Components,

        pub const Components = struct {
            lbrace: TokenIndex,
            fields: []const Node.Index,
            type_expr: Node.Index,
        };
    };

    /// An array initialization expression: `T{ ... }` or `.{ ... }`.
    pub const ArrayInit = struct {
        ast: Components,

        pub const Components = struct {
            lbrace: TokenIndex,
            elements: []const Node.Index,
            type_expr: Node.Index,
        };
    };

    /// An array type expression, with optional sentinel.
    pub const ArrayType = struct {
        ast: Components,

        pub const Components = struct {
            lbracket: TokenIndex,
            elem_count: Node.Index,
            sentinel: Node.Index,
            elem_type: Node.Index,
        };
    };

    /// A pointer type expression with size and qualifier tokens resolved.
    pub const PtrType = struct {
        size: std.builtin.TypeInfo.Pointer.Size,
        allowzero_token: ?TokenIndex,
        const_token: ?TokenIndex,
        volatile_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            main_token: TokenIndex,
            align_node: Node.Index,
            sentinel: Node.Index,
            bit_range_start: Node.Index,
            bit_range_end: Node.Index,
            child_type: Node.Index,
        };
    };

    /// A slice expression: `lhs[start..end]`, optionally with a sentinel.
    pub const Slice = struct {
        ast: Components,

        pub const Components = struct {
            sliced: Node.Index,
            lbracket: TokenIndex,
            start: Node.Index,
            end: Node.Index,
            sentinel: Node.Index,
        };
    };

    /// A container declaration with its optional layout keyword resolved.
    pub const ContainerDecl = struct {
        layout_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            main_token: TokenIndex,
            /// Populated when main_token is Keyword_union.
            enum_token: ?TokenIndex,
            members: []const Node.Index,
            arg: Node.Index,
        };
    };

    /// A switch case with its optional payload capture resolved.
    pub const SwitchCase = struct {
        /// Points to the first token after the `|`. Will either be an identifier or
        /// a `*` (with an identifier immediately after it).
        payload_token: ?TokenIndex,
        ast: Components,

        pub const Components = struct {
            /// If empty, this is an else case
            values: []const Node.Index,
            arrow_token: TokenIndex,
            target_expr: Node.Index,
        };
    };

    /// An inline assembly expression with outputs/inputs/clobbers resolved.
    pub const Asm = struct {
        ast: Components,
        volatile_token: ?TokenIndex,
        first_clobber: ?TokenIndex,
        outputs: []const Node.Index,
        inputs: []const Node.Index,

        pub const Components = struct {
            asm_token: TokenIndex,
            template: Node.Index,
            items: []const Node.Index,
            rparen: TokenIndex,
        };
    };

    /// A call expression with its optional `async` keyword resolved.
    pub const Call = struct {
        ast: Components,
        async_token: ?TokenIndex,

        pub const Components = struct {
            lparen: TokenIndex,
            fn_expr: Node.Index,
            params: []const Node.Index,
        };
    };
};
+
/// A parse error, attached to the token at which it was detected.
pub const Error = struct {
    tag: Tag,
    /// The token at which the error occurred.
    token: TokenIndex,
    /// Extra payload; which field is active depends on `tag` (see the
    /// comments on individual tags). Defaults to `none`.
    extra: union {
        none: void,
        expected_tag: Token.Tag,
    } = .{ .none = {} },

    pub const Tag = enum {
        asterisk_after_ptr_deref,
        decl_between_fields,
        expected_block,
        expected_block_or_assignment,
        expected_block_or_expr,
        expected_block_or_field,
        expected_container_members,
        expected_expr,
        expected_expr_or_assignment,
        expected_fn,
        expected_inlinable,
        expected_labelable,
        expected_param_list,
        expected_prefix_expr,
        expected_primary_type_expr,
        expected_pub_item,
        expected_return_type,
        expected_semi_or_else,
        expected_semi_or_lbrace,
        expected_statement,
        expected_string_literal,
        expected_suffix_op,
        expected_type_expr,
        expected_var_decl,
        expected_var_decl_or_fn,
        expected_loop_payload,
        expected_container,
        extra_align_qualifier,
        extra_allowzero_qualifier,
        extra_const_qualifier,
        extra_volatile_qualifier,
        ptr_mod_on_array_child_type,
        invalid_bit_range,
        invalid_token,
        same_line_doc_comment,
        unattached_doc_comment,
        varargs_nonfinal,

        /// `expected_tag` is populated.
        expected_token,
    };
};
+
+pub const Node = struct {
+    tag: Tag,
+    main_token: TokenIndex,
+    data: Data,
+
+    pub const Index = u32;
+
+    comptime {
+        // Goal is to keep this under one byte for efficiency.
+        assert(@sizeOf(Tag) == 1);
+    }
+
+    /// Note: The FooComma/FooSemicolon variants exist to ease the implementation of
+    /// Tree.lastToken()
+    pub const Tag = enum {
+        /// sub_list[lhs...rhs]
+        root,
+        /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`.
+        @"usingnamespace",
+        /// lhs is test name token (must be string literal), if any.
+        /// rhs is the body node.
+        test_decl,
+        /// lhs is the index into extra_data.
+        /// rhs is the initialization expression, if any.
+        /// main_token is `var` or `const`.
+        global_var_decl,
+        /// `var a: x align(y) = rhs`
+        /// lhs is the index into extra_data.
+        /// main_token is `var` or `const`.
+        local_var_decl,
+        /// `var a: lhs = rhs`. lhs and rhs may be unused.
+        /// Can be local or global.
+        /// main_token is `var` or `const`.
+        simple_var_decl,
+        /// `var a align(lhs) = rhs`. lhs and rhs may be unused.
+        /// Can be local or global.
+        /// main_token is `var` or `const`.
+        aligned_var_decl,
+        /// lhs is the identifier token payload if any,
+        /// rhs is the deferred expression.
+        @"errdefer",
+        /// lhs is unused.
+        /// rhs is the deferred expression.
+        @"defer",
+        /// lhs catch rhs
+        /// lhs catch |err| rhs
+        /// main_token is the `catch` keyword.
+        /// payload is determined by looking at the next token after the `catch` keyword.
+        @"catch",
+        /// `lhs.a`. main_token is the dot. rhs is the identifier token index.
+        field_access,
+        /// `lhs.?`. main_token is the dot. rhs is the `?` token index.
+        unwrap_optional,
+        /// `lhs == rhs`. main_token is op.
+        equal_equal,
+        /// `lhs != rhs`. main_token is op.
+        bang_equal,
+        /// `lhs < rhs`. main_token is op.
+        less_than,
+        /// `lhs > rhs`. main_token is op.
+        greater_than,
+        /// `lhs <= rhs`. main_token is op.
+        less_or_equal,
+        /// `lhs >= rhs`. main_token is op.
+        greater_or_equal,
+        /// `lhs *= rhs`. main_token is op.
+        assign_mul,
+        /// `lhs /= rhs`. main_token is op.
+        assign_div,
+        /// `lhs *= rhs`. main_token is op.
+        assign_mod,
+        /// `lhs += rhs`. main_token is op.
+        assign_add,
+        /// `lhs -= rhs`. main_token is op.
+        assign_sub,
+        /// `lhs <<= rhs`. main_token is op.
+        assign_bit_shift_left,
+        /// `lhs >>= rhs`. main_token is op.
+        assign_bit_shift_right,
+        /// `lhs &= rhs`. main_token is op.
+        assign_bit_and,
+        /// `lhs ^= rhs`. main_token is op.
+        assign_bit_xor,
+        /// `lhs |= rhs`. main_token is op.
+        assign_bit_or,
+        /// `lhs *%= rhs`. main_token is op.
+        assign_mul_wrap,
+        /// `lhs +%= rhs`. main_token is op.
+        assign_add_wrap,
+        /// `lhs -%= rhs`. main_token is op.
+        assign_sub_wrap,
+        /// `lhs = rhs`. main_token is op.
+        assign,
+        /// `lhs || rhs`. main_token is the `||`.
+        merge_error_sets,
+        /// `lhs * rhs`. main_token is the `*`.
+        mul,
+        /// `lhs / rhs`. main_token is the `/`.
+        div,
+        /// `lhs % rhs`. main_token is the `%`.
+        mod,
+        /// `lhs ** rhs`. main_token is the `**`.
+        array_mult,
+        /// `lhs *% rhs`. main_token is the `*%`.
+        mul_wrap,
+        /// `lhs + rhs`. main_token is the `+`.
+        add,
+        /// `lhs - rhs`. main_token is the `-`.
+        sub,
+        /// `lhs ++ rhs`. main_token is the `++`.
+        array_cat,
+        /// `lhs +% rhs`. main_token is the `+%`.
+        add_wrap,
+        /// `lhs -% rhs`. main_token is the `-%`.
+        sub_wrap,
+        /// `lhs << rhs`. main_token is the `<<`.
+        bit_shift_left,
+        /// `lhs >> rhs`. main_token is the `>>`.
+        bit_shift_right,
+        /// `lhs & rhs`. main_token is the `&`.
+        bit_and,
+        /// `lhs ^ rhs`. main_token is the `^`.
+        bit_xor,
+        /// `lhs | rhs`. main_token is the `|`.
+        bit_or,
+        /// `lhs orelse rhs`. main_token is the `orelse`.
+        @"orelse",
+        /// `lhs and rhs`. main_token is the `and`.
+        bool_and,
+        /// `lhs or rhs`. main_token is the `or`.
+        bool_or,
+        /// `op lhs`. rhs unused. main_token is op.
+        bool_not,
+        /// `op lhs`. rhs unused. main_token is op.
+        negation,
+        /// `op lhs`. rhs unused. main_token is op.
+        bit_not,
+        /// `op lhs`. rhs unused. main_token is op.
+        negation_wrap,
+        /// `op lhs`. rhs unused. main_token is op.
+        address_of,
+        /// `op lhs`. rhs unused. main_token is op.
+        @"try",
+        /// `op lhs`. rhs unused. main_token is op.
+        @"await",
+        /// `?lhs`. rhs unused. main_token is the `?`.
+        optional_type,
+        /// `[lhs]rhs`.
+        array_type,
+        /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`.
+        array_type_sentinel,
+        /// `[*]align(lhs) rhs`. lhs can be omitted.
+        /// `*align(lhs) rhs`. lhs can be omitted.
+        /// `[]rhs`.
+        /// main_token is the asterisk if a pointer or the lbracket if a slice
+        /// main_token might be a ** token, which is shared with a parent/child
+        /// pointer type and may require special handling.
+        ptr_type_aligned,
+        /// `[*:lhs]rhs`. lhs can be omitted.
+        /// `*rhs`.
+        /// `[:lhs]rhs`.
+        /// main_token is the asterisk if a pointer or the lbracket if a slice
+        /// main_token might be a ** token, which is shared with a parent/child
+        /// pointer type and may require special handling.
+        ptr_type_sentinel,
+        /// lhs is index into ptr_type. rhs is the element type expression.
+        /// main_token is the asterisk if a pointer or the lbracket if a slice
+        /// main_token might be a ** token, which is shared with a parent/child
+        /// pointer type and may require special handling.
+        ptr_type,
+        /// lhs is index into ptr_type_bit_range. rhs is the element type expression.
+        /// main_token is the asterisk if a pointer or the lbracket if a slice
+        /// main_token might be a ** token, which is shared with a parent/child
+        /// pointer type and may require special handling.
+        ptr_type_bit_range,
+        /// `lhs[rhs..]`
+        /// main_token is the lbracket.
+        slice_open,
+        /// `lhs[b..c]`. rhs is index into Slice
+        /// main_token is the lbracket.
+        slice,
+        /// `lhs[b..c :d]`. rhs is index into SliceSentinel
+        /// main_token is the lbracket.
+        slice_sentinel,
+        /// `lhs.*`. rhs is unused.
+        deref,
+        /// `lhs[rhs]`.
+        array_access,
+        /// `lhs{rhs}`. rhs can be omitted.
+        array_init_one,
+        /// `lhs{rhs,}`. rhs can *not* be omitted
+        array_init_one_comma,
+        /// `.{lhs, rhs}`. lhs and rhs can be omitted.
+        array_init_dot_two,
+        /// Same as `array_init_dot_two` except there is known to be a trailing comma
+        /// before the final rbrace.
+        array_init_dot_two_comma,
+        /// `.{a, b}`. `sub_list[lhs..rhs]`.
+        array_init_dot,
+        /// Same as `array_init_dot` except there is known to be a trailing comma
+        /// before the final rbrace.
+        array_init_dot_comma,
+        /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`.
+        array_init,
+        /// Same as `array_init` except there is known to be a trailing comma
+        /// before the final rbrace.
+        array_init_comma,
+        /// `lhs{.a = rhs}`. rhs can be omitted making it empty.
+        /// main_token is the lbrace.
+        struct_init_one,
+        /// `lhs{.a = rhs,}`. rhs can *not* be omitted.
+        /// main_token is the lbrace.
+        struct_init_one_comma,
+        /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted.
+        /// main_token is the lbrace.
+        /// No trailing comma before the rbrace.
+        struct_init_dot_two,
+        /// Same as `struct_init_dot_two` except there is known to be a trailing comma
+        /// before the final rbrace.
+        struct_init_dot_two_comma,
+        /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`.
+        /// main_token is the lbrace.
+        struct_init_dot,
+        /// Same as `struct_init_dot` except there is known to be a trailing comma
+        /// before the final rbrace.
+        struct_init_dot_comma,
+        /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`.
+        /// lhs can be omitted which means `.{.a = b, .c = d}`.
+        /// main_token is the lbrace.
+        struct_init,
+        /// Same as `struct_init` except there is known to be a trailing comma
+        /// before the final rbrace.
+        struct_init_comma,
+        /// `lhs(rhs)`. rhs can be omitted.
+        /// main_token is the lparen.
+        call_one,
+        /// `lhs(rhs,)`. rhs can be omitted.
+        /// main_token is the lparen.
+        call_one_comma,
+        /// `async lhs(rhs)`. rhs can be omitted.
+        async_call_one,
+        /// `async lhs(rhs,)`.
+        async_call_one_comma,
+        /// `lhs(a, b, c)`. `SubRange[rhs]`.
+        /// main_token is the `(`.
+        call,
+        /// `lhs(a, b, c,)`. `SubRange[rhs]`.
+        /// main_token is the `(`.
+        call_comma,
+        /// `async lhs(a, b, c)`. `SubRange[rhs]`.
+        /// main_token is the `(`.
+        async_call,
+        /// `async lhs(a, b, c,)`. `SubRange[rhs]`.
+        /// main_token is the `(`.
+        async_call_comma,
+        /// `switch(lhs) {}`. `SubRange[rhs]`.
+        @"switch",
+        /// Same as switch except there is known to be a trailing comma
+        /// before the final rbrace
+        switch_comma,
+        /// `lhs => rhs`. If lhs is omitted it means `else`.
+        /// main_token is the `=>`
+        switch_case_one,
+        /// `a, b, c => rhs`. `SubRange[lhs]`.
+        /// main_token is the `=>`
+        switch_case,
+        /// `lhs...rhs`.
+        switch_range,
+        /// `while (lhs) rhs`.
+        /// `while (lhs) |x| rhs`.
+        while_simple,
+        /// `while (lhs) : (a) b`. `WhileCont[rhs]`.
+        /// `while (lhs) : (a) b`. `WhileCont[rhs]`.
+        while_cont,
+        /// `while (lhs) : (a) b else c`. `While[rhs]`.
+        /// `while (lhs) |x| : (a) b else c`. `While[rhs]`.
+        /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`.
+        @"while",
+        /// `for (lhs) rhs`.
+        for_simple,
+        /// `for (lhs) a else b`. `if_list[rhs]`.
+        @"for",
+        /// `if (lhs) rhs`.
+        /// `if (lhs) |a| rhs`.
+        if_simple,
+        /// `if (lhs) a else b`. `If[rhs]`.
+        /// `if (lhs) |x| a else b`. `If[rhs]`.
+        /// `if (lhs) |x| a else |y| b`. `If[rhs]`.
+        @"if",
+        /// `suspend lhs`. lhs can be omitted. rhs is unused.
+        @"suspend",
+        /// `resume lhs`. rhs is unused.
+        @"resume",
+        /// `continue`. lhs is token index of label if any. rhs is unused.
+        @"continue",
+        /// `break :lhs rhs`
+        /// both lhs and rhs may be omitted.
+        @"break",
+        /// `return lhs`. lhs can be omitted. rhs is unused.
+        @"return",
+        /// `fn(a: lhs) rhs`. lhs can be omitted.
+        /// anytype and ... parameters are omitted from the AST tree.
+        /// main_token is the `fn` keyword.
+        /// extern function declarations use this tag.
+        fn_proto_simple,
+        /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`.
+        /// anytype and ... parameters are omitted from the AST tree.
+        /// main_token is the `fn` keyword.
+        /// extern function declarations use this tag.
+        fn_proto_multi,
+        /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`.
+        /// zero or one parameters.
+        /// anytype and ... parameters are omitted from the AST tree.
+        /// main_token is the `fn` keyword.
+        /// extern function declarations use this tag.
+        fn_proto_one,
+        /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`.
+        /// anytype and ... parameters are omitted from the AST tree.
+        /// main_token is the `fn` keyword.
+        /// extern function declarations use this tag.
+        fn_proto,
+        /// lhs is the fn_proto.
+        /// rhs is the function body block.
+        /// Note that extern function declarations use the fn_proto tags rather
+        /// than this one.
+        fn_decl,
+        /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index.
+        anyframe_type,
+        /// Both lhs and rhs unused.
+        anyframe_literal,
+        /// Both lhs and rhs unused.
+        char_literal,
+        /// Both lhs and rhs unused.
+        integer_literal,
+        /// Both lhs and rhs unused.
+        float_literal,
+        /// Both lhs and rhs unused.
+        unreachable_literal,
+        /// Both lhs and rhs unused.
+        /// Most identifiers will not have explicit AST nodes, however for expressions
+        /// which could be one of many different kinds of AST nodes, there will be an
+        /// identifier AST node for it.
+        identifier,
+        /// lhs is the dot token index, rhs unused, main_token is the identifier.
+        enum_literal,
+        /// main_token is the string literal token
+        /// Both lhs and rhs unused.
+        string_literal,
+        /// main_token is the first token index (redundant with lhs)
+        /// lhs is the first token index; rhs is the last token index.
+        /// Could be a series of multiline_string_literal_line tokens, or a single
+        /// string_literal token.
+        multiline_string_literal,
+        /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`.
+        grouped_expression,
+        /// `@a(lhs, rhs)`. lhs and rhs may be omitted.
+        /// main_token is the builtin token.
+        builtin_call_two,
+        /// Same as builtin_call_two but there is known to be a trailing comma before the rparen.
+        builtin_call_two_comma,
+        /// `@a(b, c)`. `sub_list[lhs..rhs]`.
+        /// main_token is the builtin token.
+        builtin_call,
+        /// Same as builtin_call but there is known to be a trailing comma before the rparen.
+        builtin_call_comma,
+        /// `error{a, b}`.
+        /// rhs is the rbrace, lhs is unused.
+        error_set_decl,
+        /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`.
+        /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
+        container_decl,
+        /// Same as ContainerDecl but there is known to be a trailing comma
+        /// or semicolon before the rbrace.
+        container_decl_trailing,
+        /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`.
+        /// lhs or rhs can be omitted.
+        /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
+        container_decl_two,
+        /// Same as ContainerDeclTwo except there is known to be a trailing comma
+        /// or semicolon before the rbrace.
+        container_decl_two_trailing,
+        /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`.
+        container_decl_arg,
+        /// Same as container_decl_arg but there is known to be a trailing
+        /// comma or semicolon before the rbrace.
+        container_decl_arg_trailing,
+        /// `union(enum) {}`. `sub_list[lhs..rhs]`.
+        /// Note that tagged unions with explicitly provided enums are represented
+        /// by `container_decl_arg`.
+        tagged_union,
+        /// Same as tagged_union but there is known to be a trailing comma
+        /// or semicolon before the rbrace.
+        tagged_union_trailing,
+        /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted.
+        /// Note that tagged unions with explicitly provided enums are represented
+        /// by `container_decl_arg`.
+        tagged_union_two,
+        /// Same as tagged_union_two but there is known to be a trailing comma
+        /// or semicolon before the rbrace.
+        tagged_union_two_trailing,
+        /// `union(enum(lhs)) {}`. `SubRange[rhs]`.
+        tagged_union_enum_tag,
+        /// Same as tagged_union_enum_tag but there is known to be a trailing comma
+        /// or semicolon before the rbrace.
+        tagged_union_enum_tag_trailing,
+        /// `a: lhs = rhs,`. lhs and rhs can be omitted.
+        /// main_token is the field name identifier.
+        /// lastToken() does not include the possible trailing comma.
+        container_field_init,
+        /// `a: lhs align(rhs),`. rhs can be omitted.
+        /// main_token is the field name identifier.
+        /// lastToken() does not include the possible trailing comma.
+        container_field_align,
+        /// `a: lhs align(c) = d,`. `container_field_list[rhs]`.
+        /// main_token is the field name identifier.
+        /// lastToken() does not include the possible trailing comma.
+        container_field,
+        /// `anytype`. both lhs and rhs unused.
+        /// Used by `ContainerField`.
+        @"anytype",
+        /// `comptime lhs`. rhs unused.
+        @"comptime",
+        /// `nosuspend lhs`. rhs unused.
+        @"nosuspend",
+        /// `{lhs rhs}`. rhs or lhs can be omitted.
+        /// main_token points at the lbrace.
+        block_two,
+        /// Same as block_two but there is known to be a semicolon before the rbrace.
+        block_two_semicolon,
+        /// `{}`. `sub_list[lhs..rhs]`.
+        /// main_token points at the lbrace.
+        block,
+        /// Same as block but there is known to be a semicolon before the rbrace.
+        block_semicolon,
+        /// `asm(lhs)`. rhs is the token index of the rparen.
+        asm_simple,
+        /// `asm(lhs, a)`. `Asm[rhs]`.
+        @"asm",
+        /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen.
+        /// `[a] "b" (-> lhs)`. rhs is token index of the rparen.
+        /// main_token is `a`.
+        asm_output,
+        /// `[a] "b" (lhs)`. rhs is token index of the rparen.
+        /// main_token is `a`.
+        asm_input,
+        /// `error.a`. lhs is token index of `.`. rhs is token index of `a`.
+        error_value,
+        /// `lhs!rhs`. main_token is the `!`.
+        error_union,
+
+        pub fn isContainerField(tag: Tag) bool {
+            return switch (tag) {
+                .container_field_init,
+                .container_field_align,
+                .container_field,
+                => true,
+
+                else => false,
+            };
+        }
+    };
+
+    pub const Data = struct {
+        lhs: Index,
+        rhs: Index,
+    };
+
+    pub const LocalVarDecl = struct {
+        type_node: Index,
+        align_node: Index,
+    };
+
+    pub const ArrayTypeSentinel = struct {
+        elem_type: Index,
+        sentinel: Index,
+    };
+
+    pub const PtrType = struct {
+        sentinel: Index,
+        align_node: Index,
+    };
+
+    pub const PtrTypeBitRange = struct {
+        sentinel: Index,
+        align_node: Index,
+        bit_range_start: Index,
+        bit_range_end: Index,
+    };
+
+    pub const SubRange = struct {
+        /// Index into sub_list.
+        start: Index,
+        /// Index into sub_list.
+        end: Index,
+    };
+
+    pub const If = struct {
+        then_expr: Index,
+        else_expr: Index,
+    };
+
+    pub const ContainerField = struct {
+        value_expr: Index,
+        align_expr: Index,
+    };
+
+    pub const GlobalVarDecl = struct {
+        type_node: Index,
+        align_node: Index,
+        section_node: Index,
+    };
+
+    pub const Slice = struct {
+        start: Index,
+        end: Index,
+    };
+
+    pub const SliceSentinel = struct {
+        start: Index,
+        /// May be 0 if the slice is "open"
+        end: Index,
+        sentinel: Index,
+    };
+
+    pub const While = struct {
+        cont_expr: Index,
+        then_expr: Index,
+        else_expr: Index,
+    };
+
+    pub const WhileCont = struct {
+        cont_expr: Index,
+        then_expr: Index,
+    };
+
+    pub const FnProtoOne = struct {
+        /// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters.
+        param: Index,
+        /// Populated if align(A) is present.
+        align_expr: Index,
+        /// Populated if linksection(A) is present.
+        section_expr: Index,
+        /// Populated if callconv(A) is present.
+        callconv_expr: Index,
+    };
+
+    pub const FnProto = struct {
+        params_start: Index,
+        params_end: Index,
+        /// Populated if align(A) is present.
+        align_expr: Index,
+        /// Populated if linksection(A) is present.
+        section_expr: Index,
+        /// Populated if callconv(A) is present.
+        callconv_expr: Index,
+    };
+
+    pub const Asm = struct {
+        items_start: Index,
+        items_end: Index,
+        /// Needed to make lastToken() work.
+        rparen: TokenIndex,
+    };
+};
lib/std/zig/ast.zig
@@ -1,2978 +0,0 @@
-const std = @import("../std.zig");
-const assert = std.debug.assert;
-const testing = std.testing;
-const mem = std.mem;
-const Token = std.zig.Token;
-
-pub const TokenIndex = u32;
-pub const ByteOffset = u32;
-
-pub const TokenList = std.MultiArrayList(struct {
-    tag: Token.Tag,
-    start: ByteOffset,
-});
-pub const NodeList = std.MultiArrayList(Node);
-
-pub const Tree = struct {
-    /// Reference to externally-owned data.
-    source: [:0]const u8,
-
-    tokens: TokenList.Slice,
-    /// The root AST node is assumed to be index 0. Since there can be no
-    /// references to the root node, this means 0 is available to indicate null.
-    nodes: NodeList.Slice,
-    extra_data: []Node.Index,
-
-    errors: []const Error,
-
-    pub const Location = struct {
-        line: usize,
-        column: usize,
-        line_start: usize,
-        line_end: usize,
-    };
-
-    pub fn deinit(tree: *Tree, gpa: *mem.Allocator) void {
-        tree.tokens.deinit(gpa);
-        tree.nodes.deinit(gpa);
-        gpa.free(tree.extra_data);
-        gpa.free(tree.errors);
-        tree.* = undefined;
-    }
-
-    pub const RenderError = error{
-        /// Ran out of memory allocating call stack frames to complete rendering, or
-        /// ran out of memory allocating space in the output buffer.
-        OutOfMemory,
-    };
-
-    /// `gpa` is used for allocating the resulting formatted source code, as well as
-    /// for allocating extra stack memory if needed, because this function utilizes recursion.
-    /// Note: that's not actually true yet, see https://github.com/ziglang/zig/issues/1006.
-    /// Caller owns the returned slice of bytes, allocated with `gpa`.
-    pub fn render(tree: Tree, gpa: *mem.Allocator) RenderError![]u8 {
-        var buffer = std.ArrayList(u8).init(gpa);
-        defer buffer.deinit();
-
-        try tree.renderToArrayList(&buffer);
-        return buffer.toOwnedSlice();
-    }
-
-    pub fn renderToArrayList(tree: Tree, buffer: *std.ArrayList(u8)) RenderError!void {
-        return @import("./render.zig").renderTree(buffer, tree);
-    }
-
-    pub fn tokenLocation(self: Tree, start_offset: ByteOffset, token_index: TokenIndex) Location {
-        var loc = Location{
-            .line = 0,
-            .column = 0,
-            .line_start = start_offset,
-            .line_end = self.source.len,
-        };
-        const token_start = self.tokens.items(.start)[token_index];
-        for (self.source[start_offset..]) |c, i| {
-            if (i + start_offset == token_start) {
-                loc.line_end = i + start_offset;
-                while (loc.line_end < self.source.len and self.source[loc.line_end] != '\n') {
-                    loc.line_end += 1;
-                }
-                return loc;
-            }
-            if (c == '\n') {
-                loc.line += 1;
-                loc.column = 0;
-                loc.line_start = i + 1;
-            } else {
-                loc.column += 1;
-            }
-        }
-        return loc;
-    }
-
-    pub fn tokenSlice(tree: Tree, token_index: TokenIndex) []const u8 {
-        const token_starts = tree.tokens.items(.start);
-        const token_tags = tree.tokens.items(.tag);
-        const token_tag = token_tags[token_index];
-
-        // Many tokens can be determined entirely by their tag.
-        if (token_tag.lexeme()) |lexeme| {
-            return lexeme;
-        }
-
-        // For some tokens, re-tokenization is needed to find the end.
-        var tokenizer: std.zig.Tokenizer = .{
-            .buffer = tree.source,
-            .index = token_starts[token_index],
-            .pending_invalid_token = null,
-        };
-        const token = tokenizer.next();
-        assert(token.tag == token_tag);
-        return tree.source[token.loc.start..token.loc.end];
-    }
-
-    pub fn extraData(tree: Tree, index: usize, comptime T: type) T {
-        const fields = std.meta.fields(T);
-        var result: T = undefined;
-        inline for (fields) |field, i| {
-            comptime assert(field.field_type == Node.Index);
-            @field(result, field.name) = tree.extra_data[index + i];
-        }
-        return result;
-    }
-
-    pub fn rootDecls(tree: Tree) []const Node.Index {
-        // Root is always index 0.
-        const nodes_data = tree.nodes.items(.data);
-        return tree.extra_data[nodes_data[0].lhs..nodes_data[0].rhs];
-    }
-
-    pub fn renderError(tree: Tree, parse_error: Error, stream: anytype) !void {
-        const token_tags = tree.tokens.items(.tag);
-        switch (parse_error.tag) {
-            .asterisk_after_ptr_deref => {
-                // Note that the token will point at the `.*` but ideally the source
-                // location would point to the `*` after the `.*`.
-                return stream.writeAll("'.*' cannot be followed by '*'. Are you missing a space?");
-            },
-            .decl_between_fields => {
-                return stream.writeAll("declarations are not allowed between container fields");
-            },
-            .expected_block => {
-                return stream.print("expected block or field, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_block_or_assignment => {
-                return stream.print("expected block or assignment, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_block_or_expr => {
-                return stream.print("expected block or expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_block_or_field => {
-                return stream.print("expected block or field, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_container_members => {
-                return stream.print("expected test, comptime, var decl, or container field, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_expr => {
-                return stream.print("expected expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_expr_or_assignment => {
-                return stream.print("expected expression or assignment, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_fn => {
-                return stream.print("expected function, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_inlinable => {
-                return stream.print("expected 'while' or 'for', found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_labelable => {
-                return stream.print("expected 'while', 'for', 'inline', 'suspend', or '{{', found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_param_list => {
-                return stream.print("expected parameter list, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_prefix_expr => {
-                return stream.print("expected prefix expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_primary_type_expr => {
-                return stream.print("expected primary type expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_pub_item => {
-                return stream.writeAll("expected function or variable declaration after pub");
-            },
-            .expected_return_type => {
-                return stream.print("expected return type expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_semi_or_else => {
-                return stream.print("expected ';' or 'else', found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_semi_or_lbrace => {
-                return stream.print("expected ';' or '{{', found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_statement => {
-                return stream.print("expected statement, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_string_literal => {
-                return stream.print("expected string literal, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_suffix_op => {
-                return stream.print("expected pointer dereference, optional unwrap, or field access, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_type_expr => {
-                return stream.print("expected type expression, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_var_decl => {
-                return stream.print("expected variable declaration, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_var_decl_or_fn => {
-                return stream.print("expected variable declaration or function, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_loop_payload => {
-                return stream.print("expected loop payload, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .expected_container => {
-                return stream.print("expected a struct, enum or union, found '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .extra_align_qualifier => {
-                return stream.writeAll("extra align qualifier");
-            },
-            .extra_allowzero_qualifier => {
-                return stream.writeAll("extra allowzero qualifier");
-            },
-            .extra_const_qualifier => {
-                return stream.writeAll("extra const qualifier");
-            },
-            .extra_volatile_qualifier => {
-                return stream.writeAll("extra volatile qualifier");
-            },
-            .ptr_mod_on_array_child_type => {
-                return stream.print("pointer modifier '{s}' not allowed on array child type", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .invalid_bit_range => {
-                return stream.writeAll("bit range not allowed on slices and arrays");
-            },
-            .invalid_token => {
-                return stream.print("invalid token: '{s}'", .{
-                    token_tags[parse_error.token].symbol(),
-                });
-            },
-            .same_line_doc_comment => {
-                return stream.writeAll("same line documentation comment");
-            },
-            .unattached_doc_comment => {
-                return stream.writeAll("unattached documentation comment");
-            },
-            .varargs_nonfinal => {
-                return stream.writeAll("function prototype has parameter after varargs");
-            },
-
-            .expected_token => {
-                const found_tag = token_tags[parse_error.token];
-                const expected_symbol = parse_error.extra.expected_tag.symbol();
-                switch (found_tag) {
-                    .invalid => return stream.print("expected '{s}', found invalid bytes", .{
-                        expected_symbol,
-                    }),
-                    else => return stream.print("expected '{s}', found '{s}'", .{
-                        expected_symbol, found_tag.symbol(),
-                    }),
-                }
-            },
-        }
-    }
-
-    pub fn firstToken(tree: Tree, node: Node.Index) TokenIndex {
-        const tags = tree.nodes.items(.tag);
-        const datas = tree.nodes.items(.data);
-        const main_tokens = tree.nodes.items(.main_token);
-        const token_tags = tree.tokens.items(.tag);
-        var end_offset: TokenIndex = 0;
-        var n = node;
-        while (true) switch (tags[n]) {
-            .root => return 0,
-
-            .test_decl,
-            .@"errdefer",
-            .@"defer",
-            .bool_not,
-            .negation,
-            .bit_not,
-            .negation_wrap,
-            .address_of,
-            .@"try",
-            .@"await",
-            .optional_type,
-            .@"switch",
-            .switch_comma,
-            .if_simple,
-            .@"if",
-            .@"suspend",
-            .@"resume",
-            .@"continue",
-            .@"break",
-            .@"return",
-            .anyframe_type,
-            .identifier,
-            .anyframe_literal,
-            .char_literal,
-            .integer_literal,
-            .float_literal,
-            .unreachable_literal,
-            .string_literal,
-            .multiline_string_literal,
-            .grouped_expression,
-            .builtin_call_two,
-            .builtin_call_two_comma,
-            .builtin_call,
-            .builtin_call_comma,
-            .error_set_decl,
-            .@"anytype",
-            .@"comptime",
-            .@"nosuspend",
-            .asm_simple,
-            .@"asm",
-            .array_type,
-            .array_type_sentinel,
-            .error_value,
-            => return main_tokens[n] - end_offset,
-
-            .array_init_dot,
-            .array_init_dot_comma,
-            .array_init_dot_two,
-            .array_init_dot_two_comma,
-            .struct_init_dot,
-            .struct_init_dot_comma,
-            .struct_init_dot_two,
-            .struct_init_dot_two_comma,
-            .enum_literal,
-            => return main_tokens[n] - 1 - end_offset,
-
-            .@"catch",
-            .field_access,
-            .unwrap_optional,
-            .equal_equal,
-            .bang_equal,
-            .less_than,
-            .greater_than,
-            .less_or_equal,
-            .greater_or_equal,
-            .assign_mul,
-            .assign_div,
-            .assign_mod,
-            .assign_add,
-            .assign_sub,
-            .assign_bit_shift_left,
-            .assign_bit_shift_right,
-            .assign_bit_and,
-            .assign_bit_xor,
-            .assign_bit_or,
-            .assign_mul_wrap,
-            .assign_add_wrap,
-            .assign_sub_wrap,
-            .assign,
-            .merge_error_sets,
-            .mul,
-            .div,
-            .mod,
-            .array_mult,
-            .mul_wrap,
-            .add,
-            .sub,
-            .array_cat,
-            .add_wrap,
-            .sub_wrap,
-            .bit_shift_left,
-            .bit_shift_right,
-            .bit_and,
-            .bit_xor,
-            .bit_or,
-            .@"orelse",
-            .bool_and,
-            .bool_or,
-            .slice_open,
-            .slice,
-            .slice_sentinel,
-            .deref,
-            .array_access,
-            .array_init_one,
-            .array_init_one_comma,
-            .array_init,
-            .array_init_comma,
-            .struct_init_one,
-            .struct_init_one_comma,
-            .struct_init,
-            .struct_init_comma,
-            .call_one,
-            .call_one_comma,
-            .call,
-            .call_comma,
-            .switch_range,
-            .error_union,
-            => n = datas[n].lhs,
-
-            .fn_decl,
-            .fn_proto_simple,
-            .fn_proto_multi,
-            .fn_proto_one,
-            .fn_proto,
-            => {
-                var i = main_tokens[n]; // fn token
-                while (i > 0) {
-                    i -= 1;
-                    switch (token_tags[i]) {
-                        .keyword_extern,
-                        .keyword_export,
-                        .keyword_pub,
-                        .keyword_inline,
-                        .keyword_noinline,
-                        .string_literal,
-                        => continue,
-
-                        else => return i + 1 - end_offset,
-                    }
-                }
-                return i - end_offset;
-            },
-
-            .@"usingnamespace" => {
-                const main_token = main_tokens[n];
-                if (main_token > 0 and token_tags[main_token - 1] == .keyword_pub) {
-                    end_offset += 1;
-                }
-                return main_token - end_offset;
-            },
-
-            .async_call_one,
-            .async_call_one_comma,
-            .async_call,
-            .async_call_comma,
-            => {
-                end_offset += 1; // async token
-                n = datas[n].lhs;
-            },
-
-            .container_field_init,
-            .container_field_align,
-            .container_field,
-            => {
-                const name_token = main_tokens[n];
-                if (name_token > 0 and token_tags[name_token - 1] == .keyword_comptime) {
-                    end_offset += 1;
-                }
-                return name_token - end_offset;
-            },
-
-            .global_var_decl,
-            .local_var_decl,
-            .simple_var_decl,
-            .aligned_var_decl,
-            => {
-                var i = main_tokens[n]; // mut token
-                while (i > 0) {
-                    i -= 1;
-                    switch (token_tags[i]) {
-                        .keyword_extern,
-                        .keyword_export,
-                        .keyword_comptime,
-                        .keyword_pub,
-                        .keyword_threadlocal,
-                        .string_literal,
-                        => continue,
-
-                        else => return i + 1 - end_offset,
-                    }
-                }
-                return i - end_offset;
-            },
-
-            .block,
-            .block_semicolon,
-            .block_two,
-            .block_two_semicolon,
-            => {
-                // Look for a label.
-                const lbrace = main_tokens[n];
-                if (token_tags[lbrace - 1] == .colon and
-                    token_tags[lbrace - 2] == .identifier)
-                {
-                    end_offset += 2;
-                }
-                return lbrace - end_offset;
-            },
-
-            .container_decl,
-            .container_decl_trailing,
-            .container_decl_two,
-            .container_decl_two_trailing,
-            .container_decl_arg,
-            .container_decl_arg_trailing,
-            .tagged_union,
-            .tagged_union_trailing,
-            .tagged_union_two,
-            .tagged_union_two_trailing,
-            .tagged_union_enum_tag,
-            .tagged_union_enum_tag_trailing,
-            => {
-                const main_token = main_tokens[n];
-                switch (token_tags[main_token - 1]) {
-                    .keyword_packed, .keyword_extern => end_offset += 1,
-                    else => {},
-                }
-                return main_token - end_offset;
-            },
-
-            .ptr_type_aligned,
-            .ptr_type_sentinel,
-            .ptr_type,
-            .ptr_type_bit_range,
-            => {
-                const main_token = main_tokens[n];
-                return switch (token_tags[main_token]) {
-                    .asterisk,
-                    .asterisk_asterisk,
-                    => switch (token_tags[main_token - 1]) {
-                        .l_bracket => main_token - 1,
-                        else => main_token,
-                    },
-                    .l_bracket => main_token,
-                    else => unreachable,
-                } - end_offset;
-            },
-
-            .switch_case_one => {
-                if (datas[n].lhs == 0) {
-                    return main_tokens[n] - 1 - end_offset; // else token
-                } else {
-                    n = datas[n].lhs;
-                }
-            },
-            .switch_case => {
-                const extra = tree.extraData(datas[n].lhs, Node.SubRange);
-                assert(extra.end - extra.start > 0);
-                n = tree.extra_data[extra.start];
-            },
-
-            .asm_output, .asm_input => {
-                assert(token_tags[main_tokens[n] - 1] == .l_bracket);
-                return main_tokens[n] - 1 - end_offset;
-            },
-
-            .while_simple,
-            .while_cont,
-            .@"while",
-            .for_simple,
-            .@"for",
-            => {
-                // Look for a label and inline.
-                const main_token = main_tokens[n];
-                var result = main_token;
-                if (token_tags[result - 1] == .keyword_inline) {
-                    result -= 1;
-                }
-                if (token_tags[result - 1] == .colon) {
-                    result -= 2;
-                }
-                return result - end_offset;
-            },
-        };
-    }
-
-    pub fn lastToken(tree: Tree, node: Node.Index) TokenIndex {
-        const tags = tree.nodes.items(.tag);
-        const datas = tree.nodes.items(.data);
-        const main_tokens = tree.nodes.items(.main_token);
-        const token_starts = tree.tokens.items(.start);
-        const token_tags = tree.tokens.items(.tag);
-        var n = node;
-        var end_offset: TokenIndex = 0;
-        while (true) switch (tags[n]) {
-            .root => return @intCast(TokenIndex, tree.tokens.len - 1),
-
-            .@"usingnamespace",
-            .bool_not,
-            .negation,
-            .bit_not,
-            .negation_wrap,
-            .address_of,
-            .@"try",
-            .@"await",
-            .optional_type,
-            .@"resume",
-            .@"nosuspend",
-            .@"comptime",
-            => n = datas[n].lhs,
-
-            .test_decl,
-            .@"errdefer",
-            .@"defer",
-            .@"catch",
-            .equal_equal,
-            .bang_equal,
-            .less_than,
-            .greater_than,
-            .less_or_equal,
-            .greater_or_equal,
-            .assign_mul,
-            .assign_div,
-            .assign_mod,
-            .assign_add,
-            .assign_sub,
-            .assign_bit_shift_left,
-            .assign_bit_shift_right,
-            .assign_bit_and,
-            .assign_bit_xor,
-            .assign_bit_or,
-            .assign_mul_wrap,
-            .assign_add_wrap,
-            .assign_sub_wrap,
-            .assign,
-            .merge_error_sets,
-            .mul,
-            .div,
-            .mod,
-            .array_mult,
-            .mul_wrap,
-            .add,
-            .sub,
-            .array_cat,
-            .add_wrap,
-            .sub_wrap,
-            .bit_shift_left,
-            .bit_shift_right,
-            .bit_and,
-            .bit_xor,
-            .bit_or,
-            .@"orelse",
-            .bool_and,
-            .bool_or,
-            .anyframe_type,
-            .error_union,
-            .if_simple,
-            .while_simple,
-            .for_simple,
-            .fn_proto_simple,
-            .fn_proto_multi,
-            .ptr_type_aligned,
-            .ptr_type_sentinel,
-            .ptr_type,
-            .ptr_type_bit_range,
-            .array_type,
-            .switch_case_one,
-            .switch_case,
-            .switch_range,
-            => n = datas[n].rhs,
-
-            .field_access,
-            .unwrap_optional,
-            .grouped_expression,
-            .multiline_string_literal,
-            .error_set_decl,
-            .asm_simple,
-            .asm_output,
-            .asm_input,
-            .error_value,
-            => return datas[n].rhs + end_offset,
-
-            .@"anytype",
-            .anyframe_literal,
-            .char_literal,
-            .integer_literal,
-            .float_literal,
-            .unreachable_literal,
-            .identifier,
-            .deref,
-            .enum_literal,
-            .string_literal,
-            => return main_tokens[n] + end_offset,
-
-            .@"return" => if (datas[n].lhs != 0) {
-                n = datas[n].lhs;
-            } else {
-                return main_tokens[n] + end_offset;
-            },
-
-            .call, .async_call => {
-                end_offset += 1; // for the rparen
-                const params = tree.extraData(datas[n].rhs, Node.SubRange);
-                if (params.end - params.start == 0) {
-                    return main_tokens[n] + end_offset;
-                }
-                n = tree.extra_data[params.end - 1]; // last parameter
-            },
-            .tagged_union_enum_tag => {
-                const members = tree.extraData(datas[n].rhs, Node.SubRange);
-                if (members.end - members.start == 0) {
-                    end_offset += 4; // for the rparen + rparen + lbrace + rbrace
-                    n = datas[n].lhs;
-                } else {
-                    end_offset += 1; // for the rbrace
-                    n = tree.extra_data[members.end - 1]; // last parameter
-                }
-            },
-            .call_comma,
-            .async_call_comma,
-            .tagged_union_enum_tag_trailing,
-            => {
-                end_offset += 2; // for the comma/semicolon + rparen/rbrace
-                const params = tree.extraData(datas[n].rhs, Node.SubRange);
-                assert(params.end > params.start);
-                n = tree.extra_data[params.end - 1]; // last parameter
-            },
-            .@"switch" => {
-                const cases = tree.extraData(datas[n].rhs, Node.SubRange);
-                if (cases.end - cases.start == 0) {
-                    end_offset += 3; // rparen, lbrace, rbrace
-                    n = datas[n].lhs; // condition expression
-                } else {
-                    end_offset += 1; // for the rbrace
-                    n = tree.extra_data[cases.end - 1]; // last case
-                }
-            },
-            .container_decl_arg => {
-                const members = tree.extraData(datas[n].rhs, Node.SubRange);
-                if (members.end - members.start == 0) {
-                    end_offset += 3; // for the rparen + lbrace + rbrace
-                    n = datas[n].lhs;
-                } else {
-                    end_offset += 1; // for the rbrace
-                    n = tree.extra_data[members.end - 1]; // last parameter
-                }
-            },
-            .@"asm" => {
-                const extra = tree.extraData(datas[n].rhs, Node.Asm);
-                return extra.rparen + end_offset;
-            },
-            .array_init,
-            .struct_init,
-            => {
-                const elements = tree.extraData(datas[n].rhs, Node.SubRange);
-                assert(elements.end - elements.start > 0);
-                end_offset += 1; // for the rbrace
-                n = tree.extra_data[elements.end - 1]; // last element
-            },
-            .array_init_comma,
-            .struct_init_comma,
-            .container_decl_arg_trailing,
-            .switch_comma,
-            => {
-                const members = tree.extraData(datas[n].rhs, Node.SubRange);
-                assert(members.end - members.start > 0);
-                end_offset += 2; // for the comma + rbrace
-                n = tree.extra_data[members.end - 1]; // last parameter
-            },
-            .array_init_dot,
-            .struct_init_dot,
-            .block,
-            .container_decl,
-            .tagged_union,
-            .builtin_call,
-            => {
-                assert(datas[n].rhs - datas[n].lhs > 0);
-                end_offset += 1; // for the rbrace
-                n = tree.extra_data[datas[n].rhs - 1]; // last statement
-            },
-            .array_init_dot_comma,
-            .struct_init_dot_comma,
-            .block_semicolon,
-            .container_decl_trailing,
-            .tagged_union_trailing,
-            .builtin_call_comma,
-            => {
-                assert(datas[n].rhs - datas[n].lhs > 0);
-                end_offset += 2; // for the comma/semicolon + rbrace/rparen
-                n = tree.extra_data[datas[n].rhs - 1]; // last member
-            },
-            .call_one,
-            .async_call_one,
-            .array_access,
-            => {
-                end_offset += 1; // for the rparen/rbracket
-                if (datas[n].rhs == 0) {
-                    return main_tokens[n] + end_offset;
-                }
-                n = datas[n].rhs;
-            },
-            .array_init_dot_two,
-            .block_two,
-            .builtin_call_two,
-            .struct_init_dot_two,
-            .container_decl_two,
-            .tagged_union_two,
-            => {
-                if (datas[n].rhs != 0) {
-                    end_offset += 1; // for the rparen/rbrace
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    end_offset += 1; // for the rparen/rbrace
-                    n = datas[n].lhs;
-                } else {
-                    switch (tags[n]) {
-                        .array_init_dot_two,
-                        .block_two,
-                        .struct_init_dot_two,
-                        => end_offset += 1, // rbrace
-                        .builtin_call_two => end_offset += 2, // lparen/lbrace + rparen/rbrace
-                        .container_decl_two => {
-                            var i: u32 = 2; // lbrace + rbrace
-                            while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
-                            end_offset += i;
-                        },
-                        .tagged_union_two => {
-                            var i: u32 = 5; // (enum) {}
-                            while (token_tags[main_tokens[n] + i] == .container_doc_comment) i += 1;
-                            end_offset += i;
-                        },
-                        else => unreachable,
-                    }
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .array_init_dot_two_comma,
-            .builtin_call_two_comma,
-            .block_two_semicolon,
-            .struct_init_dot_two_comma,
-            .container_decl_two_trailing,
-            .tagged_union_two_trailing,
-            => {
-                end_offset += 2; // for the comma/semicolon + rbrace/rparen
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    unreachable;
-                }
-            },
-            .simple_var_decl => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    end_offset += 1; // from mut token to name
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .aligned_var_decl => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    end_offset += 1; // for the rparen
-                    n = datas[n].lhs;
-                } else {
-                    end_offset += 1; // from mut token to name
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .global_var_decl => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else {
-                    const extra = tree.extraData(datas[n].lhs, Node.GlobalVarDecl);
-                    if (extra.section_node != 0) {
-                        end_offset += 1; // for the rparen
-                        n = extra.section_node;
-                    } else if (extra.align_node != 0) {
-                        end_offset += 1; // for the rparen
-                        n = extra.align_node;
-                    } else if (extra.type_node != 0) {
-                        n = extra.type_node;
-                    } else {
-                        end_offset += 1; // from mut token to name
-                        return main_tokens[n] + end_offset;
-                    }
-                }
-            },
-            .local_var_decl => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else {
-                    const extra = tree.extraData(datas[n].lhs, Node.LocalVarDecl);
-                    if (extra.align_node != 0) {
-                        end_offset += 1; // for the rparen
-                        n = extra.align_node;
-                    } else if (extra.type_node != 0) {
-                        n = extra.type_node;
-                    } else {
-                        end_offset += 1; // from mut token to name
-                        return main_tokens[n] + end_offset;
-                    }
-                }
-            },
-            .container_field_init => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .container_field_align => {
-                if (datas[n].rhs != 0) {
-                    end_offset += 1; // for the rparen
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .container_field => {
-                const extra = tree.extraData(datas[n].rhs, Node.ContainerField);
-                if (extra.value_expr != 0) {
-                    n = extra.value_expr;
-                } else if (extra.align_expr != 0) {
-                    end_offset += 1; // for the rparen
-                    n = extra.align_expr;
-                } else if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-
-            .array_init_one,
-            .struct_init_one,
-            => {
-                end_offset += 1; // rbrace
-                if (datas[n].rhs == 0) {
-                    return main_tokens[n] + end_offset;
-                } else {
-                    n = datas[n].rhs;
-                }
-            },
-            .slice_open,
-            .call_one_comma,
-            .async_call_one_comma,
-            .array_init_one_comma,
-            .struct_init_one_comma,
-            => {
-                end_offset += 2; // ellipsis2 + rbracket, or comma + rparen
-                n = datas[n].rhs;
-                assert(n != 0);
-            },
-            .slice => {
-                const extra = tree.extraData(datas[n].rhs, Node.Slice);
-                assert(extra.end != 0); // should have used slice_open
-                end_offset += 1; // rbracket
-                n = extra.end;
-            },
-            .slice_sentinel => {
-                const extra = tree.extraData(datas[n].rhs, Node.SliceSentinel);
-                assert(extra.sentinel != 0); // should have used slice
-                end_offset += 1; // rbracket
-                n = extra.sentinel;
-            },
-
-            .@"continue" => {
-                if (datas[n].lhs != 0) {
-                    return datas[n].lhs + end_offset;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .@"break" => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else if (datas[n].lhs != 0) {
-                    return datas[n].lhs + end_offset;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .fn_decl => {
-                if (datas[n].rhs != 0) {
-                    n = datas[n].rhs;
-                } else {
-                    n = datas[n].lhs;
-                }
-            },
-            .fn_proto_one => {
-                const extra = tree.extraData(datas[n].lhs, Node.FnProtoOne);
-                // linksection, callconv, align can appear in any order, so we
-                // find the last one here.
-                var max_node: Node.Index = datas[n].rhs;
-                var max_start = token_starts[main_tokens[max_node]];
-                var max_offset: TokenIndex = 0;
-                if (extra.align_expr != 0) {
-                    const start = token_starts[main_tokens[extra.align_expr]];
-                    if (start > max_start) {
-                        max_node = extra.align_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                if (extra.section_expr != 0) {
-                    const start = token_starts[main_tokens[extra.section_expr]];
-                    if (start > max_start) {
-                        max_node = extra.section_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                if (extra.callconv_expr != 0) {
-                    const start = token_starts[main_tokens[extra.callconv_expr]];
-                    if (start > max_start) {
-                        max_node = extra.callconv_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                n = max_node;
-                end_offset += max_offset;
-            },
-            .fn_proto => {
-                const extra = tree.extraData(datas[n].lhs, Node.FnProto);
-                // linksection, callconv, align can appear in any order, so we
-                // find the last one here.
-                var max_node: Node.Index = datas[n].rhs;
-                var max_start = token_starts[main_tokens[max_node]];
-                var max_offset: TokenIndex = 0;
-                if (extra.align_expr != 0) {
-                    const start = token_starts[main_tokens[extra.align_expr]];
-                    if (start > max_start) {
-                        max_node = extra.align_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                if (extra.section_expr != 0) {
-                    const start = token_starts[main_tokens[extra.section_expr]];
-                    if (start > max_start) {
-                        max_node = extra.section_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                if (extra.callconv_expr != 0) {
-                    const start = token_starts[main_tokens[extra.callconv_expr]];
-                    if (start > max_start) {
-                        max_node = extra.callconv_expr;
-                        max_start = start;
-                        max_offset = 1; // for the rparen
-                    }
-                }
-                n = max_node;
-                end_offset += max_offset;
-            },
-            .while_cont => {
-                const extra = tree.extraData(datas[n].rhs, Node.WhileCont);
-                assert(extra.then_expr != 0);
-                n = extra.then_expr;
-            },
-            .@"while" => {
-                const extra = tree.extraData(datas[n].rhs, Node.While);
-                assert(extra.else_expr != 0);
-                n = extra.else_expr;
-            },
-            .@"if", .@"for" => {
-                const extra = tree.extraData(datas[n].rhs, Node.If);
-                assert(extra.else_expr != 0);
-                n = extra.else_expr;
-            },
-            .@"suspend" => {
-                if (datas[n].lhs != 0) {
-                    n = datas[n].lhs;
-                } else {
-                    return main_tokens[n] + end_offset;
-                }
-            },
-            .array_type_sentinel => {
-                const extra = tree.extraData(datas[n].rhs, Node.ArrayTypeSentinel);
-                n = extra.elem_type;
-            },
-        };
-    }
-
-    pub fn tokensOnSameLine(tree: Tree, token1: TokenIndex, token2: TokenIndex) bool {
-        const token_starts = tree.tokens.items(.start);
-        const source = tree.source[token_starts[token1]..token_starts[token2]];
-        return mem.indexOfScalar(u8, source, '\n') == null;
-    }
-
-    pub fn getNodeSource(tree: Tree, node: Node.Index) []const u8 {
-        const token_starts = tree.tokens.items(.start);
-        const first_token = tree.firstToken(node);
-        const last_token = tree.lastToken(node);
-        const start = token_starts[first_token];
-        const end = token_starts[last_token] + tree.tokenSlice(last_token).len;
-        return tree.source[start..end];
-    }
-
-    pub fn globalVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
-        assert(tree.nodes.items(.tag)[node] == .global_var_decl);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.GlobalVarDecl);
-        return tree.fullVarDecl(.{
-            .type_node = extra.type_node,
-            .align_node = extra.align_node,
-            .section_node = extra.section_node,
-            .init_node = data.rhs,
-            .mut_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn localVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
-        assert(tree.nodes.items(.tag)[node] == .local_var_decl);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.LocalVarDecl);
-        return tree.fullVarDecl(.{
-            .type_node = extra.type_node,
-            .align_node = extra.align_node,
-            .section_node = 0,
-            .init_node = data.rhs,
-            .mut_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn simpleVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
-        assert(tree.nodes.items(.tag)[node] == .simple_var_decl);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullVarDecl(.{
-            .type_node = data.lhs,
-            .align_node = 0,
-            .section_node = 0,
-            .init_node = data.rhs,
-            .mut_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn alignedVarDecl(tree: Tree, node: Node.Index) full.VarDecl {
-        assert(tree.nodes.items(.tag)[node] == .aligned_var_decl);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullVarDecl(.{
-            .type_node = 0,
-            .align_node = data.lhs,
-            .section_node = 0,
-            .init_node = data.rhs,
-            .mut_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn ifSimple(tree: Tree, node: Node.Index) full.If {
-        assert(tree.nodes.items(.tag)[node] == .if_simple);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullIf(.{
-            .cond_expr = data.lhs,
-            .then_expr = data.rhs,
-            .else_expr = 0,
-            .if_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn ifFull(tree: Tree, node: Node.Index) full.If {
-        assert(tree.nodes.items(.tag)[node] == .@"if");
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.If);
-        return tree.fullIf(.{
-            .cond_expr = data.lhs,
-            .then_expr = extra.then_expr,
-            .else_expr = extra.else_expr,
-            .if_token = tree.nodes.items(.main_token)[node],
-        });
-    }
-
-    pub fn containerField(tree: Tree, node: Node.Index) full.ContainerField {
-        assert(tree.nodes.items(.tag)[node] == .container_field);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.ContainerField);
-        return tree.fullContainerField(.{
-            .name_token = tree.nodes.items(.main_token)[node],
-            .type_expr = data.lhs,
-            .value_expr = extra.value_expr,
-            .align_expr = extra.align_expr,
-        });
-    }
-
-    pub fn containerFieldInit(tree: Tree, node: Node.Index) full.ContainerField {
-        assert(tree.nodes.items(.tag)[node] == .container_field_init);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullContainerField(.{
-            .name_token = tree.nodes.items(.main_token)[node],
-            .type_expr = data.lhs,
-            .value_expr = data.rhs,
-            .align_expr = 0,
-        });
-    }
-
-    pub fn containerFieldAlign(tree: Tree, node: Node.Index) full.ContainerField {
-        assert(tree.nodes.items(.tag)[node] == .container_field_align);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullContainerField(.{
-            .name_token = tree.nodes.items(.main_token)[node],
-            .type_expr = data.lhs,
-            .value_expr = 0,
-            .align_expr = data.rhs,
-        });
-    }
-
-    pub fn fnProtoSimple(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
-        assert(tree.nodes.items(.tag)[node] == .fn_proto_simple);
-        const data = tree.nodes.items(.data)[node];
-        buffer[0] = data.lhs;
-        const params = if (data.lhs == 0) buffer[0..0] else buffer[0..1];
-        return tree.fullFnProto(.{
-            .proto_node = node,
-            .fn_token = tree.nodes.items(.main_token)[node],
-            .return_type = data.rhs,
-            .params = params,
-            .align_expr = 0,
-            .section_expr = 0,
-            .callconv_expr = 0,
-        });
-    }
-
-    pub fn fnProtoMulti(tree: Tree, node: Node.Index) full.FnProto {
-        assert(tree.nodes.items(.tag)[node] == .fn_proto_multi);
-        const data = tree.nodes.items(.data)[node];
-        const params_range = tree.extraData(data.lhs, Node.SubRange);
-        const params = tree.extra_data[params_range.start..params_range.end];
-        return tree.fullFnProto(.{
-            .proto_node = node,
-            .fn_token = tree.nodes.items(.main_token)[node],
-            .return_type = data.rhs,
-            .params = params,
-            .align_expr = 0,
-            .section_expr = 0,
-            .callconv_expr = 0,
-        });
-    }
-
-    pub fn fnProtoOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.FnProto {
-        assert(tree.nodes.items(.tag)[node] == .fn_proto_one);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.FnProtoOne);
-        buffer[0] = extra.param;
-        const params = if (extra.param == 0) buffer[0..0] else buffer[0..1];
-        return tree.fullFnProto(.{
-            .proto_node = node,
-            .fn_token = tree.nodes.items(.main_token)[node],
-            .return_type = data.rhs,
-            .params = params,
-            .align_expr = extra.align_expr,
-            .section_expr = extra.section_expr,
-            .callconv_expr = extra.callconv_expr,
-        });
-    }
-
-    pub fn fnProto(tree: Tree, node: Node.Index) full.FnProto {
-        assert(tree.nodes.items(.tag)[node] == .fn_proto);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.FnProto);
-        const params = tree.extra_data[extra.params_start..extra.params_end];
-        return tree.fullFnProto(.{
-            .proto_node = node,
-            .fn_token = tree.nodes.items(.main_token)[node],
-            .return_type = data.rhs,
-            .params = params,
-            .align_expr = extra.align_expr,
-            .section_expr = extra.section_expr,
-            .callconv_expr = extra.callconv_expr,
-        });
-    }
-
-    pub fn structInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.StructInit {
-        assert(tree.nodes.items(.tag)[node] == .struct_init_one or
-            tree.nodes.items(.tag)[node] == .struct_init_one_comma);
-        const data = tree.nodes.items(.data)[node];
-        buffer[0] = data.rhs;
-        const fields = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
-        return tree.fullStructInit(.{
-            .lbrace = tree.nodes.items(.main_token)[node],
-            .fields = fields,
-            .type_expr = data.lhs,
-        });
-    }
-
-    pub fn structInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.StructInit {
-        assert(tree.nodes.items(.tag)[node] == .struct_init_dot_two or
-            tree.nodes.items(.tag)[node] == .struct_init_dot_two_comma);
-        const data = tree.nodes.items(.data)[node];
-        buffer.* = .{ data.lhs, data.rhs };
-        const fields = if (data.rhs != 0)
-            buffer[0..2]
-        else if (data.lhs != 0)
-            buffer[0..1]
-        else
-            buffer[0..0];
-        return tree.fullStructInit(.{
-            .lbrace = tree.nodes.items(.main_token)[node],
-            .fields = fields,
-            .type_expr = 0,
-        });
-    }
-
-    pub fn structInitDot(tree: Tree, node: Node.Index) full.StructInit {
-        assert(tree.nodes.items(.tag)[node] == .struct_init_dot or
-            tree.nodes.items(.tag)[node] == .struct_init_dot_comma);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullStructInit(.{
-            .lbrace = tree.nodes.items(.main_token)[node],
-            .fields = tree.extra_data[data.lhs..data.rhs],
-            .type_expr = 0,
-        });
-    }
-
-    pub fn structInit(tree: Tree, node: Node.Index) full.StructInit {
-        assert(tree.nodes.items(.tag)[node] == .struct_init or
-            tree.nodes.items(.tag)[node] == .struct_init_comma);
-        const data = tree.nodes.items(.data)[node];
-        const fields_range = tree.extraData(data.rhs, Node.SubRange);
-        return tree.fullStructInit(.{
-            .lbrace = tree.nodes.items(.main_token)[node],
-            .fields = tree.extra_data[fields_range.start..fields_range.end],
-            .type_expr = data.lhs,
-        });
-    }
-
-    pub fn arrayInitOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.ArrayInit {
-        assert(tree.nodes.items(.tag)[node] == .array_init_one or
-            tree.nodes.items(.tag)[node] == .array_init_one_comma);
-        const data = tree.nodes.items(.data)[node];
-        buffer[0] = data.rhs;
-        const elements = if (data.rhs == 0) buffer[0..0] else buffer[0..1];
-        return .{
-            .ast = .{
-                .lbrace = tree.nodes.items(.main_token)[node],
-                .elements = elements,
-                .type_expr = data.lhs,
-            },
-        };
-    }
-
-    pub fn arrayInitDotTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ArrayInit {
-        assert(tree.nodes.items(.tag)[node] == .array_init_dot_two or
-            tree.nodes.items(.tag)[node] == .array_init_dot_two_comma);
-        const data = tree.nodes.items(.data)[node];
-        buffer.* = .{ data.lhs, data.rhs };
-        const elements = if (data.rhs != 0)
-            buffer[0..2]
-        else if (data.lhs != 0)
-            buffer[0..1]
-        else
-            buffer[0..0];
-        return .{
-            .ast = .{
-                .lbrace = tree.nodes.items(.main_token)[node],
-                .elements = elements,
-                .type_expr = 0,
-            },
-        };
-    }
-
-    pub fn arrayInitDot(tree: Tree, node: Node.Index) full.ArrayInit {
-        assert(tree.nodes.items(.tag)[node] == .array_init_dot or
-            tree.nodes.items(.tag)[node] == .array_init_dot_comma);
-        const data = tree.nodes.items(.data)[node];
-        return .{
-            .ast = .{
-                .lbrace = tree.nodes.items(.main_token)[node],
-                .elements = tree.extra_data[data.lhs..data.rhs],
-                .type_expr = 0,
-            },
-        };
-    }
-
-    pub fn arrayInit(tree: Tree, node: Node.Index) full.ArrayInit {
-        assert(tree.nodes.items(.tag)[node] == .array_init or
-            tree.nodes.items(.tag)[node] == .array_init_comma);
-        const data = tree.nodes.items(.data)[node];
-        const elem_range = tree.extraData(data.rhs, Node.SubRange);
-        return .{
-            .ast = .{
-                .lbrace = tree.nodes.items(.main_token)[node],
-                .elements = tree.extra_data[elem_range.start..elem_range.end],
-                .type_expr = data.lhs,
-            },
-        };
-    }
-
-    pub fn arrayType(tree: Tree, node: Node.Index) full.ArrayType {
-        assert(tree.nodes.items(.tag)[node] == .array_type);
-        const data = tree.nodes.items(.data)[node];
-        return .{
-            .ast = .{
-                .lbracket = tree.nodes.items(.main_token)[node],
-                .elem_count = data.lhs,
-                .sentinel = 0,
-                .elem_type = data.rhs,
-            },
-        };
-    }
-
-    /// Assembles a `full.ArrayType` view for an `.array_type_sentinel` node
-    /// (`[elem_count:sentinel]elem_type`). The sentinel must be a real node
-    /// (nonzero), since this tag exists precisely because one was written.
-    pub fn arrayTypeSentinel(tree: Tree, node: Node.Index) full.ArrayType {
-        assert(tree.nodes.items(.tag)[node] == .array_type_sentinel);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.ArrayTypeSentinel);
-        assert(extra.sentinel != 0);
-        return .{
-            .ast = .{
-                .lbracket = tree.nodes.items(.main_token)[node],
-                .elem_count = data.lhs,
-                .sentinel = extra.sentinel,
-                .elem_type = extra.elem_type,
-            },
-        };
-    }
-
-    /// Assembles a `full.PtrType` for a `.ptr_type_aligned` node: only an
-    /// align expression is present (`lhs`); sentinel and bit range are 0.
-    pub fn ptrTypeAligned(tree: Tree, node: Node.Index) full.PtrType {
-        assert(tree.nodes.items(.tag)[node] == .ptr_type_aligned);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullPtrType(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .align_node = data.lhs,
-            .sentinel = 0,
-            .bit_range_start = 0,
-            .bit_range_end = 0,
-            .child_type = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.PtrType` for a `.ptr_type_sentinel` node: only a
-    /// sentinel expression is present (`lhs`); align and bit range are 0.
-    pub fn ptrTypeSentinel(tree: Tree, node: Node.Index) full.PtrType {
-        assert(tree.nodes.items(.tag)[node] == .ptr_type_sentinel);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullPtrType(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .align_node = 0,
-            .sentinel = data.lhs,
-            .bit_range_start = 0,
-            .bit_range_end = 0,
-            .child_type = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.PtrType` for a `.ptr_type` node: `lhs` indexes a
-    /// `Node.PtrType` extra record holding both align and sentinel nodes;
-    /// bit range fields are 0 (not present for this tag).
-    pub fn ptrType(tree: Tree, node: Node.Index) full.PtrType {
-        assert(tree.nodes.items(.tag)[node] == .ptr_type);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.PtrType);
-        return tree.fullPtrType(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .align_node = extra.align_node,
-            .sentinel = extra.sentinel,
-            .bit_range_start = 0,
-            .bit_range_end = 0,
-            .child_type = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.PtrType` for a `.ptr_type_bit_range` node — the
-    /// most general pointer form; `lhs` indexes a `Node.PtrTypeBitRange`
-    /// record carrying align, sentinel, and the host-int bit range.
-    pub fn ptrTypeBitRange(tree: Tree, node: Node.Index) full.PtrType {
-        assert(tree.nodes.items(.tag)[node] == .ptr_type_bit_range);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.PtrTypeBitRange);
-        return tree.fullPtrType(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .align_node = extra.align_node,
-            .sentinel = extra.sentinel,
-            .bit_range_start = extra.bit_range_start,
-            .bit_range_end = extra.bit_range_end,
-            .child_type = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.Slice` for a `.slice_open` node (`a[start..]`):
-    /// `end` and `sentinel` are 0 because neither is written in this form.
-    pub fn sliceOpen(tree: Tree, node: Node.Index) full.Slice {
-        assert(tree.nodes.items(.tag)[node] == .slice_open);
-        const data = tree.nodes.items(.data)[node];
-        return .{
-            .ast = .{
-                .sliced = data.lhs,
-                .lbracket = tree.nodes.items(.main_token)[node],
-                .start = data.rhs,
-                .end = 0,
-                .sentinel = 0,
-            },
-        };
-    }
-
-    /// Assembles a `full.Slice` for a `.slice` node (`a[start..end]`):
-    /// `rhs` indexes a `Node.Slice` record with the bounds; no sentinel.
-    pub fn slice(tree: Tree, node: Node.Index) full.Slice {
-        assert(tree.nodes.items(.tag)[node] == .slice);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.Slice);
-        return .{
-            .ast = .{
-                .sliced = data.lhs,
-                .lbracket = tree.nodes.items(.main_token)[node],
-                .start = extra.start,
-                .end = extra.end,
-                .sentinel = 0,
-            },
-        };
-    }
-
-    /// Assembles a `full.Slice` for a `.slice_sentinel` node
-    /// (`a[start..end :sentinel]`); all three fields come from the
-    /// `Node.SliceSentinel` record indexed by `rhs`.
-    pub fn sliceSentinel(tree: Tree, node: Node.Index) full.Slice {
-        assert(tree.nodes.items(.tag)[node] == .slice_sentinel);
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.SliceSentinel);
-        return .{
-            .ast = .{
-                .sliced = data.lhs,
-                .lbracket = tree.nodes.items(.main_token)[node],
-                .start = extra.start,
-                .end = extra.end,
-                .sentinel = extra.sentinel,
-            },
-        };
-    }
-
-    /// Assembles a `full.ContainerDecl` for a container with 0, 1, or 2
-    /// members stored inline in `data` (no extra_data allocation). The
-    /// caller-provided `buffer` backs the returned `members` slice, so it
-    /// must outlive the result.
-    pub fn containerDeclTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .container_decl_two or
-            tree.nodes.items(.tag)[node] == .container_decl_two_trailing);
-        const data = tree.nodes.items(.data)[node];
-        buffer.* = .{ data.lhs, data.rhs };
-        // 0 marks an absent member slot; rhs set implies lhs set.
-        const members = if (data.rhs != 0)
-            buffer[0..2]
-        else if (data.lhs != 0)
-            buffer[0..1]
-        else
-            buffer[0..0];
-        return tree.fullContainerDecl(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .enum_token = null,
-            .members = members,
-            .arg = 0,
-        });
-    }
-
-    /// Assembles a `full.ContainerDecl` for a `.container_decl` /
-    /// `.container_decl_trailing` node; members are `extra_data[lhs..rhs]`.
-    pub fn containerDecl(tree: Tree, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .container_decl or
-            tree.nodes.items(.tag)[node] == .container_decl_trailing);
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullContainerDecl(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .enum_token = null,
-            .members = tree.extra_data[data.lhs..data.rhs],
-            .arg = 0,
-        });
-    }
-
-    /// Assembles a `full.ContainerDecl` for a container with an argument,
-    /// e.g. `enum(u8) { ... }`: `lhs` is the argument expression and `rhs`
-    /// indexes a `Node.SubRange` of member nodes.
-    pub fn containerDeclArg(tree: Tree, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .container_decl_arg or
-            tree.nodes.items(.tag)[node] == .container_decl_arg_trailing);
-        const data = tree.nodes.items(.data)[node];
-        const members_range = tree.extraData(data.rhs, Node.SubRange);
-        return tree.fullContainerDecl(.{
-            .main_token = tree.nodes.items(.main_token)[node],
-            .enum_token = null,
-            .members = tree.extra_data[members_range.start..members_range.end],
-            .arg = data.lhs,
-        });
-    }
-
-    /// Assembles a `full.ContainerDecl` for a `union(enum)` with 0-2 inline
-    /// members. Like `containerDeclTwo`, the caller's `buffer` backs the
-    /// returned `members` slice. `enum_token` is derived positionally from
-    /// `main_token` ("union" "(" "enum").
-    pub fn taggedUnionTwo(tree: Tree, buffer: *[2]Node.Index, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .tagged_union_two or
-            tree.nodes.items(.tag)[node] == .tagged_union_two_trailing);
-        const data = tree.nodes.items(.data)[node];
-        buffer.* = .{ data.lhs, data.rhs };
-        // 0 marks an absent member slot; rhs set implies lhs set.
-        const members = if (data.rhs != 0)
-            buffer[0..2]
-        else if (data.lhs != 0)
-            buffer[0..1]
-        else
-            buffer[0..0];
-        const main_token = tree.nodes.items(.main_token)[node];
-        return tree.fullContainerDecl(.{
-            .main_token = main_token,
-            .enum_token = main_token + 2, // union lparen enum
-            .members = members,
-            .arg = 0,
-        });
-    }
-
-    /// Assembles a `full.ContainerDecl` for a `union(enum)` whose members
-    /// are `extra_data[lhs..rhs]`; `enum_token` is positional (main + 2).
-    pub fn taggedUnion(tree: Tree, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .tagged_union or
-            tree.nodes.items(.tag)[node] == .tagged_union_trailing);
-        const data = tree.nodes.items(.data)[node];
-        const main_token = tree.nodes.items(.main_token)[node];
-        return tree.fullContainerDecl(.{
-            .main_token = main_token,
-            .enum_token = main_token + 2, // union lparen enum
-            .members = tree.extra_data[data.lhs..data.rhs],
-            .arg = 0,
-        });
-    }
-
-    /// Assembles a `full.ContainerDecl` for `union(enum(T))`: `lhs` is the
-    /// tag-type expression (becomes `arg`) and `rhs` indexes a SubRange of
-    /// member nodes; `enum_token` is positional (main + 2).
-    pub fn taggedUnionEnumTag(tree: Tree, node: Node.Index) full.ContainerDecl {
-        assert(tree.nodes.items(.tag)[node] == .tagged_union_enum_tag or
-            tree.nodes.items(.tag)[node] == .tagged_union_enum_tag_trailing);
-        const data = tree.nodes.items(.data)[node];
-        const members_range = tree.extraData(data.rhs, Node.SubRange);
-        const main_token = tree.nodes.items(.main_token)[node];
-        return tree.fullContainerDecl(.{
-            .main_token = main_token,
-            .enum_token = main_token + 2, // union lparen enum
-            .members = tree.extra_data[members_range.start..members_range.end],
-            .arg = data.lhs,
-        });
-    }
-
-    /// Assembles a `full.SwitchCase` for a single-value case. The `values`
-    /// slice aliases `data.lhs` in-place (pointer to one Node.Index); an
-    /// lhs of 0 means no value, i.e. an `else` prong.
-    pub fn switchCaseOne(tree: Tree, node: Node.Index) full.SwitchCase {
-        const data = &tree.nodes.items(.data)[node];
-        const values: *[1]Node.Index = &data.lhs;
-        return tree.fullSwitchCase(.{
-            .values = if (data.lhs == 0) values[0..0] else values[0..1],
-            .arrow_token = tree.nodes.items(.main_token)[node],
-            .target_expr = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.SwitchCase` for a multi-value case; `lhs` indexes
-    /// a `Node.SubRange` of value nodes in `extra_data`.
-    pub fn switchCase(tree: Tree, node: Node.Index) full.SwitchCase {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.lhs, Node.SubRange);
-        return tree.fullSwitchCase(.{
-            .values = tree.extra_data[extra.start..extra.end],
-            .arrow_token = tree.nodes.items(.main_token)[node],
-            .target_expr = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.Asm` for an asm expression with no output/input
-    /// items: `lhs` is the template node and `rhs` the closing rparen token.
-    pub fn asmSimple(tree: Tree, node: Node.Index) full.Asm {
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullAsm(.{
-            .asm_token = tree.nodes.items(.main_token)[node],
-            .template = data.lhs,
-            .items = &.{},
-            .rparen = data.rhs,
-        });
-    }
-
-    /// Assembles a `full.Asm` for an asm expression with items: `rhs`
-    /// indexes a `Node.Asm` record giving the item range and rparen token.
-    pub fn asmFull(tree: Tree, node: Node.Index) full.Asm {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.Asm);
-        return tree.fullAsm(.{
-            .asm_token = tree.nodes.items(.main_token)[node],
-            .template = data.lhs,
-            .items = tree.extra_data[extra.items_start..extra.items_end],
-            .rparen = extra.rparen,
-        });
-    }
-
-    /// Assembles a `full.While` for `while (cond) body` — no continue
-    /// expression and no else branch (both encoded as 0).
-    pub fn whileSimple(tree: Tree, node: Node.Index) full.While {
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullWhile(.{
-            .while_token = tree.nodes.items(.main_token)[node],
-            .cond_expr = data.lhs,
-            .cont_expr = 0,
-            .then_expr = data.rhs,
-            .else_expr = 0,
-        });
-    }
-
-    /// Assembles a `full.While` for `while (cond) : (cont) body` with no
-    /// else branch; `rhs` indexes a `Node.WhileCont` record.
-    pub fn whileCont(tree: Tree, node: Node.Index) full.While {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.WhileCont);
-        return tree.fullWhile(.{
-            .while_token = tree.nodes.items(.main_token)[node],
-            .cond_expr = data.lhs,
-            .cont_expr = extra.cont_expr,
-            .then_expr = extra.then_expr,
-            .else_expr = 0,
-        });
-    }
-
-    /// Assembles a `full.While` for the fully general while form (continue
-    /// expression and else branch); `rhs` indexes a `Node.While` record.
-    pub fn whileFull(tree: Tree, node: Node.Index) full.While {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.While);
-        return tree.fullWhile(.{
-            .while_token = tree.nodes.items(.main_token)[node],
-            .cond_expr = data.lhs,
-            .cont_expr = extra.cont_expr,
-            .then_expr = extra.then_expr,
-            .else_expr = extra.else_expr,
-        });
-    }
-
-    /// Assembles a `full.While` for `for (x) body` — for loops reuse the
-    /// While shape with `cont_expr` always 0; here there is no else either.
-    pub fn forSimple(tree: Tree, node: Node.Index) full.While {
-        const data = tree.nodes.items(.data)[node];
-        return tree.fullWhile(.{
-            .while_token = tree.nodes.items(.main_token)[node],
-            .cond_expr = data.lhs,
-            .cont_expr = 0,
-            .then_expr = data.rhs,
-            .else_expr = 0,
-        });
-    }
-
-    /// Assembles a `full.While` for `for (x) body else ...`; `rhs` indexes
-    /// a `Node.If` record (then/else pair — reused since for loops never
-    /// have a continue expression).
-    pub fn forFull(tree: Tree, node: Node.Index) full.While {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.If);
-        return tree.fullWhile(.{
-            .while_token = tree.nodes.items(.main_token)[node],
-            .cond_expr = data.lhs,
-            .cont_expr = 0,
-            .then_expr = extra.then_expr,
-            .else_expr = extra.else_expr,
-        });
-    }
-
-    /// Assembles a `full.Call` for a call with zero or one argument stored
-    /// inline in `data.rhs` (0 means no argument). The caller-provided
-    /// `buffer` backs the returned `params` slice and must outlive it.
-    pub fn callOne(tree: Tree, buffer: *[1]Node.Index, node: Node.Index) full.Call {
-        const data = tree.nodes.items(.data)[node];
-        buffer.* = .{data.rhs};
-        const params = if (data.rhs != 0) buffer[0..1] else buffer[0..0];
-        return tree.fullCall(.{
-            .lparen = tree.nodes.items(.main_token)[node],
-            .fn_expr = data.lhs,
-            .params = params,
-        });
-    }
-
-    /// Assembles a `full.Call` for a call whose arguments live in a
-    /// `Node.SubRange` of `extra_data` indexed by `rhs`.
-    pub fn callFull(tree: Tree, node: Node.Index) full.Call {
-        const data = tree.nodes.items(.data)[node];
-        const extra = tree.extraData(data.rhs, Node.SubRange);
-        return tree.fullCall(.{
-            .lparen = tree.nodes.items(.main_token)[node],
-            .fn_expr = data.lhs,
-            .params = tree.extra_data[extra.start..extra.end],
-        });
-    }
-
-    /// Shared tail for the varDecl* accessors: starting from the var/const
-    /// token, walk BACKWARD over any modifier tokens (pub, extern/export,
-    /// threadlocal, comptime, extern's lib-name string) recording each, and
-    /// stop at the first token that is none of these.
-    fn fullVarDecl(tree: Tree, info: full.VarDecl.Ast) full.VarDecl {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.VarDecl = .{
-            .ast = info,
-            .visib_token = null,
-            .extern_export_token = null,
-            .lib_name = null,
-            .threadlocal_token = null,
-            .comptime_token = null,
-        };
-        var i = info.mut_token;
-        while (i > 0) {
-            i -= 1;
-            switch (token_tags[i]) {
-                .keyword_extern, .keyword_export => result.extern_export_token = i,
-                .keyword_comptime => result.comptime_token = i,
-                .keyword_pub => result.visib_token = i,
-                .keyword_threadlocal => result.threadlocal_token = i,
-                .string_literal => result.lib_name = i,
-                else => break,
-            }
-        }
-        return result;
-    }
-
-    /// Shared tail for the if* accessors: derives payload, else, and error
-    /// capture tokens positionally from the condition and then expressions.
-    /// `else_token` is left undefined when there is no else branch.
-    fn fullIf(tree: Tree, info: full.If.Ast) full.If {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.If = .{
-            .ast = info,
-            .payload_token = null,
-            .error_token = null,
-            .else_token = undefined,
-        };
-        // if (cond_expr) |x|
-        //              ^ ^
-        const payload_pipe = tree.lastToken(info.cond_expr) + 2;
-        if (token_tags[payload_pipe] == .pipe) {
-            result.payload_token = payload_pipe + 1;
-        }
-        if (info.else_expr != 0) {
-            // then_expr else |x|
-            //           ^    ^
-            result.else_token = tree.lastToken(info.then_expr) + 1;
-            if (token_tags[result.else_token + 1] == .pipe) {
-                result.error_token = result.else_token + 2;
-            }
-        }
-        return result;
-    }
-
-    /// Shared tail for the containerField* accessors: detects an optional
-    /// `comptime` keyword immediately before the field name token.
-    fn fullContainerField(tree: Tree, info: full.ContainerField.Ast) full.ContainerField {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.ContainerField = .{
-            .ast = info,
-            .comptime_token = null,
-        };
-        // comptime name: type = init,
-        // ^
-        if (info.name_token > 0 and token_tags[info.name_token - 1] == .keyword_comptime) {
-            result.comptime_token = info.name_token - 1;
-        }
-        return result;
-    }
-
-    /// Shared tail for the fnProto* accessors: walks backward from the `fn`
-    /// token collecting modifier tokens (pub, extern/export/inline/noinline,
-    /// extern's lib-name string), then resolves the optional name token and
-    /// the lparen position immediately after it.
-    fn fullFnProto(tree: Tree, info: full.FnProto.Ast) full.FnProto {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.FnProto = .{
-            .ast = info,
-            .visib_token = null,
-            .extern_export_inline_token = null,
-            .lib_name = null,
-            .name_token = null,
-            .lparen = undefined,
-        };
-        var i = info.fn_token;
-        while (i > 0) {
-            i -= 1;
-            switch (token_tags[i]) {
-                .keyword_extern,
-                .keyword_export,
-                .keyword_inline,
-                .keyword_noinline,
-                => result.extern_export_inline_token = i,
-                .keyword_pub => result.visib_token = i,
-                .string_literal => result.lib_name = i,
-                else => break,
-            }
-        }
-        // `fn name(` or anonymous `fn(`; either way the next non-identifier
-        // token must be the lparen.
-        const after_fn_token = info.fn_token + 1;
-        if (token_tags[after_fn_token] == .identifier) {
-            result.name_token = after_fn_token;
-            result.lparen = after_fn_token + 1;
-        } else {
-            result.lparen = after_fn_token;
-        }
-        assert(token_tags[result.lparen] == .l_paren);
-
-        return result;
-    }
-
-    /// Shared tail for the structInit* accessors; StructInit has no extra
-    /// tokens to derive, so this just wraps the ast info (tree is unused
-    /// but kept for signature symmetry with the other full* helpers).
-    fn fullStructInit(tree: Tree, info: full.StructInit.Ast) full.StructInit {
-        _ = tree;
-        var result: full.StructInit = .{
-            .ast = info,
-        };
-        return result;
-    }
-
-    /// Shared tail for the ptrType* accessors: classifies the pointer size
-    /// from the token(s) at `main_token` (`*`, `**`, `[`, `[*]`, `[*c]`,
-    /// `[]`), then scans the tokens between main_token/sentinel and the
-    /// child type for allowzero/const/volatile qualifiers.
-    fn fullPtrType(tree: Tree, info: full.PtrType.Ast) full.PtrType {
-        const token_tags = tree.tokens.items(.tag);
-        // TODO: looks like stage1 isn't quite smart enough to handle enum
-        // literals in some places here
-        const Size = std.builtin.TypeInfo.Pointer.Size;
-        const size: Size = switch (token_tags[info.main_token]) {
-            .asterisk,
-            .asterisk_asterisk,
-            // `[*]`/`[*:x]` => many-item; `[*c]` (identifier after `[*`) => C pointer.
-            => switch (token_tags[info.main_token + 1]) {
-                .r_bracket, .colon => .Many,
-                .identifier => if (token_tags[info.main_token - 1] == .l_bracket) Size.C else .One,
-                else => .One,
-            },
-            .l_bracket => Size.Slice,
-            else => unreachable,
-        };
-        var result: full.PtrType = .{
-            .size = size,
-            .allowzero_token = null,
-            .const_token = null,
-            .volatile_token = null,
-            .ast = info,
-        };
-        // We need to be careful that we don't iterate over any sub-expressions
-        // here while looking for modifiers as that could result in false
-        // positives. Therefore, start after a sentinel if there is one and
-        // skip over any align node and bit range nodes.
-        var i = if (info.sentinel != 0) tree.lastToken(info.sentinel) + 1 else info.main_token;
-        const end = tree.firstToken(info.child_type);
-        while (i < end) : (i += 1) {
-            switch (token_tags[i]) {
-                .keyword_allowzero => result.allowzero_token = i,
-                .keyword_const => result.const_token = i,
-                .keyword_volatile => result.volatile_token = i,
-                .keyword_align => {
-                    // Jump past the align expression (and the optional
-                    // `:start:end` bit range that can follow it) so their
-                    // tokens are not misread as qualifiers.
-                    assert(info.align_node != 0);
-                    if (info.bit_range_end != 0) {
-                        assert(info.bit_range_start != 0);
-                        i = tree.lastToken(info.bit_range_end) + 1;
-                    } else {
-                        i = tree.lastToken(info.align_node) + 1;
-                    }
-                },
-                else => {},
-            }
-        }
-        return result;
-    }
-
-    /// Shared tail for the containerDecl*/taggedUnion* accessors: detects an
-    /// optional `extern`/`packed` layout keyword right before main_token.
-    fn fullContainerDecl(tree: Tree, info: full.ContainerDecl.Ast) full.ContainerDecl {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.ContainerDecl = .{
-            .ast = info,
-            .layout_token = null,
-        };
-        switch (token_tags[info.main_token - 1]) {
-            .keyword_extern, .keyword_packed => result.layout_token = info.main_token - 1,
-            else => {},
-        }
-        return result;
-    }
-
-    /// Shared tail for the switchCase* accessors: detects a `|payload|`
-    /// capture immediately after the `=>` arrow token.
-    fn fullSwitchCase(tree: Tree, info: full.SwitchCase.Ast) full.SwitchCase {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.SwitchCase = .{
-            .ast = info,
-            .payload_token = null,
-        };
-        if (token_tags[info.arrow_token + 1] == .pipe) {
-            result.payload_token = info.arrow_token + 2;
-        }
-        return result;
-    }
-
-    /// Shared tail for the asm* accessors: splits `items` into outputs
-    /// (leading `.asm_output` nodes) and inputs (the rest), detects an
-    /// optional `volatile` keyword after the asm token, and locates the
-    /// first clobber string by scanning tokens after the last item (or the
-    /// template, when there are no items).
-    fn fullAsm(tree: Tree, info: full.Asm.Ast) full.Asm {
-        const token_tags = tree.tokens.items(.tag);
-        const node_tags = tree.nodes.items(.tag);
-        var result: full.Asm = .{
-            .ast = info,
-            .volatile_token = null,
-            .inputs = &.{},
-            .outputs = &.{},
-            .first_clobber = null,
-        };
-        if (token_tags[info.asm_token + 1] == .keyword_volatile) {
-            result.volatile_token = info.asm_token + 1;
-        }
-        // Outputs come first in `items`; the first non-.asm_output node
-        // starts the inputs.
-        const outputs_end: usize = for (info.items) |item, i| {
-            switch (node_tags[item]) {
-                .asm_output => continue,
-                else => break i,
-            }
-        } else info.items.len;
-
-        result.outputs = info.items[0..outputs_end];
-        result.inputs = info.items[outputs_end..];
-
-        if (info.items.len == 0) {
-            // asm ("foo" ::: "a", "b");
-            const template_token = tree.lastToken(info.template);
-            if (token_tags[template_token + 1] == .colon and
-                token_tags[template_token + 2] == .colon and
-                token_tags[template_token + 3] == .colon and
-                token_tags[template_token + 4] == .string_literal)
-            {
-                result.first_clobber = template_token + 4;
-            }
-        } else if (result.inputs.len != 0) {
-            // asm ("foo" :: [_] "" (y) : "a", "b");
-            const last_input = result.inputs[result.inputs.len - 1];
-            const rparen = tree.lastToken(last_input);
-            var i = rparen + 1;
-            // Allow a (useless) comma right after the closing parenthesis.
-            if (token_tags[i] == .comma) i += 1;
-            if (token_tags[i] == .colon and
-                token_tags[i + 1] == .string_literal)
-            {
-                result.first_clobber = i + 1;
-            }
-        } else {
-            // asm ("foo" : [_] "" (x) :: "a", "b");
-            const last_output = result.outputs[result.outputs.len - 1];
-            const rparen = tree.lastToken(last_output);
-            var i = rparen + 1;
-            // Allow a (useless) comma right after the closing parenthesis.
-            if (token_tags[i] == .comma) i += 1;
-            if (token_tags[i] == .colon and
-                token_tags[i + 1] == .colon and
-                token_tags[i + 2] == .string_literal)
-            {
-                result.first_clobber = i + 2;
-            }
-        }
-
-        return result;
-    }
-
-    /// Shared tail for the while*/for* accessors: derives the optional
-    /// `inline` keyword, loop label, `|payload|` capture after the
-    /// condition, and (when else_expr != 0) the else/error-capture tokens.
-    /// `else_token` stays undefined when there is no else branch.
-    fn fullWhile(tree: Tree, info: full.While.Ast) full.While {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.While = .{
-            .ast = info,
-            .inline_token = null,
-            .label_token = null,
-            .payload_token = null,
-            .else_token = undefined,
-            .error_token = null,
-        };
-        // Look backward for `label: inline while ...`.
-        var tok_i = info.while_token - 1;
-        if (token_tags[tok_i] == .keyword_inline) {
-            result.inline_token = tok_i;
-            tok_i -= 1;
-        }
-        if (token_tags[tok_i] == .colon and
-            token_tags[tok_i - 1] == .identifier)
-        {
-            result.label_token = tok_i - 1;
-        }
-        // while (cond) |x| — payload pipe sits two tokens past the condition.
-        const last_cond_token = tree.lastToken(info.cond_expr);
-        if (token_tags[last_cond_token + 2] == .pipe) {
-            result.payload_token = last_cond_token + 3;
-        }
-        if (info.else_expr != 0) {
-            // then_expr else |x|
-            //           ^    ^
-            result.else_token = tree.lastToken(info.then_expr) + 1;
-            if (token_tags[result.else_token + 1] == .pipe) {
-                result.error_token = result.else_token + 2;
-            }
-        }
-        return result;
-    }
-
-    /// Shared tail for the call* accessors: detects an `async` keyword
-    /// immediately before the callee expression.
-    fn fullCall(tree: Tree, info: full.Call.Ast) full.Call {
-        const token_tags = tree.tokens.items(.tag);
-        var result: full.Call = .{
-            .ast = info,
-            .async_token = null,
-        };
-        const maybe_async_token = tree.firstToken(info.fn_expr) - 1;
-        if (token_tags[maybe_async_token] == .keyword_async) {
-            result.async_token = maybe_async_token;
-        }
-        return result;
-    }
-};
-
-/// Fully assembled AST node information.
-pub const full = struct {
-    pub const VarDecl = struct {
-        visib_token: ?TokenIndex,
-        extern_export_token: ?TokenIndex,
-        lib_name: ?TokenIndex,
-        threadlocal_token: ?TokenIndex,
-        comptime_token: ?TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            mut_token: TokenIndex,
-            type_node: Node.Index,
-            align_node: Node.Index,
-            section_node: Node.Index,
-            init_node: Node.Index,
-        };
-    };
-
-    pub const If = struct {
-        /// Points to the first token after the `|`. Will either be an identifier or
-        /// a `*` (with an identifier immediately after it).
-        payload_token: ?TokenIndex,
-        /// Points to the identifier after the `|`.
-        error_token: ?TokenIndex,
-        /// Populated only if else_expr != 0.
-        else_token: TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            if_token: TokenIndex,
-            cond_expr: Node.Index,
-            then_expr: Node.Index,
-            else_expr: Node.Index,
-        };
-    };
-
-    pub const While = struct {
-        ast: Ast,
-        inline_token: ?TokenIndex,
-        label_token: ?TokenIndex,
-        payload_token: ?TokenIndex,
-        error_token: ?TokenIndex,
-        /// Populated only if else_expr != 0.
-        else_token: TokenIndex,
-
-        pub const Ast = struct {
-            while_token: TokenIndex,
-            cond_expr: Node.Index,
-            cont_expr: Node.Index,
-            then_expr: Node.Index,
-            else_expr: Node.Index,
-        };
-    };
-
-    pub const ContainerField = struct {
-        comptime_token: ?TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            name_token: TokenIndex,
-            type_expr: Node.Index,
-            value_expr: Node.Index,
-            align_expr: Node.Index,
-        };
-    };
-
-    pub const FnProto = struct {
-        visib_token: ?TokenIndex,
-        extern_export_inline_token: ?TokenIndex,
-        lib_name: ?TokenIndex,
-        name_token: ?TokenIndex,
-        lparen: TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            proto_node: Node.Index,
-            fn_token: TokenIndex,
-            return_type: Node.Index,
-            params: []const Node.Index,
-            align_expr: Node.Index,
-            section_expr: Node.Index,
-            callconv_expr: Node.Index,
-        };
-
-        pub const Param = struct {
-            first_doc_comment: ?TokenIndex,
-            name_token: ?TokenIndex,
-            comptime_noalias: ?TokenIndex,
-            anytype_ellipsis3: ?TokenIndex,
-            type_expr: Node.Index,
-        };
-
-        /// Abstracts over the fact that anytype and ... are not included
-        /// in the params slice, since they are simple identifiers and
-        /// not sub-expressions.
-        pub const Iterator = struct {
-            tree: *const Tree,
-            fn_proto: *const FnProto,
-            param_i: usize,
-            tok_i: TokenIndex,
-            tok_flag: bool,
-
-            pub fn next(it: *Iterator) ?Param {
-                const token_tags = it.tree.tokens.items(.tag);
-                while (true) {
-                    var first_doc_comment: ?TokenIndex = null;
-                    var comptime_noalias: ?TokenIndex = null;
-                    var name_token: ?TokenIndex = null;
-                    if (!it.tok_flag) {
-                        if (it.param_i >= it.fn_proto.ast.params.len) {
-                            return null;
-                        }
-                        const param_type = it.fn_proto.ast.params[it.param_i];
-                        var tok_i = it.tree.firstToken(param_type) - 1;
-                        while (true) : (tok_i -= 1) switch (token_tags[tok_i]) {
-                            .colon => continue,
-                            .identifier => name_token = tok_i,
-                            .doc_comment => first_doc_comment = tok_i,
-                            .keyword_comptime, .keyword_noalias => comptime_noalias = tok_i,
-                            else => break,
-                        };
-                        it.param_i += 1;
-                        it.tok_i = it.tree.lastToken(param_type) + 1;
-                        // Look for anytype and ... params afterwards.
-                        if (token_tags[it.tok_i] == .comma) {
-                            it.tok_i += 1;
-                        }
-                        it.tok_flag = true;
-                        return Param{
-                            .first_doc_comment = first_doc_comment,
-                            .comptime_noalias = comptime_noalias,
-                            .name_token = name_token,
-                            .anytype_ellipsis3 = null,
-                            .type_expr = param_type,
-                        };
-                    }
-                    if (token_tags[it.tok_i] == .comma) {
-                        it.tok_i += 1;
-                    }
-                    if (token_tags[it.tok_i] == .r_paren) {
-                        return null;
-                    }
-                    if (token_tags[it.tok_i] == .doc_comment) {
-                        first_doc_comment = it.tok_i;
-                        while (token_tags[it.tok_i] == .doc_comment) {
-                            it.tok_i += 1;
-                        }
-                    }
-                    switch (token_tags[it.tok_i]) {
-                        .ellipsis3 => {
-                            it.tok_flag = false; // Next iteration should return null.
-                            return Param{
-                                .first_doc_comment = first_doc_comment,
-                                .comptime_noalias = null,
-                                .name_token = null,
-                                .anytype_ellipsis3 = it.tok_i,
-                                .type_expr = 0,
-                            };
-                        },
-                        .keyword_noalias, .keyword_comptime => {
-                            comptime_noalias = it.tok_i;
-                            it.tok_i += 1;
-                        },
-                        else => {},
-                    }
-                    if (token_tags[it.tok_i] == .identifier and
-                        token_tags[it.tok_i + 1] == .colon)
-                    {
-                        name_token = it.tok_i;
-                        it.tok_i += 2;
-                    }
-                    if (token_tags[it.tok_i] == .keyword_anytype) {
-                        it.tok_i += 1;
-                        return Param{
-                            .first_doc_comment = first_doc_comment,
-                            .comptime_noalias = comptime_noalias,
-                            .name_token = name_token,
-                            .anytype_ellipsis3 = it.tok_i - 1,
-                            .type_expr = 0,
-                        };
-                    }
-                    it.tok_flag = false;
-                }
-            }
-        };
-
-        pub fn iterate(fn_proto: FnProto, tree: Tree) Iterator {
-            return .{
-                .tree = &tree,
-                .fn_proto = &fn_proto,
-                .param_i = 0,
-                .tok_i = fn_proto.lparen + 1,
-                .tok_flag = true,
-            };
-        }
-    };
-
-    pub const StructInit = struct {
-        ast: Ast,
-
-        pub const Ast = struct {
-            lbrace: TokenIndex,
-            fields: []const Node.Index,
-            type_expr: Node.Index,
-        };
-    };
-
-    pub const ArrayInit = struct {
-        ast: Ast,
-
-        pub const Ast = struct {
-            lbrace: TokenIndex,
-            elements: []const Node.Index,
-            type_expr: Node.Index,
-        };
-    };
-
-    pub const ArrayType = struct {
-        ast: Ast,
-
-        pub const Ast = struct {
-            lbracket: TokenIndex,
-            elem_count: Node.Index,
-            sentinel: Node.Index,
-            elem_type: Node.Index,
-        };
-    };
-
-    pub const PtrType = struct {
-        size: std.builtin.TypeInfo.Pointer.Size,
-        allowzero_token: ?TokenIndex,
-        const_token: ?TokenIndex,
-        volatile_token: ?TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            main_token: TokenIndex,
-            align_node: Node.Index,
-            sentinel: Node.Index,
-            bit_range_start: Node.Index,
-            bit_range_end: Node.Index,
-            child_type: Node.Index,
-        };
-    };
-
-    pub const Slice = struct {
-        ast: Ast,
-
-        pub const Ast = struct {
-            sliced: Node.Index,
-            lbracket: TokenIndex,
-            start: Node.Index,
-            end: Node.Index,
-            sentinel: Node.Index,
-        };
-    };
-
-    pub const ContainerDecl = struct {
-        layout_token: ?TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            main_token: TokenIndex,
-            /// Populated when main_token is `keyword_union`.
-            enum_token: ?TokenIndex,
-            members: []const Node.Index,
-            arg: Node.Index,
-        };
-    };
-
-    pub const SwitchCase = struct {
-        /// Points to the first token after the `|`. Will either be an identifier or
-        /// a `*` (with an identifier immediately after it).
-        payload_token: ?TokenIndex,
-        ast: Ast,
-
-        pub const Ast = struct {
-            /// If empty, this is an else case
-            values: []const Node.Index,
-            arrow_token: TokenIndex,
-            target_expr: Node.Index,
-        };
-    };
-
-    pub const Asm = struct {
-        ast: Ast,
-        volatile_token: ?TokenIndex,
-        first_clobber: ?TokenIndex,
-        outputs: []const Node.Index,
-        inputs: []const Node.Index,
-
-        pub const Ast = struct {
-            asm_token: TokenIndex,
-            template: Node.Index,
-            items: []const Node.Index,
-            rparen: TokenIndex,
-        };
-    };
-
-    pub const Call = struct {
-        ast: Ast,
-        async_token: ?TokenIndex,
-
-        pub const Ast = struct {
-            lparen: TokenIndex,
-            fn_expr: Node.Index,
-            params: []const Node.Index,
-        };
-    };
-};
-
-pub const Error = struct {
-    tag: Tag,
-    token: TokenIndex,
-    extra: union {
-        none: void,
-        expected_tag: Token.Tag,
-    } = .{ .none = {} },
-
-    pub const Tag = enum {
-        asterisk_after_ptr_deref,
-        decl_between_fields,
-        expected_block,
-        expected_block_or_assignment,
-        expected_block_or_expr,
-        expected_block_or_field,
-        expected_container_members,
-        expected_expr,
-        expected_expr_or_assignment,
-        expected_fn,
-        expected_inlinable,
-        expected_labelable,
-        expected_param_list,
-        expected_prefix_expr,
-        expected_primary_type_expr,
-        expected_pub_item,
-        expected_return_type,
-        expected_semi_or_else,
-        expected_semi_or_lbrace,
-        expected_statement,
-        expected_string_literal,
-        expected_suffix_op,
-        expected_type_expr,
-        expected_var_decl,
-        expected_var_decl_or_fn,
-        expected_loop_payload,
-        expected_container,
-        extra_align_qualifier,
-        extra_allowzero_qualifier,
-        extra_const_qualifier,
-        extra_volatile_qualifier,
-        ptr_mod_on_array_child_type,
-        invalid_bit_range,
-        invalid_token,
-        same_line_doc_comment,
-        unattached_doc_comment,
-        varargs_nonfinal,
-
-        /// `expected_tag` is populated.
-        expected_token,
-    };
-};
-
-pub const Node = struct {
-    tag: Tag,
-    main_token: TokenIndex,
-    data: Data,
-
-    pub const Index = u32;
-
-    comptime {
-        // Goal is to keep this at one byte for efficiency.
-        assert(@sizeOf(Tag) == 1);
-    }
-
-    /// Note: The FooComma/FooSemicolon variants exist to ease the implementation of
-    /// Tree.lastToken()
-    pub const Tag = enum {
-        /// sub_list[lhs..rhs]
-        root,
-        /// `usingnamespace lhs;`. rhs unused. main_token is `usingnamespace`.
-        @"usingnamespace",
-        /// lhs is test name token (must be string literal), if any.
-        /// rhs is the body node.
-        test_decl,
-        /// lhs is the index into extra_data.
-        /// rhs is the initialization expression, if any.
-        /// main_token is `var` or `const`.
-        global_var_decl,
-        /// `var a: x align(y) = rhs`
-        /// lhs is the index into extra_data.
-        /// main_token is `var` or `const`.
-        local_var_decl,
-        /// `var a: lhs = rhs`. lhs and rhs may be unused.
-        /// Can be local or global.
-        /// main_token is `var` or `const`.
-        simple_var_decl,
-        /// `var a align(lhs) = rhs`. lhs and rhs may be unused.
-        /// Can be local or global.
-        /// main_token is `var` or `const`.
-        aligned_var_decl,
-        /// lhs is the identifier token payload if any,
-        /// rhs is the deferred expression.
-        @"errdefer",
-        /// lhs is unused.
-        /// rhs is the deferred expression.
-        @"defer",
-        /// lhs catch rhs
-        /// lhs catch |err| rhs
-        /// main_token is the `catch` keyword.
-        /// payload is determined by looking at the next token after the `catch` keyword.
-        @"catch",
-        /// `lhs.a`. main_token is the dot. rhs is the identifier token index.
-        field_access,
-        /// `lhs.?`. main_token is the dot. rhs is the `?` token index.
-        unwrap_optional,
-        /// `lhs == rhs`. main_token is op.
-        equal_equal,
-        /// `lhs != rhs`. main_token is op.
-        bang_equal,
-        /// `lhs < rhs`. main_token is op.
-        less_than,
-        /// `lhs > rhs`. main_token is op.
-        greater_than,
-        /// `lhs <= rhs`. main_token is op.
-        less_or_equal,
-        /// `lhs >= rhs`. main_token is op.
-        greater_or_equal,
-        /// `lhs *= rhs`. main_token is op.
-        assign_mul,
-        /// `lhs /= rhs`. main_token is op.
-        assign_div,
-        /// `lhs %= rhs`. main_token is op.
-        assign_mod,
-        /// `lhs += rhs`. main_token is op.
-        assign_add,
-        /// `lhs -= rhs`. main_token is op.
-        assign_sub,
-        /// `lhs <<= rhs`. main_token is op.
-        assign_bit_shift_left,
-        /// `lhs >>= rhs`. main_token is op.
-        assign_bit_shift_right,
-        /// `lhs &= rhs`. main_token is op.
-        assign_bit_and,
-        /// `lhs ^= rhs`. main_token is op.
-        assign_bit_xor,
-        /// `lhs |= rhs`. main_token is op.
-        assign_bit_or,
-        /// `lhs *%= rhs`. main_token is op.
-        assign_mul_wrap,
-        /// `lhs +%= rhs`. main_token is op.
-        assign_add_wrap,
-        /// `lhs -%= rhs`. main_token is op.
-        assign_sub_wrap,
-        /// `lhs = rhs`. main_token is op.
-        assign,
-        /// `lhs || rhs`. main_token is the `||`.
-        merge_error_sets,
-        /// `lhs * rhs`. main_token is the `*`.
-        mul,
-        /// `lhs / rhs`. main_token is the `/`.
-        div,
-        /// `lhs % rhs`. main_token is the `%`.
-        mod,
-        /// `lhs ** rhs`. main_token is the `**`.
-        array_mult,
-        /// `lhs *% rhs`. main_token is the `*%`.
-        mul_wrap,
-        /// `lhs + rhs`. main_token is the `+`.
-        add,
-        /// `lhs - rhs`. main_token is the `-`.
-        sub,
-        /// `lhs ++ rhs`. main_token is the `++`.
-        array_cat,
-        /// `lhs +% rhs`. main_token is the `+%`.
-        add_wrap,
-        /// `lhs -% rhs`. main_token is the `-%`.
-        sub_wrap,
-        /// `lhs << rhs`. main_token is the `<<`.
-        bit_shift_left,
-        /// `lhs >> rhs`. main_token is the `>>`.
-        bit_shift_right,
-        /// `lhs & rhs`. main_token is the `&`.
-        bit_and,
-        /// `lhs ^ rhs`. main_token is the `^`.
-        bit_xor,
-        /// `lhs | rhs`. main_token is the `|`.
-        bit_or,
-        /// `lhs orelse rhs`. main_token is the `orelse`.
-        @"orelse",
-        /// `lhs and rhs`. main_token is the `and`.
-        bool_and,
-        /// `lhs or rhs`. main_token is the `or`.
-        bool_or,
-        /// `op lhs`. rhs unused. main_token is op.
-        bool_not,
-        /// `op lhs`. rhs unused. main_token is op.
-        negation,
-        /// `op lhs`. rhs unused. main_token is op.
-        bit_not,
-        /// `op lhs`. rhs unused. main_token is op.
-        negation_wrap,
-        /// `op lhs`. rhs unused. main_token is op.
-        address_of,
-        /// `op lhs`. rhs unused. main_token is op.
-        @"try",
-        /// `op lhs`. rhs unused. main_token is op.
-        @"await",
-        /// `?lhs`. rhs unused. main_token is the `?`.
-        optional_type,
-        /// `[lhs]rhs`.
-        array_type,
-        /// `[lhs:a]b`. `ArrayTypeSentinel[rhs]`.
-        array_type_sentinel,
-        /// `[*]align(lhs) rhs`. lhs can be omitted.
-        /// `*align(lhs) rhs`. lhs can be omitted.
-        /// `[]rhs`.
-        /// main_token is the asterisk if a pointer or the lbracket if a slice
-        /// main_token might be a ** token, which is shared with a parent/child
-        /// pointer type and may require special handling.
-        ptr_type_aligned,
-        /// `[*:lhs]rhs`. lhs can be omitted.
-        /// `*rhs`.
-        /// `[:lhs]rhs`.
-        /// main_token is the asterisk if a pointer or the lbracket if a slice
-        /// main_token might be a ** token, which is shared with a parent/child
-        /// pointer type and may require special handling.
-        ptr_type_sentinel,
-        /// lhs is index into ptr_type. rhs is the element type expression.
-        /// main_token is the asterisk if a pointer or the lbracket if a slice
-        /// main_token might be a ** token, which is shared with a parent/child
-        /// pointer type and may require special handling.
-        ptr_type,
-        /// lhs is index into ptr_type_bit_range. rhs is the element type expression.
-        /// main_token is the asterisk if a pointer or the lbracket if a slice
-        /// main_token might be a ** token, which is shared with a parent/child
-        /// pointer type and may require special handling.
-        ptr_type_bit_range,
-        /// `lhs[rhs..]`
-        /// main_token is the lbracket.
-        slice_open,
-        /// `lhs[b..c]`. rhs is index into Slice
-        /// main_token is the lbracket.
-        slice,
-        /// `lhs[b..c :d]`. rhs is index into SliceSentinel
-        /// main_token is the lbracket.
-        slice_sentinel,
-        /// `lhs.*`. rhs is unused.
-        deref,
-        /// `lhs[rhs]`.
-        array_access,
-        /// `lhs{rhs}`. rhs can be omitted.
-        array_init_one,
-        /// `lhs{rhs,}`. rhs can *not* be omitted
-        array_init_one_comma,
-        /// `.{lhs, rhs}`. lhs and rhs can be omitted.
-        array_init_dot_two,
-        /// Same as `array_init_dot_two` except there is known to be a trailing comma
-        /// before the final rbrace.
-        array_init_dot_two_comma,
-        /// `.{a, b}`. `sub_list[lhs..rhs]`.
-        array_init_dot,
-        /// Same as `array_init_dot` except there is known to be a trailing comma
-        /// before the final rbrace.
-        array_init_dot_comma,
-        /// `lhs{a, b}`. `sub_range_list[rhs]`. lhs can be omitted which means `.{a, b}`.
-        array_init,
-        /// Same as `array_init` except there is known to be a trailing comma
-        /// before the final rbrace.
-        array_init_comma,
-        /// `lhs{.a = rhs}`. rhs can be omitted making it empty.
-        /// main_token is the lbrace.
-        struct_init_one,
-        /// `lhs{.a = rhs,}`. rhs can *not* be omitted.
-        /// main_token is the lbrace.
-        struct_init_one_comma,
-        /// `.{.a = lhs, .b = rhs}`. lhs and rhs can be omitted.
-        /// main_token is the lbrace.
-        /// No trailing comma before the rbrace.
-        struct_init_dot_two,
-        /// Same as `struct_init_dot_two` except there is known to be a trailing comma
-        /// before the final rbrace.
-        struct_init_dot_two_comma,
-        /// `.{.a = b, .c = d}`. `sub_list[lhs..rhs]`.
-        /// main_token is the lbrace.
-        struct_init_dot,
-        /// Same as `struct_init_dot` except there is known to be a trailing comma
-        /// before the final rbrace.
-        struct_init_dot_comma,
-        /// `lhs{.a = b, .c = d}`. `sub_range_list[rhs]`.
-        /// lhs can be omitted which means `.{.a = b, .c = d}`.
-        /// main_token is the lbrace.
-        struct_init,
-        /// Same as `struct_init` except there is known to be a trailing comma
-        /// before the final rbrace.
-        struct_init_comma,
-        /// `lhs(rhs)`. rhs can be omitted.
-        /// main_token is the lparen.
-        call_one,
-        /// `lhs(rhs,)`. rhs can *not* be omitted.
-        /// main_token is the lparen.
-        call_one_comma,
-        /// `async lhs(rhs)`. rhs can be omitted.
-        async_call_one,
-        /// `async lhs(rhs,)`.
-        async_call_one_comma,
-        /// `lhs(a, b, c)`. `SubRange[rhs]`.
-        /// main_token is the `(`.
-        call,
-        /// `lhs(a, b, c,)`. `SubRange[rhs]`.
-        /// main_token is the `(`.
-        call_comma,
-        /// `async lhs(a, b, c)`. `SubRange[rhs]`.
-        /// main_token is the `(`.
-        async_call,
-        /// `async lhs(a, b, c,)`. `SubRange[rhs]`.
-        /// main_token is the `(`.
-        async_call_comma,
-        /// `switch(lhs) {}`. `SubRange[rhs]`.
-        @"switch",
-        /// Same as switch except there is known to be a trailing comma
-        /// before the final rbrace
-        switch_comma,
-        /// `lhs => rhs`. If lhs is omitted it means `else`.
-        /// main_token is the `=>`
-        switch_case_one,
-        /// `a, b, c => rhs`. `SubRange[lhs]`.
-        /// main_token is the `=>`
-        switch_case,
-        /// `lhs...rhs`.
-        switch_range,
-        /// `while (lhs) rhs`.
-        /// `while (lhs) |x| rhs`.
-        while_simple,
-        /// `while (lhs) : (a) b`. `WhileCont[rhs]`.
-        /// `while (lhs) |x| : (a) b`. `WhileCont[rhs]`.
-        while_cont,
-        /// `while (lhs) : (a) b else c`. `While[rhs]`.
-        /// `while (lhs) |x| : (a) b else c`. `While[rhs]`.
-        /// `while (lhs) |x| : (a) b else |y| c`. `While[rhs]`.
-        @"while",
-        /// `for (lhs) rhs`.
-        for_simple,
-        /// `for (lhs) a else b`. `if_list[rhs]`.
-        @"for",
-        /// `if (lhs) rhs`.
-        /// `if (lhs) |a| rhs`.
-        if_simple,
-        /// `if (lhs) a else b`. `If[rhs]`.
-        /// `if (lhs) |x| a else b`. `If[rhs]`.
-        /// `if (lhs) |x| a else |y| b`. `If[rhs]`.
-        @"if",
-        /// `suspend lhs`. lhs can be omitted. rhs is unused.
-        @"suspend",
-        /// `resume lhs`. rhs is unused.
-        @"resume",
-        /// `continue`. lhs is token index of label if any. rhs is unused.
-        @"continue",
-        /// `break :lhs rhs`
-        /// both lhs and rhs may be omitted.
-        @"break",
-        /// `return lhs`. lhs can be omitted. rhs is unused.
-        @"return",
-        /// `fn(a: lhs) rhs`. lhs can be omitted.
-        /// anytype and ... parameters are omitted from the AST tree.
-        /// main_token is the `fn` keyword.
-        /// extern function declarations use this tag.
-        fn_proto_simple,
-        /// `fn(a: b, c: d) rhs`. `sub_range_list[lhs]`.
-        /// anytype and ... parameters are omitted from the AST tree.
-        /// main_token is the `fn` keyword.
-        /// extern function declarations use this tag.
-        fn_proto_multi,
-        /// `fn(a: b) rhs linksection(e) callconv(f)`. `FnProtoOne[lhs]`.
-        /// zero or one parameters.
-        /// anytype and ... parameters are omitted from the AST tree.
-        /// main_token is the `fn` keyword.
-        /// extern function declarations use this tag.
-        fn_proto_one,
-        /// `fn(a: b, c: d) rhs linksection(e) callconv(f)`. `FnProto[lhs]`.
-        /// anytype and ... parameters are omitted from the AST tree.
-        /// main_token is the `fn` keyword.
-        /// extern function declarations use this tag.
-        fn_proto,
-        /// lhs is the fn_proto.
-        /// rhs is the function body block.
-        /// Note that extern function declarations use the fn_proto tags rather
-        /// than this one.
-        fn_decl,
-        /// `anyframe->rhs`. main_token is `anyframe`. `lhs` is arrow token index.
-        anyframe_type,
-        /// Both lhs and rhs unused.
-        anyframe_literal,
-        /// Both lhs and rhs unused.
-        char_literal,
-        /// Both lhs and rhs unused.
-        integer_literal,
-        /// Both lhs and rhs unused.
-        float_literal,
-        /// Both lhs and rhs unused.
-        unreachable_literal,
-        /// Both lhs and rhs unused.
-        /// Most identifiers will not have explicit AST nodes, however for expressions
-        /// which could be one of many different kinds of AST nodes, there will be an
-        /// identifier AST node for it.
-        identifier,
-        /// lhs is the dot token index, rhs unused, main_token is the identifier.
-        enum_literal,
-        /// main_token is the string literal token
-        /// Both lhs and rhs unused.
-        string_literal,
-        /// main_token is the first token index (redundant with lhs)
-        /// lhs is the first token index; rhs is the last token index.
-        /// Could be a series of multiline_string_literal_line tokens, or a single
-        /// string_literal token.
-        multiline_string_literal,
-        /// `(lhs)`. main_token is the `(`; rhs is the token index of the `)`.
-        grouped_expression,
-        /// `@a(lhs, rhs)`. lhs and rhs may be omitted.
-        /// main_token is the builtin token.
-        builtin_call_two,
-        /// Same as builtin_call_two but there is known to be a trailing comma before the rparen.
-        builtin_call_two_comma,
-        /// `@a(b, c)`. `sub_list[lhs..rhs]`.
-        /// main_token is the builtin token.
-        builtin_call,
-        /// Same as builtin_call but there is known to be a trailing comma before the rparen.
-        builtin_call_comma,
-        /// `error{a, b}`.
-        /// rhs is the rbrace, lhs is unused.
-        error_set_decl,
-        /// `struct {}`, `union {}`, `opaque {}`, `enum {}`. `extra_data[lhs..rhs]`.
-        /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
-        container_decl,
-        /// Same as ContainerDecl but there is known to be a trailing comma
-        /// or semicolon before the rbrace.
-        container_decl_trailing,
-        /// `struct {lhs, rhs}`, `union {lhs, rhs}`, `opaque {lhs, rhs}`, `enum {lhs, rhs}`.
-        /// lhs or rhs can be omitted.
-        /// main_token is `struct`, `union`, `opaque`, `enum` keyword.
-        container_decl_two,
-        /// Same as ContainerDeclTwo except there is known to be a trailing comma
-        /// or semicolon before the rbrace.
-        container_decl_two_trailing,
-        /// `union(lhs)` / `enum(lhs)`. `SubRange[rhs]`.
-        container_decl_arg,
-        /// Same as container_decl_arg but there is known to be a trailing
-        /// comma or semicolon before the rbrace.
-        container_decl_arg_trailing,
-        /// `union(enum) {}`. `sub_list[lhs..rhs]`.
-        /// Note that tagged unions with explicitly provided enums are represented
-        /// by `container_decl_arg`.
-        tagged_union,
-        /// Same as tagged_union but there is known to be a trailing comma
-        /// or semicolon before the rbrace.
-        tagged_union_trailing,
-        /// `union(enum) {lhs, rhs}`. lhs or rhs may be omitted.
-        /// Note that tagged unions with explicitly provided enums are represented
-        /// by `container_decl_arg`.
-        tagged_union_two,
-        /// Same as tagged_union_two but there is known to be a trailing comma
-        /// or semicolon before the rbrace.
-        tagged_union_two_trailing,
-        /// `union(enum(lhs)) {}`. `SubRange[rhs]`.
-        tagged_union_enum_tag,
-        /// Same as tagged_union_enum_tag but there is known to be a trailing comma
-        /// or semicolon before the rbrace.
-        tagged_union_enum_tag_trailing,
-        /// `a: lhs = rhs,`. lhs and rhs can be omitted.
-        /// main_token is the field name identifier.
-        /// lastToken() does not include the possible trailing comma.
-        container_field_init,
-        /// `a: lhs align(rhs),`. rhs can be omitted.
-        /// main_token is the field name identifier.
-        /// lastToken() does not include the possible trailing comma.
-        container_field_align,
-        /// `a: lhs align(c) = d,`. `container_field_list[rhs]`.
-        /// main_token is the field name identifier.
-        /// lastToken() does not include the possible trailing comma.
-        container_field,
-        /// `anytype`. both lhs and rhs unused.
-        /// Used by `ContainerField`.
-        @"anytype",
-        /// `comptime lhs`. rhs unused.
-        @"comptime",
-        /// `nosuspend lhs`. rhs unused.
-        @"nosuspend",
-        /// `{lhs rhs}`. rhs or lhs can be omitted.
-        /// main_token points at the lbrace.
-        block_two,
-        /// Same as block_two but there is known to be a semicolon before the rbrace.
-        block_two_semicolon,
-        /// `{}`. `sub_list[lhs..rhs]`.
-        /// main_token points at the lbrace.
-        block,
-        /// Same as block but there is known to be a semicolon before the rbrace.
-        block_semicolon,
-        /// `asm(lhs)`. rhs is the token index of the rparen.
-        asm_simple,
-        /// `asm(lhs, a)`. `Asm[rhs]`.
-        @"asm",
-        /// `[a] "b" (c)`. lhs is 0, rhs is token index of the rparen.
-        /// `[a] "b" (-> lhs)`. rhs is token index of the rparen.
-        /// main_token is `a`.
-        asm_output,
-        /// `[a] "b" (lhs)`. rhs is token index of the rparen.
-        /// main_token is `a`.
-        asm_input,
-        /// `error.a`. lhs is token index of `.`. rhs is token index of `a`.
-        error_value,
-        /// `lhs!rhs`. main_token is the `!`.
-        error_union,
-
-        pub fn isContainerField(tag: Tag) bool {
-            return switch (tag) {
-                .container_field_init,
-                .container_field_align,
-                .container_field,
-                => true,
-
-                else => false,
-            };
-        }
-    };
-
-    pub const Data = struct {
-        lhs: Index,
-        rhs: Index,
-    };
-
-    pub const LocalVarDecl = struct {
-        type_node: Index,
-        align_node: Index,
-    };
-
-    pub const ArrayTypeSentinel = struct {
-        elem_type: Index,
-        sentinel: Index,
-    };
-
-    pub const PtrType = struct {
-        sentinel: Index,
-        align_node: Index,
-    };
-
-    pub const PtrTypeBitRange = struct {
-        sentinel: Index,
-        align_node: Index,
-        bit_range_start: Index,
-        bit_range_end: Index,
-    };
-
-    pub const SubRange = struct {
-        /// Index into sub_list.
-        start: Index,
-        /// Index into sub_list.
-        end: Index,
-    };
-
-    pub const If = struct {
-        then_expr: Index,
-        else_expr: Index,
-    };
-
-    pub const ContainerField = struct {
-        value_expr: Index,
-        align_expr: Index,
-    };
-
-    pub const GlobalVarDecl = struct {
-        type_node: Index,
-        align_node: Index,
-        section_node: Index,
-    };
-
-    pub const Slice = struct {
-        start: Index,
-        end: Index,
-    };
-
-    pub const SliceSentinel = struct {
-        start: Index,
-        /// May be 0 if the slice is "open"
-        end: Index,
-        sentinel: Index,
-    };
-
-    pub const While = struct {
-        cont_expr: Index,
-        then_expr: Index,
-        else_expr: Index,
-    };
-
-    pub const WhileCont = struct {
-        cont_expr: Index,
-        then_expr: Index,
-    };
-
-    pub const FnProtoOne = struct {
-        /// Populated if there is exactly 1 parameter. Otherwise there are 0 parameters.
-        param: Index,
-        /// Populated if align(A) is present.
-        align_expr: Index,
-        /// Populated if linksection(A) is present.
-        section_expr: Index,
-        /// Populated if callconv(A) is present.
-        callconv_expr: Index,
-    };
-
-    pub const FnProto = struct {
-        params_start: Index,
-        params_end: Index,
-        /// Populated if align(A) is present.
-        align_expr: Index,
-        /// Populated if linksection(A) is present.
-        section_expr: Index,
-        /// Populated if callconv(A) is present.
-        callconv_expr: Index,
-    };
-
-    pub const Asm = struct {
-        items_start: Index,
-        items_end: Index,
-        /// Needed to make lastToken() work.
-        rparen: TokenIndex,
-    };
-};
lib/std/zig/parse.zig
@@ -1,19 +1,18 @@
 const std = @import("../std.zig");
 const assert = std.debug.assert;
 const Allocator = std.mem.Allocator;
-const ast = std.zig.ast;
-const Node = ast.Node;
-const Tree = ast.Tree;
-const AstError = ast.Error;
-const TokenIndex = ast.TokenIndex;
+const Ast = std.zig.Ast;
+const Node = Ast.Node;
+const AstError = Ast.Error;
+const TokenIndex = Ast.TokenIndex;
 const Token = std.zig.Token;
 
 pub const Error = error{ParseError} || Allocator.Error;
 
 /// Result should be freed with tree.deinit() when there are
 /// no more references to any of the tokens or nodes.
-pub fn parse(gpa: *Allocator, source: [:0]const u8) Allocator.Error!Tree {
-    var tokens = ast.TokenList{};
+pub fn parse(gpa: *Allocator, source: [:0]const u8) Allocator.Error!Ast {
+    var tokens = Ast.TokenList{};
     defer tokens.deinit(gpa);
 
     // Empirically, the zig std lib has an 8:1 ratio of source bytes to token count.
@@ -69,7 +68,7 @@ pub fn parse(gpa: *Allocator, source: [:0]const u8) Allocator.Error!Tree {
     };
 
     // TODO experiment with compacting the MultiArrayList slices here
-    return Tree{
+    return Ast{
         .source = source,
         .tokens = tokens.toOwnedSlice(),
         .nodes = parser.nodes.toOwnedSlice(),
@@ -80,15 +79,15 @@ pub fn parse(gpa: *Allocator, source: [:0]const u8) Allocator.Error!Tree {
 
 const null_node: Node.Index = 0;
 
-/// Represents in-progress parsing, will be converted to an ast.Tree after completion.
+/// Represents in-progress parsing, will be converted to an Ast after completion.
 const Parser = struct {
     gpa: *Allocator,
     source: []const u8,
     token_tags: []const Token.Tag,
-    token_starts: []const ast.ByteOffset,
+    token_starts: []const Ast.ByteOffset,
     tok_i: TokenIndex,
     errors: std.ArrayListUnmanaged(AstError),
-    nodes: ast.NodeList,
+    nodes: Ast.NodeList,
     extra_data: std.ArrayListUnmanaged(Node.Index),
     scratch: std.ArrayListUnmanaged(Node.Index),
 
@@ -121,13 +120,13 @@ const Parser = struct {
         };
     }
 
-    fn addNode(p: *Parser, elem: ast.NodeList.Elem) Allocator.Error!Node.Index {
+    fn addNode(p: *Parser, elem: Ast.NodeList.Elem) Allocator.Error!Node.Index {
         const result = @intCast(Node.Index, p.nodes.len);
         try p.nodes.append(p.gpa, elem);
         return result;
     }
 
-    fn setNode(p: *Parser, i: usize, elem: ast.NodeList.Elem) Node.Index {
+    fn setNode(p: *Parser, i: usize, elem: Ast.NodeList.Elem) Node.Index {
         p.nodes.set(i, elem);
         return @intCast(Node.Index, i);
     }
@@ -148,7 +147,7 @@ const Parser = struct {
         return result;
     }
 
-    fn warn(p: *Parser, tag: ast.Error.Tag) error{OutOfMemory}!void {
+    fn warn(p: *Parser, tag: Ast.Error.Tag) error{OutOfMemory}!void {
         @setCold(true);
         try p.warnMsg(.{ .tag = tag, .token = p.tok_i });
     }
@@ -161,12 +160,12 @@ const Parser = struct {
             .extra = .{ .expected_tag = expected_token },
         });
     }
-    fn warnMsg(p: *Parser, msg: ast.Error) error{OutOfMemory}!void {
+    fn warnMsg(p: *Parser, msg: Ast.Error) error{OutOfMemory}!void {
         @setCold(true);
         try p.errors.append(p.gpa, msg);
     }
 
-    fn fail(p: *Parser, tag: ast.Error.Tag) error{ ParseError, OutOfMemory } {
+    fn fail(p: *Parser, tag: Ast.Error.Tag) error{ ParseError, OutOfMemory } {
         @setCold(true);
         return p.failMsg(.{ .tag = tag, .token = p.tok_i });
     }
@@ -180,7 +179,7 @@ const Parser = struct {
         });
     }
 
-    fn failMsg(p: *Parser, msg: ast.Error) error{ ParseError, OutOfMemory } {
+    fn failMsg(p: *Parser, msg: Ast.Error) error{ ParseError, OutOfMemory } {
         @setCold(true);
         try p.warnMsg(msg);
         return error.ParseError;
lib/std/zig/parser_test.zig
@@ -5308,7 +5308,7 @@ fn testCanonical(source: [:0]const u8) !void {
     return testTransform(source, source);
 }
 
-const Error = std.zig.ast.Error.Tag;
+const Error = std.zig.Ast.Error.Tag;
 
 fn testError(source: [:0]const u8, expected_errors: []const Error) !void {
     var tree = try std.zig.parse(std.testing.allocator, source);
lib/std/zig/render.zig
@@ -3,17 +3,17 @@ const assert = std.debug.assert;
 const mem = std.mem;
 const Allocator = std.mem.Allocator;
 const meta = std.meta;
-const ast = std.zig.ast;
+const Ast = std.zig.Ast;
 const Token = std.zig.Token;
 
 const indent_delta = 4;
 const asm_indent_delta = 2;
 
-pub const Error = ast.Tree.RenderError;
+pub const Error = Ast.RenderError;
 
 const Ais = AutoIndentingStream(std.ArrayList(u8).Writer);
 
-pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void {
+pub fn renderTree(buffer: *std.ArrayList(u8), tree: Ast) Error!void {
     assert(tree.errors.len == 0); // Cannot render an invalid tree.
     var auto_indenting_stream = Ais{
         .indent_delta = indent_delta,
@@ -37,7 +37,7 @@ pub fn renderTree(buffer: *std.ArrayList(u8), tree: ast.Tree) Error!void {
 }
 
 /// Render all members in the given slice, keeping empty lines where appropriate
-fn renderMembers(gpa: *Allocator, ais: *Ais, tree: ast.Tree, members: []const ast.Node.Index) Error!void {
+fn renderMembers(gpa: *Allocator, ais: *Ais, tree: Ast, members: []const Ast.Node.Index) Error!void {
     if (members.len == 0) return;
     try renderMember(gpa, ais, tree, members[0], .newline);
     for (members[1..]) |member| {
@@ -46,7 +46,7 @@ fn renderMembers(gpa: *Allocator, ais: *Ais, tree: ast.Tree, members: []const as
     }
 }
 
-fn renderMember(gpa: *Allocator, ais: *Ais, tree: ast.Tree, decl: ast.Node.Index, space: Space) Error!void {
+fn renderMember(gpa: *Allocator, ais: *Ais, tree: Ast, decl: Ast.Node.Index, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const main_tokens = tree.nodes.items(.main_token);
     const datas = tree.nodes.items(.data);
@@ -83,9 +83,9 @@ fn renderMember(gpa: *Allocator, ais: *Ais, tree: ast.Tree, decl: ast.Node.Index
             switch (tree.nodes.items(.tag)[fn_proto]) {
                 .fn_proto_one, .fn_proto => {
                     const callconv_expr = if (tree.nodes.items(.tag)[fn_proto] == .fn_proto_one)
-                        tree.extraData(datas[fn_proto].lhs, ast.Node.FnProtoOne).callconv_expr
+                        tree.extraData(datas[fn_proto].lhs, Ast.Node.FnProtoOne).callconv_expr
                     else
-                        tree.extraData(datas[fn_proto].lhs, ast.Node.FnProto).callconv_expr;
+                        tree.extraData(datas[fn_proto].lhs, Ast.Node.FnProto).callconv_expr;
                     if (callconv_expr != 0 and tree.nodes.items(.tag)[callconv_expr] == .enum_literal) {
                         if (mem.eql(u8, "Inline", tree.tokenSlice(main_tokens[callconv_expr]))) {
                             try ais.writer().writeAll("inline ");
@@ -168,7 +168,7 @@ fn renderMember(gpa: *Allocator, ais: *Ais, tree: ast.Tree, decl: ast.Node.Index
 }
 
 /// Render all expressions in the slice, keeping empty lines where appropriate
-fn renderExpressions(gpa: *Allocator, ais: *Ais, tree: ast.Tree, expressions: []const ast.Node.Index, space: Space) Error!void {
+fn renderExpressions(gpa: *Allocator, ais: *Ais, tree: Ast, expressions: []const Ast.Node.Index, space: Space) Error!void {
     if (expressions.len == 0) return;
     try renderExpression(gpa, ais, tree, expressions[0], space);
     for (expressions[1..]) |expression| {
@@ -177,7 +177,7 @@ fn renderExpressions(gpa: *Allocator, ais: *Ais, tree: ast.Tree, expressions: []
     }
 }
 
-fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+fn renderExpression(gpa: *Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const main_tokens = tree.nodes.items(.main_token);
     const node_tags = tree.nodes.items(.tag);
@@ -220,7 +220,7 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         .block_two,
         .block_two_semicolon,
         => {
-            const statements = [2]ast.Node.Index{ datas[node].lhs, datas[node].rhs };
+            const statements = [2]Ast.Node.Index{ datas[node].lhs, datas[node].rhs };
             if (datas[node].lhs == 0) {
                 return renderBlock(gpa, ais, tree, node, statements[0..0], space);
             } else if (datas[node].rhs == 0) {
@@ -413,11 +413,11 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         .ptr_type_bit_range => return renderPtrType(gpa, ais, tree, tree.ptrTypeBitRange(node), space),
 
         .array_init_one, .array_init_one_comma => {
-            var elements: [1]ast.Node.Index = undefined;
+            var elements: [1]Ast.Node.Index = undefined;
             return renderArrayInit(gpa, ais, tree, tree.arrayInitOne(&elements, node), space);
         },
         .array_init_dot_two, .array_init_dot_two_comma => {
-            var elements: [2]ast.Node.Index = undefined;
+            var elements: [2]Ast.Node.Index = undefined;
             return renderArrayInit(gpa, ais, tree, tree.arrayInitDotTwo(&elements, node), space);
         },
         .array_init_dot,
@@ -428,11 +428,11 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         => return renderArrayInit(gpa, ais, tree, tree.arrayInit(node), space),
 
         .struct_init_one, .struct_init_one_comma => {
-            var fields: [1]ast.Node.Index = undefined;
+            var fields: [1]Ast.Node.Index = undefined;
             return renderStructInit(gpa, ais, tree, node, tree.structInitOne(&fields, node), space);
         },
         .struct_init_dot_two, .struct_init_dot_two_comma => {
-            var fields: [2]ast.Node.Index = undefined;
+            var fields: [2]Ast.Node.Index = undefined;
             return renderStructInit(gpa, ais, tree, node, tree.structInitDotTwo(&fields, node), space);
         },
         .struct_init_dot,
@@ -443,7 +443,7 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         => return renderStructInit(gpa, ais, tree, node, tree.structInit(node), space),
 
         .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return renderCall(gpa, ais, tree, tree.callOne(&params, node), space);
         },
 
@@ -536,7 +536,7 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         => return renderContainerDecl(gpa, ais, tree, node, tree.containerDecl(node), space),
 
         .container_decl_two, .container_decl_two_trailing => {
-            var buffer: [2]ast.Node.Index = undefined;
+            var buffer: [2]Ast.Node.Index = undefined;
             return renderContainerDecl(gpa, ais, tree, node, tree.containerDeclTwo(&buffer, node), space);
         },
         .container_decl_arg,
@@ -548,7 +548,7 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         => return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnion(node), space),
 
         .tagged_union_two, .tagged_union_two_trailing => {
-            var buffer: [2]ast.Node.Index = undefined;
+            var buffer: [2]Ast.Node.Index = undefined;
             return renderContainerDecl(gpa, ais, tree, node, tree.taggedUnionTwo(&buffer, node), space);
         },
         .tagged_union_enum_tag,
@@ -619,12 +619,12 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         },
 
         .fn_proto_simple => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return renderFnProto(gpa, ais, tree, tree.fnProtoSimple(&params, node), space);
         },
         .fn_proto_multi => return renderFnProto(gpa, ais, tree, tree.fnProtoMulti(node), space),
         .fn_proto_one => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return renderFnProto(gpa, ais, tree, tree.fnProtoOne(&params, node), space);
         },
         .fn_proto => return renderFnProto(gpa, ais, tree, tree.fnProto(node), space),
@@ -645,7 +645,7 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
         => {
             const switch_token = main_tokens[node];
             const condition = datas[node].lhs;
-            const extra = tree.extraData(datas[node].rhs, ast.Node.SubRange);
+            const extra = tree.extraData(datas[node].rhs, Ast.Node.SubRange);
             const cases = tree.extra_data[extra.start..extra.end];
             const rparen = tree.lastToken(condition) + 1;
 
@@ -704,8 +704,8 @@ fn renderExpression(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.I
 fn renderArrayType(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    array_type: ast.full.ArrayType,
+    tree: Ast,
+    array_type: Ast.full.ArrayType,
     space: Space,
 ) Error!void {
     const rbracket = tree.firstToken(array_type.ast.elem_type) - 1;
@@ -726,8 +726,8 @@ fn renderArrayType(
 fn renderPtrType(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    ptr_type: ast.full.PtrType,
+    tree: Ast,
+    ptr_type: Ast.full.PtrType,
     space: Space,
 ) Error!void {
     switch (ptr_type.size) {
@@ -811,9 +811,9 @@ fn renderPtrType(
 fn renderSlice(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    slice_node: ast.Node.Index,
-    slice: ast.full.Slice,
+    tree: Ast,
+    slice_node: Ast.Node.Index,
+    slice: Ast.full.Slice,
     space: Space,
 ) Error!void {
     const node_tags = tree.nodes.items(.tag);
@@ -847,8 +847,8 @@ fn renderSlice(
 fn renderAsmOutput(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    asm_output: ast.Node.Index,
+    tree: Ast,
+    asm_output: Ast.Node.Index,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -877,8 +877,8 @@ fn renderAsmOutput(
 fn renderAsmInput(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    asm_input: ast.Node.Index,
+    tree: Ast,
+    asm_input: Ast.Node.Index,
     space: Space,
 ) Error!void {
     const node_tags = tree.nodes.items(.tag);
@@ -896,7 +896,7 @@ fn renderAsmInput(
     return renderToken(ais, tree, datas[asm_input].rhs, space); // rparen
 }
 
-fn renderVarDecl(gpa: *Allocator, ais: *Ais, tree: ast.Tree, var_decl: ast.full.VarDecl) Error!void {
+fn renderVarDecl(gpa: *Allocator, ais: *Ais, tree: Ast, var_decl: Ast.full.VarDecl) Error!void {
     if (var_decl.visib_token) |visib_token| {
         try renderToken(ais, tree, visib_token, Space.space); // pub
     }
@@ -985,7 +985,7 @@ fn renderVarDecl(gpa: *Allocator, ais: *Ais, tree: ast.Tree, var_decl: ast.full.
     return renderToken(ais, tree, var_decl.ast.mut_token + 2, .newline); // ;
 }
 
-fn renderIf(gpa: *Allocator, ais: *Ais, tree: ast.Tree, if_node: ast.full.If, space: Space) Error!void {
+fn renderIf(gpa: *Allocator, ais: *Ais, tree: Ast, if_node: Ast.full.If, space: Space) Error!void {
     return renderWhile(gpa, ais, tree, .{
         .ast = .{
             .while_token = if_node.ast.if_token,
@@ -1004,7 +1004,7 @@ fn renderIf(gpa: *Allocator, ais: *Ais, tree: ast.Tree, if_node: ast.full.If, sp
 
 /// Note that this function is additionally used to render if and for expressions, with
 /// respective values set to null.
-fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full.While, space: Space) Error!void {
+fn renderWhile(gpa: *Allocator, ais: *Ais, tree: Ast, while_node: Ast.full.While, space: Space) Error!void {
     const node_tags = tree.nodes.items(.tag);
     const token_tags = tree.tokens.items(.tag);
 
@@ -1109,8 +1109,8 @@ fn renderWhile(gpa: *Allocator, ais: *Ais, tree: ast.Tree, while_node: ast.full.
 fn renderContainerField(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    field: ast.full.ContainerField,
+    tree: Ast,
+    field: Ast.full.ContainerField,
     space: Space,
 ) Error!void {
     if (field.comptime_token) |t| {
@@ -1183,9 +1183,9 @@ fn renderContainerField(
 fn renderBuiltinCall(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    builtin_token: ast.TokenIndex,
-    params: []const ast.Node.Index,
+    tree: Ast,
+    builtin_token: Ast.TokenIndex,
+    params: []const Ast.Node.Index,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -1238,7 +1238,7 @@ fn renderBuiltinCall(
     }
 }
 
-fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full.FnProto, space: Space) Error!void {
+fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: Ast, fn_proto: Ast.full.FnProto, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const token_starts = tree.tokens.items(.start);
 
@@ -1438,8 +1438,8 @@ fn renderFnProto(gpa: *Allocator, ais: *Ais, tree: ast.Tree, fn_proto: ast.full.
 fn renderSwitchCase(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    switch_case: ast.full.SwitchCase,
+    tree: Ast,
+    switch_case: Ast.full.SwitchCase,
     space: Space,
 ) Error!void {
     const node_tags = tree.nodes.items(.tag);
@@ -1491,9 +1491,9 @@ fn renderSwitchCase(
 fn renderBlock(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    block_node: ast.Node.Index,
-    statements: []const ast.Node.Index,
+    tree: Ast,
+    block_node: Ast.Node.Index,
+    statements: []const Ast.Node.Index,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -1531,9 +1531,9 @@ fn renderBlock(
 fn renderStructInit(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    struct_node: ast.Node.Index,
-    struct_init: ast.full.StructInit,
+    tree: Ast,
+    struct_node: Ast.Node.Index,
+    struct_init: Ast.full.StructInit,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -1590,8 +1590,8 @@ fn renderStructInit(
 fn renderArrayInit(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    array_init: ast.full.ArrayInit,
+    tree: Ast,
+    array_init: Ast.full.ArrayInit,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -1787,9 +1787,9 @@ fn renderArrayInit(
 fn renderContainerDecl(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    container_decl_node: ast.Node.Index,
-    container_decl: ast.full.ContainerDecl,
+    tree: Ast,
+    container_decl_node: Ast.Node.Index,
+    container_decl: Ast.full.ContainerDecl,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -1799,7 +1799,7 @@ fn renderContainerDecl(
         try renderToken(ais, tree, layout_token, .space);
     }
 
-    var lbrace: ast.TokenIndex = undefined;
+    var lbrace: Ast.TokenIndex = undefined;
     if (container_decl.ast.enum_token) |enum_token| {
         try renderToken(ais, tree, container_decl.ast.main_token, .none); // union
         try renderToken(ais, tree, enum_token - 1, .none); // lparen
@@ -1869,8 +1869,8 @@ fn renderContainerDecl(
 fn renderAsm(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    asm_node: ast.full.Asm,
+    tree: Ast,
+    asm_node: Ast.full.Asm,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -2018,8 +2018,8 @@ fn renderAsm(
 fn renderCall(
     gpa: *Allocator,
     ais: *Ais,
-    tree: ast.Tree,
-    call: ast.full.Call,
+    tree: Ast,
+    call: Ast.full.Call,
     space: Space,
 ) Error!void {
     const token_tags = tree.tokens.items(.tag);
@@ -2091,7 +2091,7 @@ fn renderCall(
 
 /// Renders the given expression indented, popping the indent before rendering
 /// any following line comments
-fn renderExpressionIndented(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+fn renderExpressionIndented(gpa: *Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index, space: Space) Error!void {
     const token_starts = tree.tokens.items(.start);
     const token_tags = tree.tokens.items(.tag);
 
@@ -2148,7 +2148,7 @@ fn renderExpressionIndented(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: as
 }
 
 /// Render an expression, and the comma that follows it, if it is present in the source.
-fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.Node.Index, space: Space) Error!void {
+fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: Ast, node: Ast.Node.Index, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const maybe_comma = tree.lastToken(node) + 1;
     if (token_tags[maybe_comma] == .comma) {
@@ -2159,7 +2159,7 @@ fn renderExpressionComma(gpa: *Allocator, ais: *Ais, tree: ast.Tree, node: ast.N
     }
 }
 
-fn renderTokenComma(ais: *Ais, tree: ast.Tree, token: ast.TokenIndex, space: Space) Error!void {
+fn renderTokenComma(ais: *Ais, tree: Ast, token: Ast.TokenIndex, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const maybe_comma = token + 1;
     if (token_tags[maybe_comma] == .comma) {
@@ -2191,7 +2191,7 @@ const Space = enum {
     skip,
 };
 
-fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Space) Error!void {
+fn renderToken(ais: *Ais, tree: Ast, token_index: Ast.TokenIndex, space: Space) Error!void {
     const token_tags = tree.tokens.items(.tag);
     const token_starts = tree.tokens.items(.start);
 
@@ -2238,7 +2238,7 @@ fn renderToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex, space: Sp
 /// `start_token` to `end_token`. This is used to determine if e.g. a
 /// fn_proto should be wrapped and have a trailing comma inserted even if
 /// there is none in the source.
-fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+fn hasComment(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
     const token_starts = tree.tokens.items(.start);
 
     var i = start_token;
@@ -2253,7 +2253,7 @@ fn hasComment(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenI
 
 /// Returns true if there exists a multiline string literal between the start
 /// of token `start_token` and the start of token `end_token`.
-fn hasMultilineString(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+fn hasMultilineString(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
     const token_tags = tree.tokens.items(.tag);
 
     for (token_tags[start_token..end_token]) |tag| {
@@ -2268,7 +2268,7 @@ fn hasMultilineString(tree: ast.Tree, start_token: ast.TokenIndex, end_token: as
 
 /// Assumes that start is the first byte past the previous token and
 /// that end is the last byte before the next token.
-fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!bool {
+fn renderComments(ais: *Ais, tree: Ast, start: usize, end: usize) Error!bool {
     var index: usize = start;
     while (mem.indexOf(u8, tree.source[index..end], "//")) |offset| {
         const comment_start = index + offset;
@@ -2325,12 +2325,12 @@ fn renderComments(ais: *Ais, tree: ast.Tree, start: usize, end: usize) Error!boo
     return index != start;
 }
 
-fn renderExtraNewline(ais: *Ais, tree: ast.Tree, node: ast.Node.Index) Error!void {
+fn renderExtraNewline(ais: *Ais, tree: Ast, node: Ast.Node.Index) Error!void {
     return renderExtraNewlineToken(ais, tree, tree.firstToken(node));
 }
 
 /// Check if there is an empty line immediately before the given token. If so, render it.
-fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenIndex) Error!void {
+fn renderExtraNewlineToken(ais: *Ais, tree: Ast, token_index: Ast.TokenIndex) Error!void {
     const token_starts = tree.tokens.items(.start);
     const token_start = token_starts[token_index];
     if (token_start == 0) return;
@@ -2355,7 +2355,7 @@ fn renderExtraNewlineToken(ais: *Ais, tree: ast.Tree, token_index: ast.TokenInde
 
 /// end_token is the token one past the last doc comment token. This function
 /// searches backwards from there.
-fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error!void {
+fn renderDocComments(ais: *Ais, tree: Ast, end_token: Ast.TokenIndex) Error!void {
     // Search backwards for the first doc comment.
     const token_tags = tree.tokens.items(.tag);
     if (end_token == 0) return;
@@ -2376,7 +2376,7 @@ fn renderDocComments(ais: *Ais, tree: ast.Tree, end_token: ast.TokenIndex) Error
 }
 
 /// start_token is first container doc comment token.
-fn renderContainerDocComments(ais: *Ais, tree: ast.Tree, start_token: ast.TokenIndex) Error!void {
+fn renderContainerDocComments(ais: *Ais, tree: Ast, start_token: Ast.TokenIndex) Error!void {
     const token_tags = tree.tokens.items(.tag);
     var tok = start_token;
     while (token_tags[tok] == .container_doc_comment) : (tok += 1) {
@@ -2390,7 +2390,7 @@ fn renderContainerDocComments(ais: *Ais, tree: ast.Tree, start_token: ast.TokenI
     }
 }
 
-fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 {
+fn tokenSliceForRender(tree: Ast, token_index: Ast.TokenIndex) []const u8 {
     var ret = tree.tokenSlice(token_index);
     if (tree.tokens.items(.tag)[token_index] == .multiline_string_literal_line) {
         assert(ret[ret.len - 1] == '\n');
@@ -2399,7 +2399,7 @@ fn tokenSliceForRender(tree: ast.Tree, token_index: ast.TokenIndex) []const u8 {
     return ret;
 }
 
-fn hasSameLineComment(tree: ast.Tree, token_index: ast.TokenIndex) bool {
+fn hasSameLineComment(tree: Ast, token_index: Ast.TokenIndex) bool {
     const token_starts = tree.tokens.items(.start);
     const between_source = tree.source[token_starts[token_index]..token_starts[token_index + 1]];
     for (between_source) |byte| switch (byte) {
@@ -2412,7 +2412,7 @@ fn hasSameLineComment(tree: ast.Tree, token_index: ast.TokenIndex) bool {
 
 /// Returns `true` if and only if there are any tokens or line comments between
 /// start_token and end_token.
-fn anythingBetween(tree: ast.Tree, start_token: ast.TokenIndex, end_token: ast.TokenIndex) bool {
+fn anythingBetween(tree: Ast, start_token: Ast.TokenIndex, end_token: Ast.TokenIndex) bool {
     if (start_token + 1 != end_token) return true;
     const token_starts = tree.tokens.items(.start);
     const between_source = tree.source[token_starts[start_token]..token_starts[start_token + 1]];
@@ -2431,7 +2431,7 @@ fn writeFixingWhitespace(writer: std.ArrayList(u8).Writer, slice: []const u8) Er
     };
 }
 
-fn nodeIsBlock(tag: ast.Node.Tag) bool {
+fn nodeIsBlock(tag: Ast.Node.Tag) bool {
     return switch (tag) {
         .block,
         .block_semicolon,
@@ -2450,7 +2450,7 @@ fn nodeIsBlock(tag: ast.Node.Tag) bool {
     };
 }
 
-fn nodeIsIfForWhileSwitch(tag: ast.Node.Tag) bool {
+fn nodeIsIfForWhileSwitch(tag: Ast.Node.Tag) bool {
     return switch (tag) {
         .@"if",
         .if_simple,
@@ -2466,7 +2466,7 @@ fn nodeIsIfForWhileSwitch(tag: ast.Node.Tag) bool {
     };
 }
 
-fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool {
+fn nodeCausesSliceOpSpace(tag: Ast.Node.Tag) bool {
     return switch (tag) {
         .@"catch",
         .add,
@@ -2516,7 +2516,7 @@ fn nodeCausesSliceOpSpace(tag: ast.Node.Tag) bool {
 }
 
 // Returns the number of nodes in `expr` that are on the same line as `rtoken`.
-fn rowSize(tree: ast.Tree, exprs: []const ast.Node.Index, rtoken: ast.TokenIndex) usize {
+fn rowSize(tree: Ast, exprs: []const Ast.Node.Index, rtoken: Ast.TokenIndex) usize {
     const token_tags = tree.tokens.items(.tag);
 
     const first_token = tree.firstToken(exprs[0]);
lib/std/zig.zig
@@ -10,7 +10,7 @@ pub const fmtEscapes = fmt.fmtEscapes;
 pub const isValidId = fmt.isValidId;
 pub const parse = @import("zig/parse.zig").parse;
 pub const string_literal = @import("zig/string_literal.zig");
-pub const ast = @import("zig/ast.zig");
+pub const Ast = @import("zig/Ast.zig");
 pub const system = @import("zig/system.zig");
 pub const CrossTarget = @import("zig/cross_target.zig").CrossTarget;
 
src/translate_c/ast.zig
@@ -714,9 +714,9 @@ pub const Payload = struct {
     };
 };
 
-/// Converts the nodes into a Zig ast.
+/// Converts the nodes into a Zig Ast.
 /// Caller must free the source slice.
-pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree {
+pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.Ast {
     var ctx = Context{
         .gpa = gpa,
         .buf = std.ArrayList(u8).init(gpa),
@@ -767,7 +767,7 @@ pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree {
         .start = @intCast(u32, ctx.buf.items.len),
     });
 
-    return std.zig.ast.Tree{
+    return std.zig.Ast{
         .source = try ctx.buf.toOwnedSliceSentinel(0),
         .tokens = ctx.tokens.toOwnedSlice(),
         .nodes = ctx.nodes.toOwnedSlice(),
@@ -776,17 +776,17 @@ pub fn render(gpa: *Allocator, nodes: []const Node) !std.zig.ast.Tree {
     };
 }
 
-const NodeIndex = std.zig.ast.Node.Index;
-const NodeSubRange = std.zig.ast.Node.SubRange;
-const TokenIndex = std.zig.ast.TokenIndex;
+const NodeIndex = std.zig.Ast.Node.Index;
+const NodeSubRange = std.zig.Ast.Node.SubRange;
+const TokenIndex = std.zig.Ast.TokenIndex;
 const TokenTag = std.zig.Token.Tag;
 
 const Context = struct {
     gpa: *Allocator,
     buf: std.ArrayList(u8) = .{},
-    nodes: std.zig.ast.NodeList = .{},
-    extra_data: std.ArrayListUnmanaged(std.zig.ast.Node.Index) = .{},
-    tokens: std.zig.ast.TokenList = .{},
+    nodes: std.zig.Ast.NodeList = .{},
+    extra_data: std.ArrayListUnmanaged(std.zig.Ast.Node.Index) = .{},
+    tokens: std.zig.Ast.TokenList = .{},
 
     fn addTokenFmt(c: *Context, tag: TokenTag, comptime format: []const u8, args: anytype) Allocator.Error!TokenIndex {
         const start_index = c.buf.items.len;
@@ -831,7 +831,7 @@ const Context = struct {
         };
     }
 
-    fn addNode(c: *Context, elem: std.zig.ast.NodeList.Elem) Allocator.Error!NodeIndex {
+    fn addNode(c: *Context, elem: std.zig.Ast.NodeList.Elem) Allocator.Error!NodeIndex {
         const result = @intCast(NodeIndex, c.nodes.len);
         try c.nodes.append(c.gpa, elem);
         return result;
@@ -1166,7 +1166,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
                 .main_token = l_bracket,
                 .data = .{
                     .lhs = string,
-                    .rhs = try c.addExtra(std.zig.ast.Node.Slice{
+                    .rhs = try c.addExtra(std.zig.Ast.Node.Slice{
                         .start = start,
                         .end = end,
                     }),
@@ -1601,7 +1601,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
                     .main_token = while_tok,
                     .data = .{
                         .lhs = cond,
-                        .rhs = try c.addExtra(std.zig.ast.Node.WhileCont{
+                        .rhs = try c.addExtra(std.zig.Ast.Node.WhileCont{
                             .cont_expr = cont_expr,
                             .then_expr = body,
                         }),
@@ -1654,7 +1654,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
                 .main_token = if_tok,
                 .data = .{
                     .lhs = cond,
-                    .rhs = try c.addExtra(std.zig.ast.Node.If{
+                    .rhs = try c.addExtra(std.zig.Ast.Node.If{
                         .then_expr = then_expr,
                         .else_expr = else_expr,
                     }),
@@ -2175,7 +2175,7 @@ fn renderNullSentinelArrayType(c: *Context, len: usize, elem_type: Node) !NodeIn
         .main_token = l_bracket,
         .data = .{
             .lhs = len_expr,
-            .rhs = try c.addExtra(std.zig.ast.Node.ArrayTypeSentinel{
+            .rhs = try c.addExtra(std.zig.Ast.Node.ArrayTypeSentinel{
                 .sentinel = sentinel_expr,
                 .elem_type = elem_type_expr,
             }),
@@ -2378,7 +2378,7 @@ fn renderNodeGrouped(c: *Context, node: Node) !NodeIndex {
     }
 }
 
-fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
     const payload = @fieldParentPtr(Payload.UnOp, "base", node.ptr_otherwise).data;
     return c.addNode(.{
         .tag = tag,
@@ -2390,7 +2390,7 @@ fn renderPrefixOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: T
     });
 }
 
-fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
     const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
     const lhs = try renderNodeGrouped(c, payload.lhs);
     return c.addNode(.{
@@ -2403,7 +2403,7 @@ fn renderBinOpGrouped(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_ta
     });
 }
 
-fn renderBinOp(c: *Context, node: Node, tag: std.zig.ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
+fn renderBinOp(c: *Context, node: Node, tag: std.zig.Ast.Node.Tag, tok_tag: TokenTag, bytes: []const u8) !NodeIndex {
     const payload = @fieldParentPtr(Payload.BinOp, "base", node.ptr_otherwise).data;
     const lhs = try renderNode(c, payload.lhs);
     return c.addNode(.{
@@ -2604,7 +2604,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
                 .tag = .local_var_decl,
                 .main_token = mut_tok,
                 .data = .{
-                    .lhs = try c.addExtra(std.zig.ast.Node.LocalVarDecl{
+                    .lhs = try c.addExtra(std.zig.Ast.Node.LocalVarDecl{
                         .type_node = type_node,
                         .align_node = align_node,
                     }),
@@ -2617,7 +2617,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
             .tag = .global_var_decl,
             .main_token = mut_tok,
             .data = .{
-                .lhs = try c.addExtra(std.zig.ast.Node.GlobalVarDecl{
+                .lhs = try c.addExtra(std.zig.Ast.Node.GlobalVarDecl{
                     .type_node = type_node,
                     .align_node = align_node,
                     .section_node = section_node,
@@ -2709,7 +2709,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
                 .tag = .fn_proto_one,
                 .main_token = fn_token,
                 .data = .{
-                    .lhs = try c.addExtra(std.zig.ast.Node.FnProtoOne{
+                    .lhs = try c.addExtra(std.zig.Ast.Node.FnProtoOne{
                         .param = params.items[0],
                         .align_expr = align_expr,
                         .section_expr = section_expr,
@@ -2723,7 +2723,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
                 .tag = .fn_proto,
                 .main_token = fn_token,
                 .data = .{
-                    .lhs = try c.addExtra(std.zig.ast.Node.FnProto{
+                    .lhs = try c.addExtra(std.zig.Ast.Node.FnProto{
                         .params_start = span.start,
                         .params_end = span.end,
                         .align_expr = align_expr,
@@ -2781,7 +2781,7 @@ fn renderMacroFunc(c: *Context, node: Node) !NodeIndex {
                 .tag = .fn_proto_multi,
                 .main_token = fn_token,
                 .data = .{
-                    .lhs = try c.addExtra(std.zig.ast.Node.SubRange{
+                    .lhs = try c.addExtra(std.zig.Ast.Node.SubRange{
                         .start = span.start,
                         .end = span.end,
                     }),
src/AstGen.zig
@@ -2,7 +2,7 @@
 const AstGen = @This();
 
 const std = @import("std");
-const ast = std.zig.ast;
+const Ast = std.zig.Ast;
 const mem = std.mem;
 const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
@@ -13,7 +13,7 @@ const trace = @import("tracy.zig").trace;
 const BuiltinFn = @import("BuiltinFn.zig");
 
 gpa: *Allocator,
-tree: *const ast.Tree,
+tree: *const Ast,
 instructions: std.MultiArrayList(Zir.Inst) = .{},
 extra: ArrayListUnmanaged(u32) = .{},
 string_bytes: ArrayListUnmanaged(u8) = .{},
@@ -36,7 +36,7 @@ compile_errors: ArrayListUnmanaged(Zir.Inst.CompileErrors.Item) = .{},
 fn_block: ?*GenZir = null,
 /// Maps string table indexes to the first `@import` ZIR instruction
 /// that uses this string as the operand.
-imports: std.AutoArrayHashMapUnmanaged(u32, ast.TokenIndex) = .{},
+imports: std.AutoArrayHashMapUnmanaged(u32, Ast.TokenIndex) = .{},
 
 const InnerError = error{ OutOfMemory, AnalysisFail };
 
@@ -70,7 +70,7 @@ fn appendRefsAssumeCapacity(astgen: *AstGen, refs: []const Zir.Inst.Ref) void {
     astgen.extra.appendSliceAssumeCapacity(coerced);
 }
 
-pub fn generate(gpa: *Allocator, tree: ast.Tree) Allocator.Error!Zir {
+pub fn generate(gpa: *Allocator, tree: Ast) Allocator.Error!Zir {
     var arena = std.heap.ArenaAllocator.init(gpa);
     defer arena.deinit();
 
@@ -106,7 +106,7 @@ pub fn generate(gpa: *Allocator, tree: ast.Tree) Allocator.Error!Zir {
     };
     defer gen_scope.instructions.deinit(gpa);
 
-    const container_decl: ast.full.ContainerDecl = .{
+    const container_decl: Ast.full.ContainerDecl = .{
         .layout_token = null,
         .ast = .{
             .main_token = undefined,
@@ -265,7 +265,7 @@ pub const bool_rl: ResultLoc = .{ .ty = .bool_type };
 pub const type_rl: ResultLoc = .{ .ty = .type_type };
 pub const coerced_type_rl: ResultLoc = .{ .coerced_ty = .type_type };
 
-fn typeExpr(gz: *GenZir, scope: *Scope, type_node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn typeExpr(gz: *GenZir, scope: *Scope, type_node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const prev_force_comptime = gz.force_comptime;
     gz.force_comptime = true;
     defer gz.force_comptime = prev_force_comptime;
@@ -278,8 +278,8 @@ fn reachableExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    src_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    src_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const result_inst = try expr(gz, scope, rl, node);
     if (gz.refIsNoReturn(result_inst)) {
@@ -290,7 +290,7 @@ fn reachableExpr(
     return result_inst;
 }
 
-fn lvalExpr(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn lvalExpr(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_tags = tree.nodes.items(.tag);
@@ -481,7 +481,7 @@ fn lvalExpr(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Ins
 /// When `rl` is discard, ptr, inferred_ptr, or inferred_ptr, the
 /// result instruction can be used to inspect whether it is isNoReturn() but that is it,
 /// it must otherwise not be used.
-fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const main_tokens = tree.nodes.items(.main_token);
@@ -640,13 +640,13 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
 
         .builtin_call_two, .builtin_call_two_comma => {
             if (node_datas[node].lhs == 0) {
-                const params = [_]ast.Node.Index{};
+                const params = [_]Ast.Node.Index{};
                 return builtinCall(gz, scope, rl, node, &params);
             } else if (node_datas[node].rhs == 0) {
-                const params = [_]ast.Node.Index{node_datas[node].lhs};
+                const params = [_]Ast.Node.Index{node_datas[node].lhs};
                 return builtinCall(gz, scope, rl, node, &params);
             } else {
-                const params = [_]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
+                const params = [_]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
                 return builtinCall(gz, scope, rl, node, &params);
             }
         },
@@ -656,7 +656,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         },
 
         .call_one, .call_one_comma, .async_call_one, .async_call_one_comma => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return callExpr(gz, scope, rl, node, tree.callOne(&params, node));
         },
         .call, .call_comma, .async_call, .async_call_comma => {
@@ -704,7 +704,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         },
         .slice => {
             const lhs = try expr(gz, scope, .ref, node_datas[node].lhs);
-            const extra = tree.extraData(node_datas[node].rhs, ast.Node.Slice);
+            const extra = tree.extraData(node_datas[node].rhs, Ast.Node.Slice);
             const start = try expr(gz, scope, .{ .ty = .usize_type }, extra.start);
             const end = try expr(gz, scope, .{ .ty = .usize_type }, extra.end);
             const result = try gz.addPlNode(.slice_end, node, Zir.Inst.SliceEnd{
@@ -722,7 +722,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         },
         .slice_sentinel => {
             const lhs = try expr(gz, scope, .ref, node_datas[node].lhs);
-            const extra = tree.extraData(node_datas[node].rhs, ast.Node.SliceSentinel);
+            const extra = tree.extraData(node_datas[node].rhs, Ast.Node.SliceSentinel);
             const start = try expr(gz, scope, .{ .ty = .usize_type }, extra.start);
             const end = if (extra.end != 0) try expr(gz, scope, .{ .ty = .usize_type }, extra.end) else .none;
             const sentinel = try expr(gz, scope, .{ .ty = .usize_type }, extra.sentinel);
@@ -773,7 +773,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
             ), node),
         },
         .block_two, .block_two_semicolon => {
-            const statements = [2]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
+            const statements = [2]Ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
             if (node_datas[node].lhs == 0) {
                 return blockExpr(gz, scope, rl, node, statements[0..0]);
             } else if (node_datas[node].rhs == 0) {
@@ -796,7 +796,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         },
         .@"catch" => {
             const catch_token = main_tokens[node];
-            const payload_token: ?ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe)
+            const payload_token: ?Ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe)
                 catch_token + 2
             else
                 null;
@@ -863,7 +863,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         .container_decl_trailing,
         => return containerDecl(gz, scope, rl, node, tree.containerDecl(node)),
         .container_decl_two, .container_decl_two_trailing => {
-            var buffer: [2]ast.Node.Index = undefined;
+            var buffer: [2]Ast.Node.Index = undefined;
             return containerDecl(gz, scope, rl, node, tree.containerDeclTwo(&buffer, node));
         },
         .container_decl_arg,
@@ -874,7 +874,7 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         .tagged_union_trailing,
         => return containerDecl(gz, scope, rl, node, tree.taggedUnion(node)),
         .tagged_union_two, .tagged_union_two_trailing => {
-            var buffer: [2]ast.Node.Index = undefined;
+            var buffer: [2]Ast.Node.Index = undefined;
             return containerDecl(gz, scope, rl, node, tree.taggedUnionTwo(&buffer, node));
         },
         .tagged_union_enum_tag,
@@ -900,11 +900,11 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         .@"try" => return tryExpr(gz, scope, rl, node, node_datas[node].lhs),
 
         .array_init_one, .array_init_one_comma => {
-            var elements: [1]ast.Node.Index = undefined;
+            var elements: [1]Ast.Node.Index = undefined;
             return arrayInitExpr(gz, scope, rl, node, tree.arrayInitOne(&elements, node));
         },
         .array_init_dot_two, .array_init_dot_two_comma => {
-            var elements: [2]ast.Node.Index = undefined;
+            var elements: [2]Ast.Node.Index = undefined;
             return arrayInitExpr(gz, scope, rl, node, tree.arrayInitDotTwo(&elements, node));
         },
         .array_init_dot,
@@ -915,11 +915,11 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         => return arrayInitExpr(gz, scope, rl, node, tree.arrayInit(node)),
 
         .struct_init_one, .struct_init_one_comma => {
-            var fields: [1]ast.Node.Index = undefined;
+            var fields: [1]Ast.Node.Index = undefined;
             return structInitExpr(gz, scope, rl, node, tree.structInitOne(&fields, node));
         },
         .struct_init_dot_two, .struct_init_dot_two_comma => {
-            var fields: [2]ast.Node.Index = undefined;
+            var fields: [2]Ast.Node.Index = undefined;
             return structInitExpr(gz, scope, rl, node, tree.structInitDotTwo(&fields, node));
         },
         .struct_init_dot,
@@ -930,14 +930,14 @@ fn expr(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerEr
         => return structInitExpr(gz, scope, rl, node, tree.structInit(node)),
 
         .fn_proto_simple => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return fnProtoExpr(gz, scope, rl, tree.fnProtoSimple(&params, node));
         },
         .fn_proto_multi => {
             return fnProtoExpr(gz, scope, rl, tree.fnProtoMulti(node));
         },
         .fn_proto_one => {
-            var params: [1]ast.Node.Index = undefined;
+            var params: [1]Ast.Node.Index = undefined;
             return fnProtoExpr(gz, scope, rl, tree.fnProtoOne(&params, node));
         },
         .fn_proto => {
@@ -950,7 +950,7 @@ fn nosuspendExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -971,7 +971,7 @@ fn nosuspendExpr(
 fn suspendExpr(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const gpa = astgen.gpa;
@@ -1011,7 +1011,7 @@ fn awaitExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -1033,7 +1033,7 @@ fn resumeExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -1048,7 +1048,7 @@ fn fnProtoExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    fn_proto: ast.full.FnProto,
+    fn_proto: Ast.full.FnProto,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const gpa = astgen.gpa;
@@ -1159,8 +1159,8 @@ fn arrayInitExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    array_init: ast.full.ArrayInit,
+    node: Ast.Node.Index,
+    array_init: Ast.full.ArrayInit,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -1179,7 +1179,7 @@ fn arrayInitExpr(
         };
 
         infer: {
-            const array_type: ast.full.ArrayType = switch (node_tags[array_init.ast.type_expr]) {
+            const array_type: Ast.full.ArrayType = switch (node_tags[array_init.ast.type_expr]) {
                 .array_type => tree.arrayType(array_init.ast.type_expr),
                 .array_type_sentinel => tree.arrayTypeSentinel(array_init.ast.type_expr),
                 else => break :infer,
@@ -1256,8 +1256,8 @@ fn arrayInitExpr(
 fn arrayInitExprRlNone(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    elements: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    elements: []const Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -1278,8 +1278,8 @@ fn arrayInitExprRlNone(
 fn arrayInitExprRlTy(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    elements: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    elements: []const Ast.Node.Index,
     elem_ty_inst: Zir.Inst.Ref,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
@@ -1304,8 +1304,8 @@ fn arrayInitExprRlTy(
 fn arrayInitExprRlPtr(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    elements: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    elements: []const Ast.Node.Index,
     result_ptr: Zir.Inst.Ref,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -1334,8 +1334,8 @@ fn structInitExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    struct_init: ast.full.StructInit,
+    node: Ast.Node.Index,
+    struct_init: Ast.full.StructInit,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -1347,7 +1347,7 @@ fn structInitExpr(
     } else array: {
         const node_tags = tree.nodes.items(.tag);
         const main_tokens = tree.nodes.items(.main_token);
-        const array_type: ast.full.ArrayType = switch (node_tags[struct_init.ast.type_expr]) {
+        const array_type: Ast.full.ArrayType = switch (node_tags[struct_init.ast.type_expr]) {
             .array_type => tree.arrayType(struct_init.ast.type_expr),
             .array_type_sentinel => tree.arrayTypeSentinel(struct_init.ast.type_expr),
             else => break :array,
@@ -1420,8 +1420,8 @@ fn structInitExpr(
 fn structInitExprRlNone(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    struct_init: ast.full.StructInit,
+    node: Ast.Node.Index,
+    struct_init: Ast.full.StructInit,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -1454,8 +1454,8 @@ fn structInitExprRlNone(
 fn structInitExprRlPtr(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    struct_init: ast.full.StructInit,
+    node: Ast.Node.Index,
+    struct_init: Ast.full.StructInit,
     result_ptr: Zir.Inst.Ref,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -1488,8 +1488,8 @@ fn structInitExprRlPtr(
 fn structInitExprRlTy(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    struct_init: ast.full.StructInit,
+    node: Ast.Node.Index,
+    struct_init: Ast.full.StructInit,
     ty_inst: Zir.Inst.Ref,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
@@ -1530,7 +1530,7 @@ fn comptimeExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const prev_force_comptime = gz.force_comptime;
     gz.force_comptime = true;
@@ -1546,7 +1546,7 @@ fn comptimeExprAst(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     if (gz.force_comptime) {
@@ -1561,7 +1561,7 @@ fn comptimeExprAst(
     return result;
 }
 
-fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -1636,7 +1636,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: ast.Node.Index) Inn
     }
 }
 
-fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -1694,8 +1694,8 @@ fn blockExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    block_node: ast.Node.Index,
-    statements: []const ast.Node.Index,
+    block_node: Ast.Node.Index,
+    statements: []const Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const tracy = trace(@src());
     defer tracy.end();
@@ -1716,7 +1716,7 @@ fn blockExpr(
     return rvalue(gz, rl, .void_value, block_node);
 }
 
-fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: ast.TokenIndex) !void {
+fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: Ast.TokenIndex) !void {
     // Look for the label in the scope.
     var scope = parent_scope;
     while (true) {
@@ -1752,8 +1752,8 @@ fn labeledBlockExpr(
     gz: *GenZir,
     parent_scope: *Scope,
     rl: ResultLoc,
-    block_node: ast.Node.Index,
-    statements: []const ast.Node.Index,
+    block_node: Ast.Node.Index,
+    statements: []const Ast.Node.Index,
     zir_tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const tracy = trace(@src());
@@ -1829,7 +1829,7 @@ fn labeledBlockExpr(
     }
 }
 
-fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Node.Index) !void {
+fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Node.Index) !void {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_tags = tree.nodes.items(.tag);
@@ -1837,7 +1837,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
     var block_arena = std.heap.ArenaAllocator.init(gz.astgen.gpa);
     defer block_arena.deinit();
 
-    var noreturn_src_node: ast.Node.Index = 0;
+    var noreturn_src_node: Ast.Node.Index = 0;
     var scope = parent_scope;
     for (statements) |statement| {
         if (noreturn_src_node != 0) {
@@ -1892,12 +1892,12 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
 
 /// Returns AST source node of the thing that is noreturn if the statement is definitely `noreturn`.
 /// Otherwise returns 0.
-fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) InnerError!ast.Node.Index {
+fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: Ast.Node.Index) InnerError!Ast.Node.Index {
     try emitDbgNode(gz, statement);
     // We need to emit an error if the result is not `noreturn` or `void`, but
     // we want to avoid adding the ZIR instruction if possible for performance.
     const maybe_unused_result = try expr(gz, scope, .none, statement);
-    var noreturn_src_node: ast.Node.Index = 0;
+    var noreturn_src_node: Ast.Node.Index = 0;
     const elide_check = if (refToIndex(maybe_unused_result)) |inst| b: {
         // Note that this array becomes invalid after appending more items to it
         // in the above while loop.
@@ -2344,7 +2344,7 @@ fn checkUsed(
 
 fn makeDeferScope(
     scope: *Scope,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     block_arena: *Allocator,
     scope_tag: Scope.Tag,
 ) InnerError!*Scope {
@@ -2360,9 +2360,9 @@ fn makeDeferScope(
 fn varDecl(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     block_arena: *Allocator,
-    var_decl: ast.full.VarDecl,
+    var_decl: Ast.full.VarDecl,
 ) InnerError!*Scope {
     try emitDbgNode(gz, node);
     const astgen = gz.astgen;
@@ -2574,7 +2574,7 @@ fn varDecl(
     }
 }
 
-fn emitDbgNode(gz: *GenZir, node: ast.Node.Index) !void {
+fn emitDbgNode(gz: *GenZir, node: Ast.Node.Index) !void {
     // The instruction emitted here is for debugging runtime code.
     // If the current block will be evaluated only during semantic analysis
     // then no dbg_stmt ZIR instruction is needed.
@@ -2598,7 +2598,7 @@ fn emitDbgNode(gz: *GenZir, node: ast.Node.Index) !void {
     } });
 }
 
-fn assign(gz: *GenZir, scope: *Scope, infix_node: ast.Node.Index) InnerError!void {
+fn assign(gz: *GenZir, scope: *Scope, infix_node: Ast.Node.Index) InnerError!void {
     try emitDbgNode(gz, infix_node);
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -2623,7 +2623,7 @@ fn assign(gz: *GenZir, scope: *Scope, infix_node: ast.Node.Index) InnerError!voi
 fn assignOp(
     gz: *GenZir,
     scope: *Scope,
-    infix_node: ast.Node.Index,
+    infix_node: Ast.Node.Index,
     op_inst_tag: Zir.Inst.Tag,
 ) InnerError!void {
     try emitDbgNode(gz, infix_node);
@@ -2646,7 +2646,7 @@ fn assignOp(
 fn assignShift(
     gz: *GenZir,
     scope: *Scope,
-    infix_node: ast.Node.Index,
+    infix_node: Ast.Node.Index,
     op_inst_tag: Zir.Inst.Tag,
 ) InnerError!void {
     try emitDbgNode(gz, infix_node);
@@ -2666,7 +2666,7 @@ fn assignShift(
     _ = try gz.addBin(.store, lhs_ptr, result);
 }
 
-fn boolNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn boolNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -2676,7 +2676,7 @@ fn boolNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) Inne
     return rvalue(gz, rl, result, node);
 }
 
-fn bitNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn bitNot(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -2690,7 +2690,7 @@ fn negation(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -2706,8 +2706,8 @@ fn ptrType(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    ptr_info: ast.full.PtrType,
+    node: Ast.Node.Index,
+    ptr_info: Ast.full.PtrType,
 ) InnerError!Zir.Inst.Ref {
     const elem_type = try typeExpr(gz, scope, ptr_info.ast.child_type);
 
@@ -2788,7 +2788,7 @@ fn ptrType(
     return rvalue(gz, rl, result, node);
 }
 
-fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !Zir.Inst.Ref {
+fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) !Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -2808,13 +2808,13 @@ fn arrayType(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !Z
     return rvalue(gz, rl, result, node);
 }
 
-fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !Zir.Inst.Ref {
+fn arrayTypeSentinel(gz: *GenZir, scope: *Scope, rl: ResultLoc, node: Ast.Node.Index) !Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
     const node_tags = tree.nodes.items(.tag);
     const main_tokens = tree.nodes.items(.main_token);
-    const extra = tree.extraData(node_datas[node].rhs, ast.Node.ArrayTypeSentinel);
+    const extra = tree.extraData(node_datas[node].rhs, Ast.Node.ArrayTypeSentinel);
 
     const len_node = node_datas[node].lhs;
     if (node_tags[len_node] == .identifier and
@@ -2870,9 +2870,9 @@ fn fnDecl(
     gz: *GenZir,
     scope: *Scope,
     wip_decls: *WipDecls,
-    decl_node: ast.Node.Index,
-    body_node: ast.Node.Index,
-    fn_proto: ast.full.FnProto,
+    decl_node: Ast.Node.Index,
+    body_node: Ast.Node.Index,
+    fn_proto: Ast.full.FnProto,
 ) InnerError!void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -3135,8 +3135,8 @@ fn globalVarDecl(
     gz: *GenZir,
     scope: *Scope,
     wip_decls: *WipDecls,
-    node: ast.Node.Index,
-    var_decl: ast.full.VarDecl,
+    node: Ast.Node.Index,
+    var_decl: Ast.full.VarDecl,
 ) InnerError!void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -3279,7 +3279,7 @@ fn comptimeDecl(
     gz: *GenZir,
     scope: *Scope,
     wip_decls: *WipDecls,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -3326,7 +3326,7 @@ fn usingnamespaceDecl(
     gz: *GenZir,
     scope: *Scope,
     wip_decls: *WipDecls,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -3377,7 +3377,7 @@ fn testDecl(
     gz: *GenZir,
     scope: *Scope,
     wip_decls: *WipDecls,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -3468,8 +3468,8 @@ fn testDecl(
 fn structDeclInner(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    container_decl: ast.full.ContainerDecl,
+    node: Ast.Node.Index,
+    container_decl: Ast.full.ContainerDecl,
     layout: std.builtin.TypeInfo.ContainerLayout,
 ) InnerError!Zir.Inst.Ref {
     if (container_decl.ast.members.len == 0) {
@@ -3537,7 +3537,7 @@ fn structDeclInner(
                 const body = node_datas[member_node].rhs;
                 switch (node_tags[fn_proto]) {
                     .fn_proto_simple => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoSimple(&params, fn_proto)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -3552,7 +3552,7 @@ fn structDeclInner(
                         continue;
                     },
                     .fn_proto_one => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoOne(&params, fn_proto)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -3570,7 +3570,7 @@ fn structDeclInner(
                 }
             },
             .fn_proto_simple => {
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoSimple(&params, member_node)) catch |err| switch (err) {
                     error.OutOfMemory => return error.OutOfMemory,
                     error.AnalysisFail => {},
@@ -3585,7 +3585,7 @@ fn structDeclInner(
                 continue;
             },
             .fn_proto_one => {
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoOne(&params, member_node)) catch |err| switch (err) {
                     error.OutOfMemory => return error.OutOfMemory,
                     error.AnalysisFail => {},
@@ -3750,10 +3750,10 @@ fn structDeclInner(
 fn unionDeclInner(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
-    members: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    members: []const Ast.Node.Index,
     layout: std.builtin.TypeInfo.ContainerLayout,
-    arg_node: ast.Node.Index,
+    arg_node: Ast.Node.Index,
     have_auto_enum: bool,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -3812,7 +3812,7 @@ fn unionDeclInner(
                 const body = node_datas[member_node].rhs;
                 switch (node_tags[fn_proto]) {
                     .fn_proto_simple => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoSimple(&params, fn_proto)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -3827,7 +3827,7 @@ fn unionDeclInner(
                         continue;
                     },
                     .fn_proto_one => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoOne(&params, fn_proto)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -3845,7 +3845,7 @@ fn unionDeclInner(
                 }
             },
             .fn_proto_simple => {
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoSimple(&params, member_node)) catch |err| switch (err) {
                     error.OutOfMemory => return error.OutOfMemory,
                     error.AnalysisFail => {},
@@ -3860,7 +3860,7 @@ fn unionDeclInner(
                 continue;
             },
             .fn_proto_one => {
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoOne(&params, member_node)) catch |err| switch (err) {
                     error.OutOfMemory => return error.OutOfMemory,
                     error.AnalysisFail => {},
@@ -4033,8 +4033,8 @@ fn containerDecl(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    container_decl: ast.full.ContainerDecl,
+    node: Ast.Node.Index,
+    container_decl: Ast.full.ContainerDecl,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const gpa = astgen.gpa;
@@ -4084,7 +4084,7 @@ fn containerDecl(
                 var values: usize = 0;
                 var total_fields: usize = 0;
                 var decls: usize = 0;
-                var nonexhaustive_node: ast.Node.Index = 0;
+                var nonexhaustive_node: Ast.Node.Index = 0;
                 for (container_decl.ast.members) |member_node| {
                     const member = switch (node_tags[member_node]) {
                         .container_field_init => tree.containerFieldInit(member_node),
@@ -4225,7 +4225,7 @@ fn containerDecl(
                         const body = node_datas[member_node].rhs;
                         switch (node_tags[fn_proto]) {
                             .fn_proto_simple => {
-                                var params: [1]ast.Node.Index = undefined;
+                                var params: [1]Ast.Node.Index = undefined;
                                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoSimple(&params, fn_proto)) catch |err| switch (err) {
                                     error.OutOfMemory => return error.OutOfMemory,
                                     error.AnalysisFail => {},
@@ -4240,7 +4240,7 @@ fn containerDecl(
                                 continue;
                             },
                             .fn_proto_one => {
-                                var params: [1]ast.Node.Index = undefined;
+                                var params: [1]Ast.Node.Index = undefined;
                                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoOne(&params, fn_proto)) catch |err| switch (err) {
                                     error.OutOfMemory => return error.OutOfMemory,
                                     error.AnalysisFail => {},
@@ -4258,7 +4258,7 @@ fn containerDecl(
                         }
                     },
                     .fn_proto_simple => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoSimple(&params, member_node)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -4273,7 +4273,7 @@ fn containerDecl(
                         continue;
                     },
                     .fn_proto_one => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoOne(&params, member_node)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -4441,7 +4441,7 @@ fn containerDecl(
                         const body = node_datas[member_node].rhs;
                         switch (node_tags[fn_proto]) {
                             .fn_proto_simple => {
-                                var params: [1]ast.Node.Index = undefined;
+                                var params: [1]Ast.Node.Index = undefined;
                                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoSimple(&params, fn_proto)) catch |err| switch (err) {
                                     error.OutOfMemory => return error.OutOfMemory,
                                     error.AnalysisFail => {},
@@ -4456,7 +4456,7 @@ fn containerDecl(
                                 continue;
                             },
                             .fn_proto_one => {
-                                var params: [1]ast.Node.Index = undefined;
+                                var params: [1]Ast.Node.Index = undefined;
                                 astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, body, tree.fnProtoOne(&params, fn_proto)) catch |err| switch (err) {
                                     error.OutOfMemory => return error.OutOfMemory,
                                     error.AnalysisFail => {},
@@ -4474,7 +4474,7 @@ fn containerDecl(
                         }
                     },
                     .fn_proto_simple => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoSimple(&params, member_node)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -4489,7 +4489,7 @@ fn containerDecl(
                         continue;
                     },
                     .fn_proto_one => {
-                        var params: [1]ast.Node.Index = undefined;
+                        var params: [1]Ast.Node.Index = undefined;
                         astgen.fnDecl(gz, &namespace.base, &wip_decls, member_node, 0, tree.fnProtoOne(&params, member_node)) catch |err| switch (err) {
                             error.OutOfMemory => return error.OutOfMemory,
                             error.AnalysisFail => {},
@@ -4590,7 +4590,7 @@ fn containerDecl(
     }
 }
 
-fn errorSetDecl(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn errorSetDecl(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const gpa = astgen.gpa;
     const tree = astgen.tree;
@@ -4629,8 +4629,8 @@ fn tryExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    operand_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    operand_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
 
@@ -4705,13 +4705,13 @@ fn orelseCatchExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs: Ast.Node.Index,
     cond_op: Zir.Inst.Tag,
     unwrap_op: Zir.Inst.Tag,
     unwrap_code_op: Zir.Inst.Tag,
-    rhs: ast.Node.Index,
-    payload_token: ?ast.TokenIndex,
+    rhs: Ast.Node.Index,
+    payload_token: ?Ast.TokenIndex,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const tree = astgen.tree;
@@ -4796,7 +4796,7 @@ fn orelseCatchExpr(
 fn finishThenElseBlock(
     parent_gz: *GenZir,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     block_scope: *GenZir,
     then_scope: *GenZir,
     else_scope: *GenZir,
@@ -4852,7 +4852,7 @@ fn finishThenElseBlock(
 /// tokens without allocating.
 /// OK in theory it could do it without allocating. This implementation
 /// allocates when the @"" form is used.
-fn tokenIdentEql(astgen: *AstGen, token1: ast.TokenIndex, token2: ast.TokenIndex) !bool {
+fn tokenIdentEql(astgen: *AstGen, token1: Ast.TokenIndex, token2: Ast.TokenIndex) !bool {
     const ident_name_1 = try astgen.identifierTokenString(token1);
     const ident_name_2 = try astgen.identifierTokenString(token2);
     return mem.eql(u8, ident_name_1, ident_name_2);
@@ -4862,7 +4862,7 @@ fn fieldAccess(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -4889,7 +4889,7 @@ fn arrayAccess(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -4912,7 +4912,7 @@ fn simpleBinOp(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     op_inst_tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -4929,8 +4929,8 @@ fn simpleBinOp(
 fn simpleStrTok(
     gz: *GenZir,
     rl: ResultLoc,
-    ident_token: ast.TokenIndex,
-    node: ast.Node.Index,
+    ident_token: Ast.TokenIndex,
+    node: Ast.Node.Index,
     op_inst_tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -4943,7 +4943,7 @@ fn boolBinOp(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     zir_tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
@@ -4969,8 +4969,8 @@ fn ifExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    if_full: ast.full.If,
+    node: Ast.Node.Index,
+    if_full: Ast.full.If,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const tree = astgen.tree;
@@ -5089,7 +5089,7 @@ fn ifExpr(
 
     const else_node = if_full.ast.else_expr;
     const else_info: struct {
-        src: ast.Node.Index,
+        src: Ast.Node.Index,
         result: Zir.Inst.Ref,
     } = if (else_node != 0) blk: {
         block_scope.break_count += 1;
@@ -5215,8 +5215,8 @@ fn whileExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    while_full: ast.full.While,
+    node: Ast.Node.Index,
+    while_full: Ast.full.While,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const tree = astgen.tree;
@@ -5368,7 +5368,7 @@ fn whileExpr(
 
     const else_node = while_full.ast.else_expr;
     const else_info: struct {
-        src: ast.Node.Index,
+        src: Ast.Node.Index,
         result: Zir.Inst.Ref,
     } = if (else_node != 0) blk: {
         loop_scope.break_count += 1;
@@ -5435,8 +5435,8 @@ fn forExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    for_full: ast.full.While,
+    node: Ast.Node.Index,
+    for_full: Ast.full.While,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
 
@@ -5577,7 +5577,7 @@ fn forExpr(
 
     const else_node = for_full.ast.else_expr;
     const else_info: struct {
-        src: ast.Node.Index,
+        src: Ast.Node.Index,
         result: Zir.Inst.Ref,
     } = if (else_node != 0) blk: {
         loop_scope.break_count += 1;
@@ -5618,7 +5618,7 @@ fn switchExpr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    switch_node: ast.Node.Index,
+    switch_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = parent_gz.astgen;
     const gpa = astgen.gpa;
@@ -5628,7 +5628,7 @@ fn switchExpr(
     const main_tokens = tree.nodes.items(.main_token);
     const token_tags = tree.tokens.items(.tag);
     const operand_node = node_datas[switch_node].lhs;
-    const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
+    const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
     const case_nodes = tree.extra_data[extra.start..extra.end];
 
     // We perform two passes over the AST. This first pass is to collect information
@@ -5638,9 +5638,9 @@ fn switchExpr(
     var scalar_cases_len: u32 = 0;
     var multi_cases_len: u32 = 0;
     var special_prong: Zir.SpecialProng = .none;
-    var special_node: ast.Node.Index = 0;
-    var else_src: ?ast.TokenIndex = null;
-    var underscore_src: ?ast.TokenIndex = null;
+    var special_node: Ast.Node.Index = 0;
+    var else_src: ?Ast.TokenIndex = null;
+    var underscore_src: ?Ast.TokenIndex = null;
     for (case_nodes) |case_node| {
         const case = switch (node_tags[case_node]) {
             .switch_case_one => tree.switchCaseOne(case_node),
@@ -6212,7 +6212,7 @@ fn switchExpr(
     }
 }
 
-fn ret(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
@@ -6311,7 +6311,7 @@ fn identifier(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    ident: ast.Node.Index,
+    ident: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const tracy = trace(@src());
     defer tracy.end();
@@ -6363,8 +6363,8 @@ fn identifier(
     // Local variables, including function parameters.
     const name_str_index = try astgen.identAsString(ident_token);
     var s = scope;
-    var found_already: ?ast.Node.Index = null; // we have found a decl with the same name already
-    var hit_namespace: ast.Node.Index = 0;
+    var found_already: ?Ast.Node.Index = null; // we have found a decl with the same name already
+    var hit_namespace: Ast.Node.Index = 0;
     while (true) switch (s.tag) {
         .local_val => {
             const local_val = s.cast(Scope.LocalVal).?;
@@ -6434,7 +6434,7 @@ fn identifier(
 fn stringLiteral(
     gz: *GenZir,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -6454,7 +6454,7 @@ fn stringLiteral(
 fn multilineStringLiteral(
     gz: *GenZir,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const str = try astgen.strLitNodeAsString(node);
@@ -6468,7 +6468,7 @@ fn multilineStringLiteral(
     return rvalue(gz, rl, result, node);
 }
 
-fn charLiteral(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) !Zir.Inst.Ref {
+fn charLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) !Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const main_tokens = tree.nodes.items(.main_token);
@@ -6547,7 +6547,7 @@ fn charLiteral(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) !Zir.Inst.Ref {
     }
 }
 
-fn integerLiteral(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn integerLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const main_tokens = tree.nodes.items(.main_token);
@@ -6593,7 +6593,7 @@ fn integerLiteral(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) InnerError!Z
     return rvalue(gz, rl, result, node);
 }
 
-fn floatLiteral(gz: *GenZir, rl: ResultLoc, node: ast.Node.Index) InnerError!Zir.Inst.Ref {
+fn floatLiteral(gz: *GenZir, rl: ResultLoc, node: Ast.Node.Index) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
     const main_tokens = tree.nodes.items(.main_token);
@@ -6633,8 +6633,8 @@ fn asmExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    full: ast.full.Asm,
+    node: Ast.Node.Index,
+    full: Ast.full.Asm,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -6791,9 +6791,9 @@ fn as(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs: ast.Node.Index,
-    rhs: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs: Ast.Node.Index,
+    rhs: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const dest_type = try typeExpr(gz, scope, lhs);
     switch (rl) {
@@ -6814,8 +6814,8 @@ fn unionInit(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    params: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    params: []const Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const union_type = try typeExpr(gz, scope, params[0]);
     const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
@@ -6840,9 +6840,9 @@ fn unionInit(
 fn unionInitRlPtr(
     parent_gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     result_ptr: Zir.Inst.Ref,
-    expr_node: ast.Node.Index,
+    expr_node: Ast.Node.Index,
     union_type: Zir.Inst.Ref,
     field_name: Zir.Inst.Ref,
 ) InnerError!Zir.Inst.Ref {
@@ -6859,9 +6859,9 @@ fn asRlPtr(
     parent_gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    src_node: ast.Node.Index,
+    src_node: Ast.Node.Index,
     result_ptr: Zir.Inst.Ref,
-    operand_node: ast.Node.Index,
+    operand_node: Ast.Node.Index,
     dest_type: Zir.Inst.Ref,
 ) InnerError!Zir.Inst.Ref {
     // Detect whether this expr() call goes into rvalue() to store the result into the
@@ -6899,9 +6899,9 @@ fn bitCast(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs: ast.Node.Index,
-    rhs: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs: Ast.Node.Index,
+    rhs: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const dest_type = try typeExpr(gz, scope, lhs);
@@ -6929,10 +6929,10 @@ fn bitCast(
 fn bitCastRlPtr(
     gz: *GenZir,
     scope: *Scope,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     dest_type: Zir.Inst.Ref,
     result_ptr: Zir.Inst.Ref,
-    rhs: ast.Node.Index,
+    rhs: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const casted_result_ptr = try gz.addPlNode(.bitcast_result_ptr, node, Zir.Inst.Bin{
         .lhs = dest_type,
@@ -6945,8 +6945,8 @@ fn typeOf(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    params: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    params: []const Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     if (params.len < 1) {
         return gz.astgen.failNode(node, "expected at least 1 argument, found 0", .{});
@@ -6970,8 +6970,8 @@ fn builtinCall(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    params: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    params: []const Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const tree = astgen.tree;
@@ -7463,7 +7463,7 @@ fn builtinCall(
 fn simpleNoOpVoid(
     gz: *GenZir,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     _ = try gz.addNode(tag, node);
@@ -7474,9 +7474,9 @@ fn hasDeclOrField(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs_node: ast.Node.Index,
-    rhs_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs_node: Ast.Node.Index,
+    rhs_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const container_type = try typeExpr(gz, scope, lhs_node);
@@ -7492,9 +7492,9 @@ fn typeCast(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs_node: ast.Node.Index,
-    rhs_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs_node: Ast.Node.Index,
+    rhs_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
@@ -7508,8 +7508,8 @@ fn simpleUnOpType(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    operand_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    operand_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const operand = try typeExpr(gz, scope, operand_node);
@@ -7521,9 +7521,9 @@ fn simpleUnOp(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     operand_rl: ResultLoc,
-    operand_node: ast.Node.Index,
+    operand_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const operand = try expr(gz, scope, operand_rl, operand_node);
@@ -7535,8 +7535,8 @@ fn cmpxchg(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    params: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    params: []const Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const int_type = try typeExpr(gz, scope, params[0]);
@@ -7565,9 +7565,9 @@ fn bitBuiltin(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    int_type_node: ast.Node.Index,
-    operand_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    int_type_node: Ast.Node.Index,
+    operand_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const int_type = try typeExpr(gz, scope, int_type_node);
@@ -7580,9 +7580,9 @@ fn divBuiltin(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs_node: ast.Node.Index,
-    rhs_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs_node: Ast.Node.Index,
+    rhs_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const result = try gz.addPlNode(tag, node, Zir.Inst.Bin{
@@ -7596,8 +7596,8 @@ fn simpleCBuiltin(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    operand_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    operand_node: Ast.Node.Index,
     tag: Zir.Inst.Extended,
 ) InnerError!Zir.Inst.Ref {
     const operand = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, operand_node);
@@ -7612,9 +7612,9 @@ fn offsetOf(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs_node: ast.Node.Index,
-    rhs_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs_node: Ast.Node.Index,
+    rhs_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const type_inst = try typeExpr(gz, scope, lhs_node);
@@ -7630,9 +7630,9 @@ fn shiftOp(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    lhs_node: ast.Node.Index,
-    rhs_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    lhs_node: Ast.Node.Index,
+    rhs_node: Ast.Node.Index,
     tag: Zir.Inst.Tag,
 ) InnerError!Zir.Inst.Ref {
     const lhs = try expr(gz, scope, .none, lhs_node);
@@ -7649,8 +7649,8 @@ fn cImport(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    body_node: ast.Node.Index,
+    node: Ast.Node.Index,
+    body_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const gpa = astgen.gpa;
@@ -7674,8 +7674,8 @@ fn overflowArithmetic(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    params: []const ast.Node.Index,
+    node: Ast.Node.Index,
+    params: []const Ast.Node.Index,
     tag: Zir.Inst.Extended,
 ) InnerError!Zir.Inst.Ref {
     const int_type = try typeExpr(gz, scope, params[0]);
@@ -7722,8 +7722,8 @@ fn callExpr(
     gz: *GenZir,
     scope: *Scope,
     rl: ResultLoc,
-    node: ast.Node.Index,
-    call: ast.full.Call,
+    node: Ast.Node.Index,
+    call: Ast.full.Call,
 ) InnerError!Zir.Inst.Ref {
     const astgen = gz.astgen;
     const lhs = try expr(gz, scope, .none, call.ast.fn_expr);
@@ -7812,7 +7812,7 @@ pub const simple_types = std.ComptimeStringMap(Zir.Inst.Ref, .{
     .{ "void", .void_type },
 });
 
-fn nodeMayNeedMemoryLocation(tree: *const ast.Tree, start_node: ast.Node.Index) bool {
+fn nodeMayNeedMemoryLocation(tree: *const Ast, start_node: Ast.Node.Index) bool {
     const node_tags = tree.nodes.items(.tag);
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -8021,7 +8021,7 @@ fn nodeMayNeedMemoryLocation(tree: *const ast.Tree, start_node: ast.Node.Index)
     }
 }
 
-fn nodeMayEvalToError(tree: *const ast.Tree, start_node: ast.Node.Index) enum { never, always, maybe } {
+fn nodeMayEvalToError(tree: *const Ast, start_node: Ast.Node.Index) enum { never, always, maybe } {
     const node_tags = tree.nodes.items(.tag);
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -8230,7 +8230,7 @@ fn nodeMayEvalToError(tree: *const ast.Tree, start_node: ast.Node.Index) enum {
     }
 }
 
-fn nodeImpliesRuntimeBits(tree: *const ast.Tree, start_node: ast.Node.Index) bool {
+fn nodeImpliesRuntimeBits(tree: *const Ast, start_node: Ast.Node.Index) bool {
     const node_tags = tree.nodes.items(.tag);
     const node_datas = tree.nodes.items(.data);
 
@@ -8417,7 +8417,7 @@ fn rvalue(
     gz: *GenZir,
     rl: ResultLoc,
     result: Zir.Inst.Ref,
-    src_node: ast.Node.Index,
+    src_node: Ast.Node.Index,
 ) InnerError!Zir.Inst.Ref {
     if (gz.endsWithNoReturn()) return result;
     switch (rl) {
@@ -8522,7 +8522,7 @@ fn rvalue(
 /// and allocates the result within `astgen.arena`.
 /// Otherwise, returns a reference to the source code bytes directly.
 /// See also `appendIdentStr` and `parseStrLit`.
-fn identifierTokenString(astgen: *AstGen, token: ast.TokenIndex) InnerError![]const u8 {
+fn identifierTokenString(astgen: *AstGen, token: Ast.TokenIndex) InnerError![]const u8 {
     const tree = astgen.tree;
     const token_tags = tree.tokens.items(.tag);
     assert(token_tags[token] == .identifier);
@@ -8542,7 +8542,7 @@ fn identifierTokenString(astgen: *AstGen, token: ast.TokenIndex) InnerError![]co
 /// See also `identifierTokenString` and `parseStrLit`.
 fn appendIdentStr(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     buf: *ArrayListUnmanaged(u8),
 ) InnerError!void {
     const tree = astgen.tree;
@@ -8559,7 +8559,7 @@ fn appendIdentStr(
 /// Appends the result to `buf`.
 fn parseStrLit(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     buf: *ArrayListUnmanaged(u8),
     bytes: []const u8,
     offset: u32,
@@ -8623,7 +8623,7 @@ fn parseStrLit(
 
 fn failNode(
     astgen: *AstGen,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     comptime format: []const u8,
     args: anytype,
 ) InnerError {
@@ -8632,7 +8632,7 @@ fn failNode(
 
 fn failNodeNotes(
     astgen: *AstGen,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     comptime format: []const u8,
     args: anytype,
     notes: []const u32,
@@ -8664,7 +8664,7 @@ fn failNodeNotes(
 
 fn failTok(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     comptime format: []const u8,
     args: anytype,
 ) InnerError {
@@ -8673,7 +8673,7 @@ fn failTok(
 
 fn failTokNotes(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     comptime format: []const u8,
     args: anytype,
     notes: []const u32,
@@ -8706,7 +8706,7 @@ fn failTokNotes(
 /// Same as `fail`, except given an absolute byte offset.
 fn failOff(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     byte_offset: u32,
     comptime format: []const u8,
     args: anytype,
@@ -8731,7 +8731,7 @@ fn failOff(
 
 fn errNoteTok(
     astgen: *AstGen,
-    token: ast.TokenIndex,
+    token: Ast.TokenIndex,
     comptime format: []const u8,
     args: anytype,
 ) Allocator.Error!u32 {
@@ -8754,7 +8754,7 @@ fn errNoteTok(
 
 fn errNoteNode(
     astgen: *AstGen,
-    node: ast.Node.Index,
+    node: Ast.Node.Index,
     comptime format: []const u8,
     args: anytype,
 ) Allocator.Error!u32 {
@@ -8775,7 +8775,7 @@ fn errNoteNode(
     });
 }
 
-fn identAsString(astgen: *AstGen, ident_token: ast.TokenIndex) !u32 {
+fn identAsString(astgen: *AstGen, ident_token: Ast.TokenIndex) !u32 {
     const gpa = astgen.gpa;
     const string_bytes = &astgen.string_bytes;
     const str_index = @intCast(u32, string_bytes.items.len);
@@ -8798,7 +8798,7 @@ fn identAsString(astgen: *AstGen, ident_token: ast.TokenIndex) !u32 {
 
 const IndexSlice = struct { index: u32, len: u32 };
 
-fn strLitAsString(astgen: *AstGen, str_lit_token: ast.TokenIndex) !IndexSlice {
+fn strLitAsString(astgen: *AstGen, str_lit_token: Ast.TokenIndex) !IndexSlice {
     const gpa = astgen.gpa;
     const string_bytes = &astgen.string_bytes;
     const str_index = @intCast(u32, string_bytes.items.len);
@@ -8829,7 +8829,7 @@ fn strLitAsString(astgen: *AstGen, str_lit_token: ast.TokenIndex) !IndexSlice {
     }
 }
 
-fn strLitNodeAsString(astgen: *AstGen, node: ast.Node.Index) !IndexSlice {
+fn strLitNodeAsString(astgen: *AstGen, node: Ast.Node.Index) !IndexSlice {
     const tree = astgen.tree;
     const node_datas = tree.nodes.items(.data);
 
@@ -8864,7 +8864,7 @@ fn strLitNodeAsString(astgen: *AstGen, node: ast.Node.Index) !IndexSlice {
     };
 }
 
-fn testNameString(astgen: *AstGen, str_lit_token: ast.TokenIndex) !u32 {
+fn testNameString(astgen: *AstGen, str_lit_token: Ast.TokenIndex) !u32 {
     const gpa = astgen.gpa;
     const string_bytes = &astgen.string_bytes;
     const str_index = @intCast(u32, string_bytes.items.len);
@@ -8921,7 +8921,7 @@ const Scope = struct {
         gen_zir: *GenZir,
         inst: Zir.Inst.Ref,
         /// Source location of the corresponding variable declaration.
-        token_src: ast.TokenIndex,
+        token_src: Ast.TokenIndex,
         /// String table index.
         name: u32,
         id_cat: IdCat,
@@ -8940,7 +8940,7 @@ const Scope = struct {
         gen_zir: *GenZir,
         ptr: Zir.Inst.Ref,
         /// Source location of the corresponding variable declaration.
-        token_src: ast.TokenIndex,
+        token_src: Ast.TokenIndex,
         /// String table index.
         name: u32,
         id_cat: IdCat,
@@ -8955,7 +8955,7 @@ const Scope = struct {
         base: Scope,
         /// Parents can be: `LocalVal`, `LocalPtr`, `GenZir`, `Defer`.
         parent: *Scope,
-        defer_node: ast.Node.Index,
+        defer_node: Ast.Node.Index,
     };
 
     /// Represents a global scope that has any number of declarations in it.
@@ -8967,8 +8967,8 @@ const Scope = struct {
         parent: *Scope,
         /// Maps string table index to the source location of declaration,
         /// for the purposes of reporting name shadowing compile errors.
-        decls: std.AutoHashMapUnmanaged(u32, ast.Node.Index) = .{},
-        node: ast.Node.Index,
+        decls: std.AutoHashMapUnmanaged(u32, Ast.Node.Index) = .{},
+        node: Ast.Node.Index,
     };
 
     const Top = struct {
@@ -8987,7 +8987,7 @@ const GenZir = struct {
     /// How decls created in this scope should be named.
     anon_name_strategy: Zir.Inst.NameStrategy = .anon,
     /// The containing decl AST node.
-    decl_node_index: ast.Node.Index,
+    decl_node_index: Ast.Node.Index,
     /// The containing decl line index, absolute.
     decl_line: u32,
     parent: *Scope,
@@ -9022,8 +9022,8 @@ const GenZir = struct {
     /// a result location pointer.
     labeled_store_to_block_ptr_list: ArrayListUnmanaged(Zir.Inst.Index) = .{},
 
-    suspend_node: ast.Node.Index = 0,
-    nosuspend_node: ast.Node.Index = 0,
+    suspend_node: Ast.Node.Index = 0,
+    nosuspend_node: Ast.Node.Index = 0,
 
     fn makeSubBlock(gz: *GenZir, scope: *Scope) GenZir {
         return .{
@@ -9039,7 +9039,7 @@ const GenZir = struct {
     }
 
     const Label = struct {
-        token: ast.TokenIndex,
+        token: Ast.TokenIndex,
         block_inst: Zir.Inst.Index,
         used: bool = false,
     };
@@ -9060,7 +9060,7 @@ const GenZir = struct {
         return false;
     }
 
-    fn calcLine(gz: GenZir, node: ast.Node.Index) u32 {
+    fn calcLine(gz: GenZir, node: Ast.Node.Index) u32 {
         const astgen = gz.astgen;
         const tree = astgen.tree;
         const source = tree.source;
@@ -9072,15 +9072,15 @@ const GenZir = struct {
         return @intCast(u32, gz.decl_line + astgen.source_line);
     }
 
-    fn nodeIndexToRelative(gz: GenZir, node_index: ast.Node.Index) i32 {
+    fn nodeIndexToRelative(gz: GenZir, node_index: Ast.Node.Index) i32 {
         return @bitCast(i32, node_index) - @bitCast(i32, gz.decl_node_index);
     }
 
-    fn tokenIndexToRelative(gz: GenZir, token: ast.TokenIndex) u32 {
+    fn tokenIndexToRelative(gz: GenZir, token: Ast.TokenIndex) u32 {
         return token - gz.srcToken();
     }
 
-    fn srcToken(gz: GenZir) ast.TokenIndex {
+    fn srcToken(gz: GenZir) Ast.TokenIndex {
         return gz.astgen.tree.firstToken(gz.decl_node_index);
     }
 
@@ -9165,7 +9165,7 @@ const GenZir = struct {
     }
 
     fn addFunc(gz: *GenZir, args: struct {
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
         body: []const Zir.Inst.Index,
         param_block: Zir.Inst.Index,
         ret_ty: []const Zir.Inst.Index,
@@ -9358,7 +9358,7 @@ const GenZir = struct {
         callee: Zir.Inst.Ref,
         args: []const Zir.Inst.Ref,
         /// Absolute node index. This function does the conversion to offset from Decl.
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
     ) !Zir.Inst.Ref {
         assert(callee != .none);
         assert(src_node != 0);
@@ -9449,7 +9449,7 @@ const GenZir = struct {
         tag: Zir.Inst.Tag,
         operand: Zir.Inst.Ref,
         /// Absolute node index. This function does the conversion to offset from Decl.
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
     ) !Zir.Inst.Ref {
         assert(operand != .none);
         return gz.add(.{
@@ -9465,7 +9465,7 @@ const GenZir = struct {
         gz: *GenZir,
         tag: Zir.Inst.Tag,
         /// Absolute node index. This function does the conversion to offset from Decl.
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
         extra: anytype,
     ) !Zir.Inst.Ref {
         const gpa = gz.astgen.gpa;
@@ -9489,7 +9489,7 @@ const GenZir = struct {
         gz: *GenZir,
         tag: Zir.Inst.Tag,
         /// Absolute token index. This function does the conversion to Decl offset.
-        abs_tok_index: ast.TokenIndex,
+        abs_tok_index: Ast.TokenIndex,
         name: u32,
         body: []const u32,
     ) !Zir.Inst.Index {
@@ -9544,7 +9544,7 @@ const GenZir = struct {
     fn addExtendedMultiOp(
         gz: *GenZir,
         opcode: Zir.Inst.Extended,
-        node: ast.Node.Index,
+        node: Ast.Node.Index,
         operands: []const Zir.Inst.Ref,
     ) !Zir.Inst.Ref {
         const astgen = gz.astgen;
@@ -9605,7 +9605,7 @@ const GenZir = struct {
         tag: Zir.Inst.Tag,
         operand: Zir.Inst.Ref,
         /// Absolute token index. This function does the conversion to Decl offset.
-        abs_tok_index: ast.TokenIndex,
+        abs_tok_index: Ast.TokenIndex,
     ) !Zir.Inst.Ref {
         assert(operand != .none);
         return gz.add(.{
@@ -9622,7 +9622,7 @@ const GenZir = struct {
         tag: Zir.Inst.Tag,
         str_index: u32,
         /// Absolute token index. This function does the conversion to Decl offset.
-        abs_tok_index: ast.TokenIndex,
+        abs_tok_index: Ast.TokenIndex,
     ) !Zir.Inst.Ref {
         return gz.add(.{
             .tag = tag,
@@ -9669,7 +9669,7 @@ const GenZir = struct {
         gz: *GenZir,
         tag: Zir.Inst.Tag,
         decl_index: u32,
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
     ) !Zir.Inst.Ref {
         return gz.add(.{
             .tag = tag,
@@ -9684,7 +9684,7 @@ const GenZir = struct {
         gz: *GenZir,
         tag: Zir.Inst.Tag,
         /// Absolute node index. This function does the conversion to offset from Decl.
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
     ) !Zir.Inst.Ref {
         return gz.add(.{
             .tag = tag,
@@ -9696,7 +9696,7 @@ const GenZir = struct {
         gz: *GenZir,
         opcode: Zir.Inst.Extended,
         /// Absolute node index. This function does the conversion to offset from Decl.
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
     ) !Zir.Inst.Ref {
         return gz.add(.{
             .tag = .extended,
@@ -9712,7 +9712,7 @@ const GenZir = struct {
         gz: *GenZir,
         args: struct {
             /// Absolute node index. This function does the conversion to offset from Decl.
-            node: ast.Node.Index,
+            node: Ast.Node.Index,
             type_inst: Zir.Inst.Ref,
             align_inst: Zir.Inst.Ref,
             is_const: bool,
@@ -9763,7 +9763,7 @@ const GenZir = struct {
         gz: *GenZir,
         args: struct {
             /// Absolute node index. This function does the conversion to offset from Decl.
-            node: ast.Node.Index,
+            node: Ast.Node.Index,
             asm_source: u32,
             output_type_bits: u32,
             is_volatile: bool,
@@ -9820,7 +9820,7 @@ const GenZir = struct {
     /// Note that this returns a `Zir.Inst.Index` not a ref.
     /// Does *not* append the block instruction to the scope.
     /// Leaves the `payload_index` field undefined.
-    fn addBlock(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
+    fn addBlock(gz: *GenZir, tag: Zir.Inst.Tag, node: Ast.Node.Index) !Zir.Inst.Index {
         const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
         const gpa = gz.astgen.gpa;
         try gz.astgen.instructions.append(gpa, .{
@@ -9835,7 +9835,7 @@ const GenZir = struct {
 
     /// Note that this returns a `Zir.Inst.Index` not a ref.
     /// Leaves the `payload_index` field undefined.
-    fn addCondBr(gz: *GenZir, tag: Zir.Inst.Tag, node: ast.Node.Index) !Zir.Inst.Index {
+    fn addCondBr(gz: *GenZir, tag: Zir.Inst.Tag, node: Ast.Node.Index) !Zir.Inst.Index {
         const gpa = gz.astgen.gpa;
         try gz.instructions.ensureUnusedCapacity(gpa, 1);
         const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
@@ -9851,7 +9851,7 @@ const GenZir = struct {
     }
 
     fn setStruct(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
         body_len: u32,
         fields_len: u32,
         decls_len: u32,
@@ -9896,7 +9896,7 @@ const GenZir = struct {
     }
 
     fn setUnion(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
         tag_type: Zir.Inst.Ref,
         body_len: u32,
         fields_len: u32,
@@ -9946,7 +9946,7 @@ const GenZir = struct {
     }
 
     fn setEnum(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
-        src_node: ast.Node.Index,
+        src_node: Ast.Node.Index,
         tag_type: Zir.Inst.Ref,
         body_len: u32,
         fields_len: u32,
@@ -10019,7 +10019,7 @@ const GenZir = struct {
         return new_index;
     }
 
-    fn addRet(gz: *GenZir, rl: ResultLoc, operand: Zir.Inst.Ref, node: ast.Node.Index) !void {
+    fn addRet(gz: *GenZir, rl: ResultLoc, operand: Zir.Inst.Ref, node: Ast.Node.Index) !void {
         switch (rl) {
             .ptr => |ret_ptr| _ = try gz.addUnNode(.ret_load, ret_ptr, node),
             .ty => _ = try gz.addUnNode(.ret_node, operand, node),
@@ -10052,7 +10052,7 @@ fn detectLocalShadowing(
     astgen: *AstGen,
     scope: *Scope,
     ident_name: u32,
-    name_token: ast.TokenIndex,
+    name_token: Ast.TokenIndex,
     token_bytes: []const u8,
 ) !void {
     const gpa = astgen.gpa;
@@ -10157,7 +10157,7 @@ fn refToIndex(inst: Zir.Inst.Ref) ?Zir.Inst.Index {
     }
 }
 
-fn scanDecls(astgen: *AstGen, namespace: *Scope.Namespace, members: []const ast.Node.Index) !void {
+fn scanDecls(astgen: *AstGen, namespace: *Scope.Namespace, members: []const Ast.Node.Index) !void {
     const gpa = astgen.gpa;
     const tree = astgen.tree;
     const node_tags = tree.nodes.items(.tag);
src/Compilation.zig
@@ -2408,7 +2408,7 @@ const AstGenSrc = union(enum) {
     root,
     import: struct {
         importing_file: *Module.Scope.File,
-        import_tok: std.zig.ast.TokenIndex,
+        import_tok: std.zig.Ast.TokenIndex,
     },
 };
 
src/main.zig
@@ -6,7 +6,7 @@ const mem = std.mem;
 const process = std.process;
 const Allocator = mem.Allocator;
 const ArrayList = std.ArrayList;
-const ast = std.zig.ast;
+const Ast = std.zig.Ast;
 const warn = std.log.warn;
 
 const Compilation = @import("Compilation.zig");
@@ -3423,8 +3423,8 @@ fn fmtPathFile(
 fn printErrMsgToStdErr(
     gpa: *mem.Allocator,
     arena: *mem.Allocator,
-    parse_error: ast.Error,
-    tree: ast.Tree,
+    parse_error: Ast.Error,
+    tree: Ast,
     path: []const u8,
     color: Color,
 ) !void {
@@ -4029,12 +4029,12 @@ pub fn cmdAstCheck(
     }
 
     {
-        const token_bytes = @sizeOf(std.zig.ast.TokenList) +
-            file.tree.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(std.zig.ast.ByteOffset));
-        const tree_bytes = @sizeOf(std.zig.ast.Tree) + file.tree.nodes.len *
-            (@sizeOf(std.zig.ast.Node.Tag) +
-            @sizeOf(std.zig.ast.Node.Data) +
-            @sizeOf(std.zig.ast.TokenIndex));
+        const token_bytes = @sizeOf(Ast.TokenList) +
+            file.tree.tokens.len * (@sizeOf(std.zig.Token.Tag) + @sizeOf(Ast.ByteOffset));
+        const tree_bytes = @sizeOf(Ast) + file.tree.nodes.len *
+            (@sizeOf(Ast.Node.Tag) +
+            @sizeOf(Ast.Node.Data) +
+            @sizeOf(Ast.TokenIndex));
         const instruction_bytes = file.zir.instructions.len *
             // Here we don't use @sizeOf(Zir.Inst.Data) because it would include
             // the debug safety tag but we want to measure release size.
src/Module.zig
@@ -11,7 +11,7 @@ const log = std.log.scoped(.module);
 const BigIntConst = std.math.big.int.Const;
 const BigIntMutable = std.math.big.int.Mutable;
 const Target = std.Target;
-const ast = std.zig.ast;
+const Ast = std.zig.Ast;
 
 const Module = @This();
 const Compilation = @import("Compilation.zig");
@@ -291,7 +291,7 @@ pub const Decl = struct {
     generation: u32,
     /// The AST node index of this declaration.
     /// Must be recomputed when the corresponding source file is modified.
-    src_node: ast.Node.Index,
+    src_node: Ast.Node.Index,
     /// Line number corresponding to `src_node`. Stored separately so that source files
     /// do not need to be loaded into memory in order to compute debug line numbers.
     src_line: u32,
@@ -499,19 +499,19 @@ pub const Decl = struct {
         return decl.src_line + offset;
     }
 
-    pub fn relativeToNodeIndex(decl: Decl, offset: i32) ast.Node.Index {
-        return @bitCast(ast.Node.Index, offset + @bitCast(i32, decl.src_node));
+    pub fn relativeToNodeIndex(decl: Decl, offset: i32) Ast.Node.Index {
+        return @bitCast(Ast.Node.Index, offset + @bitCast(i32, decl.src_node));
     }
 
-    pub fn nodeIndexToRelative(decl: Decl, node_index: ast.Node.Index) i32 {
+    pub fn nodeIndexToRelative(decl: Decl, node_index: Ast.Node.Index) i32 {
         return @bitCast(i32, node_index) - @bitCast(i32, decl.src_node);
     }
 
-    pub fn tokSrcLoc(decl: Decl, token_index: ast.TokenIndex) LazySrcLoc {
+    pub fn tokSrcLoc(decl: Decl, token_index: Ast.TokenIndex) LazySrcLoc {
         return .{ .token_offset = token_index - decl.srcToken() };
     }
 
-    pub fn nodeSrcLoc(decl: Decl, node_index: ast.Node.Index) LazySrcLoc {
+    pub fn nodeSrcLoc(decl: Decl, node_index: Ast.Node.Index) LazySrcLoc {
         return .{ .node_offset = decl.nodeIndexToRelative(node_index) };
     }
 
@@ -527,7 +527,7 @@ pub const Decl = struct {
         };
     }
 
-    pub fn srcToken(decl: Decl) ast.TokenIndex {
+    pub fn srcToken(decl: Decl) Ast.TokenIndex {
         const tree = &decl.namespace.file_scope.tree;
         return tree.firstToken(decl.src_node);
     }
@@ -1121,7 +1121,7 @@ pub const Scope = struct {
         /// Whether this is populated depends on `status`.
         stat_mtime: i128,
         /// Whether this is populated or not depends on `tree_loaded`.
-        tree: ast.Tree,
+        tree: Ast,
         /// Whether this is populated or not depends on `zir_loaded`.
         zir: Zir,
         /// Package that this file is a part of, managed externally.
@@ -1220,7 +1220,7 @@ pub const Scope = struct {
             return source;
         }
 
-        pub fn getTree(file: *File, gpa: *Allocator) !*const ast.Tree {
+        pub fn getTree(file: *File, gpa: *Allocator) !*const Ast {
             if (file.tree_loaded) return &file.tree;
 
             const source = try file.getSource(gpa);
@@ -1565,17 +1565,17 @@ pub const ErrorMsg = struct {
 pub const SrcLoc = struct {
     file_scope: *Scope.File,
     /// Might be 0 depending on tag of `lazy`.
-    parent_decl_node: ast.Node.Index,
+    parent_decl_node: Ast.Node.Index,
     /// Relative to `parent_decl_node`.
     lazy: LazySrcLoc,
 
-    pub fn declSrcToken(src_loc: SrcLoc) ast.TokenIndex {
+    pub fn declSrcToken(src_loc: SrcLoc) Ast.TokenIndex {
         const tree = src_loc.file_scope.tree;
         return tree.firstToken(src_loc.parent_decl_node);
     }
 
-    pub fn declRelativeToNodeIndex(src_loc: SrcLoc, offset: i32) ast.TokenIndex {
-        return @bitCast(ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
+    pub fn declRelativeToNodeIndex(src_loc: SrcLoc, offset: i32) Ast.TokenIndex {
+        return @bitCast(Ast.Node.Index, offset + @bitCast(i32, src_loc.parent_decl_node));
     }
 
     pub fn byteOffset(src_loc: SrcLoc, gpa: *Allocator) !u32 {
@@ -1701,7 +1701,7 @@ pub const SrcLoc = struct {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_tags = tree.nodes.items(.tag);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 const full = switch (node_tags[node]) {
                     .call_one,
                     .call_one_comma,
@@ -1831,7 +1831,7 @@ pub const SrcLoc = struct {
                 const node_datas = tree.nodes.items(.data);
                 const node_tags = tree.nodes.items(.tag);
                 const main_tokens = tree.nodes.items(.main_token);
-                const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
+                const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
                 const case_nodes = tree.extra_data[extra.start..extra.end];
                 for (case_nodes) |case_node| {
                     const case = switch (node_tags[case_node]) {
@@ -1857,7 +1857,7 @@ pub const SrcLoc = struct {
                 const node_datas = tree.nodes.items(.data);
                 const node_tags = tree.nodes.items(.tag);
                 const main_tokens = tree.nodes.items(.main_token);
-                const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
+                const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
                 const case_nodes = tree.extra_data[extra.start..extra.end];
                 for (case_nodes) |case_node| {
                     const case = switch (node_tags[case_node]) {
@@ -1886,7 +1886,7 @@ pub const SrcLoc = struct {
                 const node_datas = tree.nodes.items(.data);
                 const node_tags = tree.nodes.items(.tag);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 const full = switch (node_tags[node]) {
                     .fn_proto_simple => tree.fnProtoSimple(&params, node),
                     .fn_proto_multi => tree.fnProtoMulti(node),
@@ -1911,7 +1911,7 @@ pub const SrcLoc = struct {
                 const tree = try src_loc.file_scope.getTree(gpa);
                 const node_tags = tree.nodes.items(.tag);
                 const node = src_loc.declRelativeToNodeIndex(node_off);
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 const full = switch (node_tags[node]) {
                     .fn_proto_simple => tree.fnProtoSimple(&params, node),
                     .fn_proto_multi => tree.fnProtoMulti(node),
@@ -1941,7 +1941,7 @@ pub const SrcLoc = struct {
                 const node_datas = tree.nodes.items(.data);
                 const node_tags = tree.nodes.items(.tag);
                 const parent_node = src_loc.declRelativeToNodeIndex(node_off);
-                var params: [1]ast.Node.Index = undefined;
+                var params: [1]Ast.Node.Index = undefined;
                 const full = switch (node_tags[parent_node]) {
                     .fn_proto_simple => tree.fnProtoSimple(&params, parent_node),
                     .fn_proto_multi => tree.fnProtoMulti(parent_node),
@@ -3967,7 +3967,7 @@ fn markOutdatedDecl(mod: *Module, decl: *Decl) !void {
     decl.analysis = .outdated;
 }
 
-pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: ast.Node.Index) !*Decl {
+pub fn allocateNewDecl(mod: *Module, namespace: *Scope.Namespace, src_node: Ast.Node.Index) !*Decl {
     // If we have emit-h then we must allocate a bigger structure to store the emit-h state.
     const new_decl: *Decl = if (mod.emit_h != null) blk: {
         const parent_struct = try mod.gpa.create(DeclPlusEmitH);
@@ -4237,7 +4237,7 @@ pub fn fail(
 pub fn failTok(
     mod: *Module,
     scope: *Scope,
-    token_index: ast.TokenIndex,
+    token_index: Ast.TokenIndex,
     comptime format: []const u8,
     args: anytype,
 ) CompileError {
@@ -4250,7 +4250,7 @@ pub fn failTok(
 pub fn failNode(
     mod: *Module,
     scope: *Scope,
-    node_index: ast.Node.Index,
+    node_index: Ast.Node.Index,
     comptime format: []const u8,
     args: anytype,
 ) CompileError {
@@ -4455,7 +4455,7 @@ pub const SwitchProngSrc = union(enum) {
         const main_tokens = tree.nodes.items(.main_token);
         const node_datas = tree.nodes.items(.data);
         const node_tags = tree.nodes.items(.tag);
-        const extra = tree.extraData(node_datas[switch_node].rhs, ast.Node.SubRange);
+        const extra = tree.extraData(node_datas[switch_node].rhs, Ast.Node.SubRange);
         const case_nodes = tree.extra_data[extra.start..extra.end];
 
         var multi_i: u32 = 0;
src/Sema.zig
@@ -10177,7 +10177,7 @@ fn typeHasOnePossibleValue(
     };
 }
 
-fn getAstTree(sema: *Sema, block: *Scope.Block) CompileError!*const std.zig.ast.Tree {
+fn getAstTree(sema: *Sema, block: *Scope.Block) CompileError!*const std.zig.Ast {
     return block.src_decl.namespace.file_scope.getTree(sema.gpa) catch |err| {
         log.err("unable to load AST to report compile error: {s}", .{@errorName(err)});
         return error.AnalysisFail;
@@ -10186,14 +10186,14 @@ fn getAstTree(sema: *Sema, block: *Scope.Block) CompileError!*const std.zig.ast.
 
 fn enumFieldSrcLoc(
     decl: *Decl,
-    tree: std.zig.ast.Tree,
+    tree: std.zig.Ast,
     node_offset: i32,
     field_index: usize,
 ) LazySrcLoc {
     @setCold(true);
     const enum_node = decl.relativeToNodeIndex(node_offset);
     const node_tags = tree.nodes.items(.tag);
-    var buffer: [2]std.zig.ast.Node.Index = undefined;
+    var buffer: [2]std.zig.Ast.Node.Index = undefined;
     const container_decl = switch (node_tags[enum_node]) {
         .container_decl,
         .container_decl_trailing,
src/translate_c.zig
@@ -356,7 +356,7 @@ pub fn translate(
     args_end: [*]?[*]const u8,
     errors: *[]ClangErrMsg,
     resources_path: [*:0]const u8,
-) !std.zig.ast.Tree {
+) !std.zig.Ast {
     const ast_unit = clang.LoadFromCommandLine(
         args_begin,
         args_end,
@@ -369,7 +369,7 @@ pub fn translate(
     };
     defer ast_unit.delete();
 
-    // For memory that has the same lifetime as the Tree that we return
+    // For memory that has the same lifetime as the Ast that we return
     // from this function.
     var arena = std.heap.ArenaAllocator.init(gpa);
     errdefer arena.deinit();
src/Zir.zig
@@ -16,7 +16,7 @@ const Allocator = std.mem.Allocator;
 const assert = std.debug.assert;
 const BigIntConst = std.math.big.int.Const;
 const BigIntMutable = std.math.big.int.Mutable;
-const ast = std.zig.ast;
+const Ast = std.zig.Ast;
 
 const Zir = @This();
 const Type = @import("type.zig").Type;
@@ -2092,7 +2092,7 @@ pub const Inst = struct {
         /// Used for unary operators, with a token source location.
         un_tok: struct {
             /// Offset from Decl AST token index.
-            src_tok: ast.TokenIndex,
+            src_tok: Ast.TokenIndex,
             /// The meaning of this operand depends on the corresponding `Tag`.
             operand: Ref,
 
@@ -2114,7 +2114,7 @@ pub const Inst = struct {
         },
         pl_tok: struct {
             /// Offset from Decl AST token index.
-            src_tok: ast.TokenIndex,
+            src_tok: Ast.TokenIndex,
             /// index into extra.
             /// `Tag` determines what lives there.
             payload_index: u32,
@@ -2150,7 +2150,7 @@ pub const Inst = struct {
             }
         },
         /// Offset from Decl AST token index.
-        tok: ast.TokenIndex,
+        tok: Ast.TokenIndex,
         /// Offset from Decl AST node index.
         node: i32,
         int: u64,
@@ -2878,9 +2878,9 @@ pub const Inst = struct {
         pub const Item = struct {
             /// null terminated string index
             msg: u32,
-            node: ast.Node.Index,
+            node: Ast.Node.Index,
             /// If node is 0 then this will be populated.
-            token: ast.TokenIndex,
+            token: Ast.TokenIndex,
             /// Can be used in combination with `token`.
             byte_offset: u32,
             /// 0 or a payload index of a `Block`, each is a payload
@@ -2897,7 +2897,7 @@ pub const Inst = struct {
             /// null terminated string index
             name: u32,
             /// points to the import name
-            token: ast.TokenIndex,
+            token: Ast.TokenIndex,
         };
     };
 };
@@ -2912,8 +2912,8 @@ const Writer = struct {
     indent: u32,
     parent_decl_node: u32,
 
-    fn relativeToNodeIndex(self: *Writer, offset: i32) ast.Node.Index {
-        return @bitCast(ast.Node.Index, offset + @bitCast(i32, self.parent_decl_node));
+    fn relativeToNodeIndex(self: *Writer, offset: i32) Ast.Node.Index {
+        return @bitCast(Ast.Node.Index, offset + @bitCast(i32, self.parent_decl_node));
     }
 
     fn writeInstToStream(
CMakeLists.txt
@@ -529,7 +529,7 @@ set(ZIG_STAGE2_SOURCES
     "${CMAKE_SOURCE_DIR}/lib/std/time.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/unicode.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig.zig"
-    "${CMAKE_SOURCE_DIR}/lib/std/zig/ast.zig"
+    "${CMAKE_SOURCE_DIR}/lib/std/zig/Ast.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/cross_target.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/parse.zig"
     "${CMAKE_SOURCE_DIR}/lib/std/zig/render.zig"