Commit 1a967db083

Vexu <git@vexu.eu>
2019-12-14 19:40:56
add default initializers to all ast.Node base values
1 parent 1f84586
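
This commit relies on Zig's default struct field values: each AST node struct now declares its `base` field with a default of the matching `Node{ .id = ... }` value, so the explicit `.base = Node{ .id = ... }` lines at every node-creation site in parse.zig and translate_c.zig can be dropped. A minimal standalone sketch of the pattern (simplified names, assuming a current Zig toolchain; not code taken from the commit):

const std = @import("std");

// Simplified stand-ins for ast.Node and one of its node types.
const Id = enum { Root, VarDecl };

const Node = struct {
    id: Id,
};

const Root = struct {
    // The tag is baked into the type as a default field value...
    base: Node = Node{ .id = .Root },
    eof_token: usize,
};

pub fn main() void {
    // ...so creation sites no longer need to spell out `.base = Node{ .id = .Root }`.
    const root = Root{ .eof_token = 0 };
    std.debug.assert(root.base.id == .Root);
}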
Changed files (3)
  lib/std/zig/ast.zig
  lib/std/zig/parse.zig
  src-self-hosted/translate_c.zig
lib/std/zig/ast.zig
@@ -581,7 +581,7 @@ pub const Node = struct {
     }
 
     pub const Root = struct {
-        base: Node,
+        base: Node = Node {.id = .Root},
         decls: DeclList,
         eof_token: TokenIndex,
 
@@ -604,7 +604,7 @@ pub const Node = struct {
     };
 
     pub const VarDecl = struct {
-        base: Node,
+        base: Node = Node {.id = .VarDecl},
         doc_comments: ?*DocComment,
         visib_token: ?TokenIndex,
         thread_local_token: ?TokenIndex,
@@ -661,7 +661,7 @@ pub const Node = struct {
     };
 
     pub const Use = struct {
-        base: Node,
+        base: Node = Node {.id = .Use},
         doc_comments: ?*DocComment,
         visib_token: ?TokenIndex,
         use_token: TokenIndex,
@@ -688,7 +688,7 @@ pub const Node = struct {
     };
 
     pub const ErrorSetDecl = struct {
-        base: Node,
+        base: Node = Node {.id = .ErrorSetDecl},
         error_token: TokenIndex,
         decls: DeclList,
         rbrace_token: TokenIndex,
@@ -714,7 +714,7 @@ pub const Node = struct {
     };
 
     pub const ContainerDecl = struct {
-        base: Node,
+        base: Node = Node {.id = .ContainerDecl},
         layout_token: ?TokenIndex,
         kind_token: TokenIndex,
         init_arg_expr: InitArg,
@@ -801,7 +801,7 @@ pub const Node = struct {
     };
 
     pub const ErrorTag = struct {
-        base: Node,
+        base: Node = Node {.id = .ErrorTag},
         doc_comments: ?*DocComment,
         name_token: TokenIndex,
 
@@ -826,7 +826,7 @@ pub const Node = struct {
     };
 
     pub const Identifier = struct {
-        base: Node,
+        base: Node = Node {.id = .Identifier},
         token: TokenIndex,
 
         pub fn iterate(self: *Identifier, index: usize) ?*Node {
@@ -843,7 +843,7 @@ pub const Node = struct {
     };
 
     pub const FnProto = struct {
-        base: Node,
+        base: Node = Node {.id = .FnProto},
         doc_comments: ?*DocComment,
         visib_token: ?TokenIndex,
         fn_token: TokenIndex,
@@ -925,7 +925,7 @@ pub const Node = struct {
     };
 
     pub const AnyFrameType = struct {
-        base: Node,
+        base: Node = Node {.id = .AnyFrameType},
         anyframe_token: TokenIndex,
         result: ?Result,
 
@@ -956,7 +956,7 @@ pub const Node = struct {
     };
 
     pub const ParamDecl = struct {
-        base: Node,
+        base: Node = Node {.id = .ParamDecl},
         doc_comments: ?*DocComment,
         comptime_token: ?TokenIndex,
         noalias_token: ?TokenIndex,
@@ -989,7 +989,7 @@ pub const Node = struct {
     };
 
     pub const Block = struct {
-        base: Node,
+        base: Node = Node {.id = .Block},
         label: ?TokenIndex,
         lbrace: TokenIndex,
         statements: StatementList,
@@ -1020,7 +1020,7 @@ pub const Node = struct {
     };
 
     pub const Defer = struct {
-        base: Node,
+        base: Node = Node {.id = .Defer},
         defer_token: TokenIndex,
         expr: *Node,
 
@@ -1043,7 +1043,7 @@ pub const Node = struct {
     };
 
     pub const Comptime = struct {
-        base: Node,
+        base: Node = Node {.id = .Comptime},
         doc_comments: ?*DocComment,
         comptime_token: TokenIndex,
         expr: *Node,
@@ -1067,7 +1067,7 @@ pub const Node = struct {
     };
 
     pub const Payload = struct {
-        base: Node,
+        base: Node = Node {.id = .Payload},
         lpipe: TokenIndex,
         error_symbol: *Node,
         rpipe: TokenIndex,
@@ -1091,7 +1091,7 @@ pub const Node = struct {
     };
 
     pub const PointerPayload = struct {
-        base: Node,
+        base: Node = Node {.id = .PointerPayload},
         lpipe: TokenIndex,
         ptr_token: ?TokenIndex,
         value_symbol: *Node,
@@ -1116,7 +1116,7 @@ pub const Node = struct {
     };
 
     pub const PointerIndexPayload = struct {
-        base: Node,
+        base: Node = Node {.id = .PointerIndexPayload},
         lpipe: TokenIndex,
         ptr_token: ?TokenIndex,
         value_symbol: *Node,
@@ -1147,7 +1147,7 @@ pub const Node = struct {
     };
 
     pub const Else = struct {
-        base: Node,
+        base: Node = Node {.id = .Else},
         else_token: TokenIndex,
         payload: ?*Node,
         body: *Node,
@@ -1176,7 +1176,7 @@ pub const Node = struct {
     };
 
     pub const Switch = struct {
-        base: Node,
+        base: Node = Node {.id = .Switch},
         switch_token: TokenIndex,
         expr: *Node,
 
@@ -1208,7 +1208,7 @@ pub const Node = struct {
     };
 
     pub const SwitchCase = struct {
-        base: Node,
+        base: Node = Node {.id = .SwitchCase},
         items: ItemList,
         arrow_token: TokenIndex,
         payload: ?*Node,
@@ -1243,7 +1243,7 @@ pub const Node = struct {
     };
 
     pub const SwitchElse = struct {
-        base: Node,
+        base: Node = Node {.id = .SwitchElse},
         token: TokenIndex,
 
         pub fn iterate(self: *SwitchElse, index: usize) ?*Node {
@@ -1260,7 +1260,7 @@ pub const Node = struct {
     };
 
     pub const While = struct {
-        base: Node,
+        base: Node = Node {.id = .While},
         label: ?TokenIndex,
         inline_token: ?TokenIndex,
         while_token: TokenIndex,
@@ -1319,7 +1319,7 @@ pub const Node = struct {
     };
 
     pub const For = struct {
-        base: Node,
+        base: Node = Node {.id = .For},
         label: ?TokenIndex,
         inline_token: ?TokenIndex,
         for_token: TokenIndex,
@@ -1370,7 +1370,7 @@ pub const Node = struct {
     };
 
     pub const If = struct {
-        base: Node,
+        base: Node = Node {.id = .If},
         if_token: TokenIndex,
         condition: *Node,
         payload: ?*Node,
@@ -1413,7 +1413,7 @@ pub const Node = struct {
     };
 
     pub const InfixOp = struct {
-        base: Node,
+        base: Node = Node {.id = .InfixOp},
         op_token: TokenIndex,
         lhs: *Node,
         op: Op,
@@ -1646,7 +1646,7 @@ pub const Node = struct {
     };
 
     pub const FieldInitializer = struct {
-        base: Node,
+        base: Node = Node {.id = .FieldInitializer},
         period_token: TokenIndex,
         name_token: TokenIndex,
         expr: *Node,
@@ -1670,7 +1670,7 @@ pub const Node = struct {
     };
 
     pub const SuffixOp = struct {
-        base: Node,
+        base: Node = Node {.id = .SuffixOp},
         lhs: Lhs,
         op: Op,
         rtoken: TokenIndex,
@@ -1766,7 +1766,7 @@ pub const Node = struct {
     };
 
     pub const GroupedExpression = struct {
-        base: Node,
+        base: Node = Node {.id = .GroupedExpression},
         lparen: TokenIndex,
         expr: *Node,
         rparen: TokenIndex,
@@ -1790,7 +1790,7 @@ pub const Node = struct {
     };
 
     pub const ControlFlowExpression = struct {
-        base: Node,
+        base: Node = Node {.id = .ControlFlowExpression},
         ltoken: TokenIndex,
         kind: Kind,
         rhs: ?*Node,
@@ -1856,7 +1856,7 @@ pub const Node = struct {
     };
 
     pub const Suspend = struct {
-        base: Node,
+        base: Node = Node {.id = .Suspend},
         suspend_token: TokenIndex,
         body: ?*Node,
 
@@ -1885,7 +1885,7 @@ pub const Node = struct {
     };
 
     pub const IntegerLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .IntegerLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *IntegerLiteral, index: usize) ?*Node {
@@ -1902,7 +1902,7 @@ pub const Node = struct {
     };
 
     pub const EnumLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .EnumLiteral},
         dot: TokenIndex,
         name: TokenIndex,
 
@@ -1920,7 +1920,7 @@ pub const Node = struct {
     };
 
     pub const FloatLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .FloatLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *FloatLiteral, index: usize) ?*Node {
@@ -1937,7 +1937,7 @@ pub const Node = struct {
     };
 
     pub const BuiltinCall = struct {
-        base: Node,
+        base: Node = Node {.id = .BuiltinCall},
         builtin_token: TokenIndex,
         params: ParamList,
         rparen_token: TokenIndex,
@@ -1963,7 +1963,7 @@ pub const Node = struct {
     };
 
     pub const StringLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .StringLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *StringLiteral, index: usize) ?*Node {
@@ -1980,7 +1980,7 @@ pub const Node = struct {
     };
 
     pub const MultilineStringLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .MultilineStringLiteral},
         lines: LineList,
 
         pub const LineList = SegmentedList(TokenIndex, 4);
@@ -1999,7 +1999,7 @@ pub const Node = struct {
     };
 
     pub const CharLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .CharLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *CharLiteral, index: usize) ?*Node {
@@ -2016,7 +2016,7 @@ pub const Node = struct {
     };
 
     pub const BoolLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .BoolLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *BoolLiteral, index: usize) ?*Node {
@@ -2033,7 +2033,7 @@ pub const Node = struct {
     };
 
     pub const NullLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .NullLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *NullLiteral, index: usize) ?*Node {
@@ -2050,7 +2050,7 @@ pub const Node = struct {
     };
 
     pub const UndefinedLiteral = struct {
-        base: Node,
+        base: Node = Node {.id = .UndefinedLiteral},
         token: TokenIndex,
 
         pub fn iterate(self: *UndefinedLiteral, index: usize) ?*Node {
@@ -2067,7 +2067,7 @@ pub const Node = struct {
     };
 
     pub const AsmOutput = struct {
-        base: Node,
+        base: Node = Node {.id = .AsmOutput},
         lbracket: TokenIndex,
         symbolic_name: *Node,
         constraint: *Node,
@@ -2112,7 +2112,7 @@ pub const Node = struct {
     };
 
     pub const AsmInput = struct {
-        base: Node,
+        base: Node = Node {.id = .AsmInput},
         lbracket: TokenIndex,
         symbolic_name: *Node,
         constraint: *Node,
@@ -2144,7 +2144,7 @@ pub const Node = struct {
     };
 
     pub const Asm = struct {
-        base: Node,
+        base: Node = Node {.id = .Asm},
         asm_token: TokenIndex,
         volatile_token: ?TokenIndex,
         template: *Node,
@@ -2179,7 +2179,7 @@ pub const Node = struct {
     };
 
     pub const Unreachable = struct {
-        base: Node,
+        base: Node = Node {.id = .Unreachable},
         token: TokenIndex,
 
         pub fn iterate(self: *Unreachable, index: usize) ?*Node {
@@ -2196,7 +2196,7 @@ pub const Node = struct {
     };
 
     pub const ErrorType = struct {
-        base: Node,
+        base: Node = Node {.id = .ErrorType},
         token: TokenIndex,
 
         pub fn iterate(self: *ErrorType, index: usize) ?*Node {
@@ -2230,7 +2230,7 @@ pub const Node = struct {
     };
 
     pub const DocComment = struct {
-        base: Node,
+        base: Node = Node {.id = .DocComment},
         lines: LineList,
 
         pub const LineList = SegmentedList(TokenIndex, 4);
@@ -2249,7 +2249,7 @@ pub const Node = struct {
     };
 
     pub const TestDecl = struct {
-        base: Node,
+        base: Node = Node {.id = .TestDecl},
         doc_comments: ?*DocComment,
         test_token: TokenIndex,
         name: *Node,
lib/std/zig/parse.zig
@@ -56,7 +56,6 @@ pub fn parse(allocator: *Allocator, source: []const u8) !*Tree {
 fn parseRoot(arena: *Allocator, it: *TokenIterator, tree: *Tree) Allocator.Error!*Node.Root {
     const node = try arena.create(Node.Root);
     node.* = Node.Root{
-        .base = Node{ .id = .Root },
         .decls = undefined,
         .eof_token = undefined,
     };
@@ -176,7 +175,6 @@ fn parseContainerDocComments(arena: *Allocator, it: *TokenIterator, tree: *Tree)
 
     const node = try arena.create(Node.DocComment);
     node.* = Node.DocComment{
-        .base = Node{ .id = .DocComment },
         .lines = lines,
     };
     return &node.base;
@@ -194,7 +192,6 @@ fn parseTestDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const test_node = try arena.create(Node.TestDecl);
     test_node.* = Node.TestDecl{
-        .base = Node{ .id = .TestDecl },
         .doc_comments = null,
         .test_token = test_token,
         .name = name_node,
@@ -217,7 +214,6 @@ fn parseTopLevelComptime(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*
 
     const comptime_node = try arena.create(Node.Comptime);
     comptime_node.* = Node.Comptime{
-        .base = Node{ .id = .Comptime },
         .doc_comments = null,
         .comptime_token = tok,
         .expr = block_node,
@@ -338,7 +334,6 @@ fn parseFnProto(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const fn_proto_node = try arena.create(Node.FnProto);
     fn_proto_node.* = Node.FnProto{
-        .base = Node{ .id = .FnProto },
         .doc_comments = null,
         .visib_token = null,
         .fn_token = fn_token,
@@ -389,7 +384,6 @@ fn parseVarDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.VarDecl);
     node.* = Node.VarDecl{
-        .base = Node{ .id = .VarDecl },
         .doc_comments = null,
         .visib_token = null,
         .thread_local_token = null,
@@ -477,7 +471,6 @@ fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*No
 
         const node = try arena.create(Node.Comptime);
         node.* = Node.Comptime{
-            .base = Node{ .id = .Comptime },
             .doc_comments = null,
             .comptime_token = token,
             .expr = block_expr,
@@ -496,7 +489,6 @@ fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*No
 
         const node = try arena.create(Node.Suspend);
         node.* = Node.Suspend{
-            .base = Node{ .id = .Suspend },
             .suspend_token = suspend_token,
             .body = body_node,
         };
@@ -510,7 +502,6 @@ fn parseStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) Error!?*No
         });
         const node = try arena.create(Node.Defer);
         node.* = Node.Defer{
-            .base = Node{ .id = .Defer },
             .defer_token = token,
             .expr = expr_node,
         };
@@ -558,7 +549,6 @@ fn parseIfStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
         const node = try arena.create(Node.Else);
         node.* = Node.Else{
-            .base = Node{ .id = .Else },
             .else_token = else_token,
             .payload = payload,
             .body = else_body,
@@ -652,7 +642,6 @@ fn parseForStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
             const else_node = try arena.create(Node.Else);
             else_node.* = Node.Else{
-                .base = Node{ .id = .Else },
                 .else_token = else_token,
                 .payload = null,
                 .body = statement_node,
@@ -677,7 +666,6 @@ fn parseForStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
             const else_node = try arena.create(Node.Else);
             else_node.* = Node.Else{
-                .base = Node{ .id = .Else },
                 .else_token = else_token,
                 .payload = null,
                 .body = statement_node,
@@ -714,7 +702,6 @@ fn parseWhileStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
 
             const else_node = try arena.create(Node.Else);
             else_node.* = Node.Else{
-                .base = Node{ .id = .Else },
                 .else_token = else_token,
                 .payload = payload,
                 .body = statement_node,
@@ -741,7 +728,6 @@ fn parseWhileStatement(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
 
             const else_node = try arena.create(Node.Else);
             else_node.* = Node.Else{
-                .base = Node{ .id = .Else },
                 .else_token = else_token,
                 .payload = payload,
                 .body = statement_node,
@@ -870,7 +856,6 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         const expr_node = try parseExpr(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
         node.* = Node.ControlFlowExpression{
-            .base = Node{ .id = .ControlFlowExpression },
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind{ .Break = label },
             .rhs = expr_node,
@@ -884,7 +869,6 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         });
         const node = try arena.create(Node.Comptime);
         node.* = Node.Comptime{
-            .base = Node{ .id = .Comptime },
             .doc_comments = null,
             .comptime_token = token,
             .expr = expr_node,
@@ -896,7 +880,6 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         const label = try parseBreakLabel(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
         node.* = Node.ControlFlowExpression{
-            .base = Node{ .id = .ControlFlowExpression },
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind{ .Continue = label },
             .rhs = null,
@@ -910,7 +893,6 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         });
         const node = try arena.create(Node.PrefixOp);
         node.* = Node.PrefixOp{
-            .base = Node{ .id = .PrefixOp },
             .op_token = token,
             .op = Node.PrefixOp.Op.Resume,
             .rhs = expr_node,
@@ -922,7 +904,6 @@ fn parsePrimaryExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         const expr_node = try parseExpr(arena, it, tree);
         const node = try arena.create(Node.ControlFlowExpression);
         node.* = Node.ControlFlowExpression{
-            .base = Node{ .id = .ControlFlowExpression },
             .ltoken = token,
             .kind = Node.ControlFlowExpression.Kind.Return,
             .rhs = expr_node,
@@ -970,7 +951,6 @@ fn parseBlock(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const block_node = try arena.create(Node.Block);
     block_node.* = Node.Block{
-        .base = Node{ .id = .Block },
         .label = null,
         .lbrace = lbrace,
         .statements = statements,
@@ -1020,7 +1000,6 @@ fn parseForExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
         const else_node = try arena.create(Node.Else);
         else_node.* = Node.Else{
-            .base = Node{ .id = .Else },
             .else_token = else_token,
             .payload = null,
             .body = body,
@@ -1050,7 +1029,6 @@ fn parseWhileExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
         const else_node = try arena.create(Node.Else);
         else_node.* = Node.Else{
-            .base = Node{ .id = .Else },
             .else_token = else_token,
             .payload = payload,
             .body = body,
@@ -1102,7 +1080,6 @@ fn parseInitList(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.Suf
 
     const node = try arena.create(Node.SuffixOp);
     node.* = Node.SuffixOp{
-        .base = Node{ .id = .SuffixOp },
         .lhs = .{ .node = undefined }, // set by caller
         .op = op,
         .rtoken = try expectToken(it, tree, .RBrace),
@@ -1171,7 +1148,6 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
         };
         const node = try arena.create(Node.SuffixOp);
         node.* = Node.SuffixOp{
-            .base = Node{ .id = .SuffixOp },
             .lhs = .{ .node = res },
             .op = Node.SuffixOp.Op{
                 .Call = Node.SuffixOp.Op.Call{
@@ -1199,7 +1175,6 @@ fn parseSuffixExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
             if (try parseFnCallArguments(arena, it, tree)) |params| {
                 const call = try arena.create(Node.SuffixOp);
                 call.* = Node.SuffixOp{
-                    .base = Node{ .id = .SuffixOp },
                     .lhs = .{ .node = res },
                     .op = Node.SuffixOp.Op{
                         .Call = Node.SuffixOp.Op.Call{
@@ -1248,7 +1223,6 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
     if (eatToken(it, .CharLiteral)) |token| {
         const node = try arena.create(Node.CharLiteral);
         node.* = Node.CharLiteral{
-            .base = Node{ .id = .CharLiteral },
             .token = token,
         };
         return &node.base;
@@ -1267,7 +1241,6 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
         const expr = (try parseTypeExpr(arena, it, tree)) orelse return null;
         const node = try arena.create(Node.Comptime);
         node.* = Node.Comptime{
-            .base = Node{ .id = .Comptime },
             .doc_comments = null,
             .comptime_token = token,
             .expr = expr,
@@ -1282,7 +1255,6 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
         const global_error_set = try createLiteral(arena, Node.ErrorType, token);
         const node = try arena.create(Node.InfixOp);
         node.* = Node.InfixOp{
-            .base = Node{ .id = .InfixOp },
             .op_token = period,
             .lhs = global_error_set,
             .op = Node.InfixOp.Op.Period,
@@ -1295,7 +1267,6 @@ fn parsePrimaryTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
     if (eatToken(it, .Keyword_anyframe)) |token| {
         const node = try arena.create(Node.AnyFrameType);
         node.* = Node.AnyFrameType{
-            .base = Node{ .id = .AnyFrameType },
             .anyframe_token = token,
             .result = null,
         };
@@ -1337,7 +1308,6 @@ fn parseErrorSetDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
     const node = try arena.create(Node.ErrorSetDecl);
     node.* = Node.ErrorSetDecl{
-        .base = Node{ .id = .ErrorSetDecl },
         .error_token = error_token,
         .decls = decls,
         .rbrace_token = rbrace,
@@ -1355,7 +1325,6 @@ fn parseGroupedExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
     const node = try arena.create(Node.GroupedExpression);
     node.* = Node.GroupedExpression{
-        .base = Node{ .id = .GroupedExpression },
         .lparen = lparen,
         .expr = expr,
         .rparen = rparen,
@@ -1438,7 +1407,6 @@ fn parseForTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
         const else_node = try arena.create(Node.Else);
         else_node.* = Node.Else{
-            .base = Node{ .id = .Else },
             .else_token = else_token,
             .payload = null,
             .body = else_expr,
@@ -1469,7 +1437,6 @@ fn parseWhileTypeExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Nod
 
         const else_node = try arena.create(Node.Else);
         else_node.* = Node.Else{
-            .base = Node{ .id = .Else },
             .else_token = else_token,
             .payload = null,
             .body = else_expr,
@@ -1495,7 +1462,6 @@ fn parseSwitchExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.Switch);
     node.* = Node.Switch{
-        .base = Node{ .id = .Switch },
         .switch_token = switch_token,
         .expr = expr_node,
         .cases = cases,
@@ -1515,7 +1481,6 @@ fn parseAsmExpr(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.Asm);
     node.* = Node.Asm{
-        .base = Node{ .id = .Asm },
         .asm_token = asm_token,
         .volatile_token = volatile_token,
         .template = template,
@@ -1538,7 +1503,6 @@ fn parseAnonLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
     if (eatToken(it, .Identifier)) |name| {
         const node = try arena.create(Node.EnumLiteral);
         node.* = Node.EnumLiteral{
-            .base = Node{ .id = .EnumLiteral },
             .dot = dot,
             .name = name,
         };
@@ -1591,7 +1555,6 @@ fn parseAsmOutputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Nod
 
     const node = try arena.create(Node.AsmOutput);
     node.* = Node.AsmOutput{
-        .base = Node{ .id = .AsmOutput },
         .lbracket = lbracket,
         .symbolic_name = name,
         .constraint = constraint,
@@ -1628,7 +1591,6 @@ fn parseAsmInputItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
     const node = try arena.create(Node.AsmInput);
     node.* = Node.AsmInput{
-        .base = Node{ .id = .AsmInput },
         .lbracket = lbracket,
         .symbolic_name = name,
         .constraint = constraint,
@@ -1687,7 +1649,6 @@ fn parseFieldInit(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.FieldInitializer);
     node.* = Node.FieldInitializer{
-        .base = Node{ .id = .FieldInitializer },
         .period_token = period_token,
         .name_token = name_token,
         .expr = expr_node,
@@ -1760,7 +1721,6 @@ fn parseParamDecl(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const param_decl = try arena.create(Node.ParamDecl);
     param_decl.* = Node.ParamDecl{
-        .base = Node{ .id = .ParamDecl },
         .doc_comments = doc_comments,
         .comptime_token = comptime_token,
         .noalias_token = noalias_token,
@@ -1807,7 +1767,6 @@ fn parseIfPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.If);
     node.* = Node.If{
-        .base = Node{ .id = .If },
         .if_token = if_token,
         .condition = condition,
         .payload = payload,
@@ -1832,7 +1791,6 @@ fn parseWhilePrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
 
     const node = try arena.create(Node.While);
     node.* = Node.While{
-        .base = Node{ .id = .While },
         .label = null,
         .inline_token = null,
         .while_token = while_token,
@@ -1861,7 +1819,6 @@ fn parseForPrefix(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.For);
     node.* = Node.For{
-        .base = Node{ .id = .For },
         .label = null,
         .inline_token = null,
         .for_token = for_token,
@@ -1883,7 +1840,6 @@ fn parsePayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.Payload);
     node.* = Node.Payload{
-        .base = Node{ .id = .Payload },
         .lpipe = lpipe,
         .error_symbol = identifier,
         .rpipe = rpipe,
@@ -1902,7 +1858,6 @@ fn parsePtrPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.PointerPayload);
     node.* = Node.PointerPayload{
-        .base = Node{ .id = .PointerPayload },
         .lpipe = lpipe,
         .ptr_token = asterisk,
         .value_symbol = identifier,
@@ -1930,7 +1885,6 @@ fn parsePtrIndexPayload(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*N
 
     const node = try arena.create(Node.PointerIndexPayload);
     node.* = Node.PointerIndexPayload{
-        .base = Node{ .id = .PointerIndexPayload },
         .lpipe = lpipe,
         .ptr_token = asterisk,
         .value_symbol = identifier,
@@ -1972,7 +1926,6 @@ fn parseSwitchCase(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
     } else if (eatToken(it, .Keyword_else)) |else_token| {
         const else_node = try arena.create(Node.SwitchElse);
         else_node.* = Node.SwitchElse{
-            .base = Node{ .id = .SwitchElse },
             .token = else_token,
         };
         try list.push(&else_node.base);
@@ -1980,7 +1933,6 @@ fn parseSwitchCase(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.SwitchCase);
     node.* = Node.SwitchCase{
-        .base = Node{ .id = .SwitchCase },
         .items = list,
         .arrow_token = undefined, // set by caller
         .payload = null,
@@ -1999,7 +1951,6 @@ fn parseSwitchItem(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
         const node = try arena.create(Node.InfixOp);
         node.* = Node.InfixOp{
-            .base = Node{ .id = .InfixOp },
             .op_token = token,
             .lhs = expr,
             .op = Node.InfixOp.Op{ .Range = {} },
@@ -2052,7 +2003,6 @@ fn parseAssignOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.InfixOp);
     node.* = Node.InfixOp{
-        .base = Node{ .id = .InfixOp },
         .op_token = token.index,
         .lhs = undefined, // set by caller
         .op = op,
@@ -2212,7 +2162,6 @@ fn parsePrefixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.PrefixOp);
     node.* = Node.PrefixOp{
-        .base = Node{ .id = .PrefixOp },
         .op_token = token.index,
         .op = op,
         .rhs = undefined,
@@ -2236,7 +2185,6 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
     if (eatToken(it, .QuestionMark)) |token| {
         const node = try arena.create(Node.PrefixOp);
         node.* = Node.PrefixOp{
-            .base = Node{ .id = .PrefixOp },
             .op_token = token,
             .op = Node.PrefixOp.Op.OptionalType,
             .rhs = undefined, // set by caller
@@ -2255,7 +2203,6 @@ fn parsePrefixTypeOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
         };
         const node = try arena.create(Node.AnyFrameType);
         node.* = Node.AnyFrameType{
-            .base = Node{ .id = .AnyFrameType },
             .anyframe_token = token,
             .result = Node.AnyFrameType.Result{
                 .arrow_token = arrow,
@@ -2430,7 +2377,6 @@ fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
                 // this grammar rule be altered?
                 const node = try arena.create(Node.InfixOp);
                 node.* = Node.InfixOp{
-                    .base = Node{ .id = .InfixOp },
                     .op_token = period,
                     .lhs = undefined, // set by caller
                     .op = Node.InfixOp.Op.Period,
@@ -2452,7 +2398,6 @@ fn parseSuffixOp(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.SuffixOp);
     node.* = Node.SuffixOp{
-        .base = Node{ .id = .SuffixOp },
         .lhs = undefined, // set by caller
         .op = op_and_token.op,
         .rtoken = op_and_token.token,
@@ -2506,7 +2451,6 @@ fn parseArrayTypeStart(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
 
     const node = try arena.create(Node.PrefixOp);
     node.* = Node.PrefixOp{
-        .base = Node{ .id = .PrefixOp },
         .op_token = lbracket,
         .op = op,
         .rhs = undefined, // set by caller
@@ -2656,7 +2600,6 @@ fn parseContainerDeclType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?
 
     const node = try arena.create(Node.ContainerDecl);
     node.* = Node.ContainerDecl{
-        .base = Node{ .id = .ContainerDecl },
         .layout_token = null,
         .kind_token = kind_token.index,
         .init_arg_expr = init_arg_expr,
@@ -2729,7 +2672,6 @@ fn SimpleBinOpParseFn(comptime token: Token.Id, comptime op: Node.InfixOp.Op) No
             const op_token = eatToken(it, token) orelse return null;
             const node = try arena.create(Node.InfixOp);
             node.* = Node.InfixOp{
-                .base = Node{ .id = .InfixOp },
                 .op_token = op_token,
                 .lhs = undefined, // set by caller
                 .op = op,
@@ -2752,7 +2694,6 @@ fn parseBuiltinCall(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
     };
     const node = try arena.create(Node.BuiltinCall);
     node.* = Node.BuiltinCall{
-        .base = Node{ .id = .BuiltinCall },
         .builtin_token = token,
         .params = params.list,
         .rparen_token = params.rparen,
@@ -2766,7 +2707,6 @@ fn parseErrorTag(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
 
     const node = try arena.create(Node.ErrorTag);
     node.* = Node.ErrorTag{
-        .base = Node{ .id = .ErrorTag },
         .doc_comments = doc_comments,
         .name_token = token,
     };
@@ -2777,7 +2717,6 @@ fn parseIdentifier(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
     const token = eatToken(it, .Identifier) orelse return null;
     const node = try arena.create(Node.Identifier);
     node.* = Node.Identifier{
-        .base = Node{ .id = .Identifier },
         .token = token,
     };
     return &node.base;
@@ -2787,7 +2726,6 @@ fn parseVarType(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
     const token = eatToken(it, .Keyword_var) orelse return null;
     const node = try arena.create(Node.VarType);
     node.* = Node.VarType{
-        .base = Node{ .id = .VarType },
         .token = token,
     };
     return &node.base;
@@ -2807,7 +2745,6 @@ fn parseStringLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Nod
     if (eatToken(it, .StringLiteral)) |token| {
         const node = try arena.create(Node.StringLiteral);
         node.* = Node.StringLiteral{
-            .base = Node{ .id = .StringLiteral },
             .token = token,
         };
         return &node.base;
@@ -2816,7 +2753,6 @@ fn parseStringLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Nod
     if (eatToken(it, .MultilineStringLiteralLine)) |first_line| {
         const node = try arena.create(Node.MultilineStringLiteral);
         node.* = Node.MultilineStringLiteral{
-            .base = Node{ .id = .MultilineStringLiteral },
             .lines = Node.MultilineStringLiteral.LineList.init(arena),
         };
         try node.lines.push(first_line);
@@ -2833,7 +2769,6 @@ fn parseIntegerLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*No
     const token = eatToken(it, .IntegerLiteral) orelse return null;
     const node = try arena.create(Node.IntegerLiteral);
     node.* = Node.IntegerLiteral{
-        .base = Node{ .id = .IntegerLiteral },
         .token = token,
     };
     return &node.base;
@@ -2843,7 +2778,6 @@ fn parseFloatLiteral(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node
     const token = eatToken(it, .FloatLiteral) orelse return null;
     const node = try arena.create(Node.FloatLiteral);
     node.* = Node.FloatLiteral{
-        .base = Node{ .id = .FloatLiteral },
         .token = token,
     };
     return &node.base;
@@ -2853,7 +2787,6 @@ fn parseTry(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
     const token = eatToken(it, .Keyword_try) orelse return null;
     const node = try arena.create(Node.PrefixOp);
     node.* = Node.PrefixOp{
-        .base = Node{ .id = .PrefixOp },
         .op_token = token,
         .op = Node.PrefixOp.Op.Try,
         .rhs = undefined, // set by caller
@@ -2865,7 +2798,6 @@ fn parseUse(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node {
     const token = eatToken(it, .Keyword_usingnamespace) orelse return null;
     const node = try arena.create(Node.Use);
     node.* = Node.Use{
-        .base = Node{ .id = .Use },
         .doc_comments = null,
         .visib_token = null,
         .use_token = token,
@@ -2891,7 +2823,6 @@ fn parseIf(arena: *Allocator, it: *TokenIterator, tree: *Tree, bodyParseFn: Node
     });
     const else_node = try arena.create(Node.Else);
     else_node.* = Node.Else{
-        .base = Node{ .id = .Else },
         .else_token = else_token,
         .payload = payload,
         .body = else_expr,
@@ -2912,7 +2843,6 @@ fn parseDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree) !?*Node.D
 
     const node = try arena.create(Node.DocComment);
     node.* = Node.DocComment{
-        .base = Node{ .id = .DocComment },
         .lines = lines,
     };
     return node;
@@ -2924,7 +2854,6 @@ fn parseAppendedDocComment(arena: *Allocator, it: *TokenIterator, tree: *Tree, a
     if (tree.tokensOnSameLine(after_token, comment_token)) {
         const node = try arena.create(Node.DocComment);
         node.* = Node.DocComment{
-            .base = Node{ .id = .DocComment },
             .lines = Node.DocComment.LineList.init(arena),
         };
         try node.lines.push(comment_token);
@@ -3031,7 +2960,6 @@ fn parseBinOpExpr(
 fn createInfixOp(arena: *Allocator, index: TokenIndex, op: Node.InfixOp.Op) !*Node {
     const node = try arena.create(Node.InfixOp);
     node.* = Node.InfixOp{
-        .base = Node{ .id = .InfixOp },
         .op_token = index,
         .lhs = undefined,
         .op = op,
src-self-hosted/translate_c.zig
@@ -381,7 +381,6 @@ fn visitVarDecl(c: *Context, var_decl: *const ZigClangVarDecl) Error!void {
 
     const node = try c.a().create(ast.Node.VarDecl);
     node.* = ast.Node.VarDecl{
-        .base = ast.Node{ .id = .VarDecl },
         .doc_comments = null,
         .visib_token = visib_tok,
         .thread_local_token = thread_local_token,
@@ -421,7 +420,6 @@ fn resolveTypeDef(c: *Context, typedef_decl: *const ZigClangTypedefNameDecl) Err
 
     const node = try c.a().create(ast.Node.VarDecl);
     node.* = ast.Node.VarDecl{
-        .base = ast.Node{ .id = .VarDecl },
         .doc_comments = null,
         .visib_token = visib_tok,
         .thread_local_token = null,
@@ -473,7 +471,6 @@ fn resolveRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) Error!
 
     const node = try c.a().create(ast.Node.VarDecl);
     node.* = ast.Node.VarDecl{
-        .base = ast.Node{ .id = .VarDecl },
         .doc_comments = null,
         .visib_token = visib_tok,
         .thread_local_token = null,
@@ -504,7 +501,6 @@ fn createAlias(c: *Context, alias: var) !void {
 
     const node = try c.a().create(ast.Node.VarDecl);
     node.* = ast.Node.VarDecl{
-        .base = ast.Node{ .id = .VarDecl },
         .doc_comments = null,
         .visib_token = visib_tok,
         .thread_local_token = null,
@@ -810,7 +806,6 @@ fn transDeclStmt(rp: RestorePoint, parent_scope: *Scope, stmt: *const ZigClangDe
 
                 const node = try c.a().create(ast.Node.VarDecl);
                 node.* = ast.Node.VarDecl{
-                    .base = ast.Node{ .id = .VarDecl },
                     .doc_comments = null,
                     .visib_token = null,
                     .thread_local_token = thread_local_token,
@@ -978,7 +973,6 @@ fn transStringLiteral(
             const token = try appendToken(rp.c, .StringLiteral, buf);
             const node = try rp.c.a().create(ast.Node.StringLiteral);
             node.* = ast.Node.StringLiteral{
-                .base = ast.Node{ .id = .StringLiteral },
                 .token = token,
             };
             const res = TransResult{
@@ -1145,7 +1139,6 @@ fn transInitListExpr(
         const mul_tok = try appendToken(rp.c, .AsteriskAsterisk, "**");
         const mul_node = try rp.c.a().create(ast.Node.InfixOp);
         mul_node.* = .{
-            .base = .{ .id = .InfixOp },
             .op_token = mul_tok,
             .lhs = &filler_init_node.base,
             .op = .ArrayMult,
@@ -1164,7 +1157,6 @@ fn transInitListExpr(
 
     const cat_node = try rp.c.a().create(ast.Node.InfixOp);
     cat_node.* = .{
-        .base = .{ .id = .InfixOp },
         .op_token = cat_tok,
         .lhs = &init_node.base,
         .op = .ArrayCat,
@@ -1296,7 +1288,6 @@ fn maybeSuppressResult(
     const op_token = try appendToken(rp.c, .Equal, "=");
     const op_node = try rp.c.a().create(ast.Node.InfixOp);
     op_node.* = ast.Node.InfixOp{
-        .base = ast.Node{ .id = .InfixOp },
         .op_token = op_token,
         .lhs = lhs,
         .op = .Assign,
@@ -1352,7 +1343,6 @@ fn transRecordDecl(c: *Context, record_decl: *const ZigClangRecordDecl) TypeErro
 
     const container_node = try c.a().create(ast.Node.ContainerDecl);
     container_node.* = .{
-        .base = ast.Node{ .id = .ContainerDecl },
         .layout_token = extern_tok,
         .kind_token = container_tok,
         .init_arg_expr = .None,
@@ -1523,7 +1513,6 @@ fn transCreateNodeAssign(
 
         const node = try rp.c.a().create(ast.Node.InfixOp);
         node.* = ast.Node.InfixOp{
-            .base = ast.Node{ .id = .InfixOp },
             .op_token = eq_token,
             .lhs = lhs_node.node,
             .op = .Assign,
@@ -1553,7 +1542,6 @@ fn transCreateNodeBuiltinFnCall(c: *Context, name: []const u8) !*ast.Node.Builti
     _ = try appendToken(c, .LParen, "(");
     const node = try c.a().create(ast.Node.BuiltinCall);
     node.* = ast.Node.BuiltinCall{
-        .base = ast.Node{ .id = .BuiltinCall },
         .builtin_token = builtin_token,
         .params = ast.Node.BuiltinCall.ParamList.init(c.a()),
         .rparen_token = undefined, // set after appending args
@@ -1565,7 +1553,6 @@ fn transCreateNodeFnCall(c: *Context, fn_expr: *ast.Node) !*ast.Node.SuffixOp {
     _ = try appendToken(c, .LParen, "(");
     const node = try c.a().create(ast.Node.SuffixOp);
     node.* = ast.Node.SuffixOp{
-        .base = ast.Node{ .id = .SuffixOp },
         .lhs = fn_expr,
         .op = ast.Node.SuffixOp.Op{
             .Call = ast.Node.SuffixOp.Op.Call{
@@ -1586,7 +1573,6 @@ fn transCreateNodePrefixOp(
 ) !*ast.Node.PrefixOp {
     const node = try c.a().create(ast.Node.PrefixOp);
     node.* = ast.Node.PrefixOp{
-        .base = ast.Node{ .id = .PrefixOp },
         .op_token = try appendToken(c, op_tok_id, bytes),
         .op = op,
         .rhs = undefined, // translate and set afterward
@@ -1609,7 +1595,6 @@ fn transCreateNodeInfixOp(
     const rhs = try transExpr(rp, scope, ZigClangBinaryOperator_getRHS(stmt), .used, .r_value);
     const node = try rp.c.a().create(ast.Node.InfixOp);
     node.* = ast.Node.InfixOp{
-        .base = ast.Node{ .id = .InfixOp },
         .op_token = op_token,
         .lhs = lhs.node,
         .op = op,
@@ -1619,7 +1604,6 @@ fn transCreateNodeInfixOp(
     const rparen = try appendToken(rp.c, .RParen, ")");
     const grouped_expr = try rp.c.a().create(ast.Node.GroupedExpression);
     grouped_expr.* = ast.Node.GroupedExpression{
-        .base = ast.Node{ .id = .GroupedExpression },
         .lparen = lparen,
         .expr = &node.base,
         .rparen = rparen,
@@ -1652,7 +1636,6 @@ fn transCreateNodePtrType(
         else => unreachable,
     };
     node.* = ast.Node.PrefixOp{
-        .base = ast.Node{ .id = .PrefixOp },
         .op_token = op_token,
         .op = ast.Node.PrefixOp.Op{
             .PtrType = .{
@@ -1679,7 +1662,6 @@ fn transCreateNodeAPInt(c: *Context, int: ?*const ZigClangAPSInt) !*ast.Node {
     const token = try appendToken(c, .IntegerLiteral, str);
     const node = try c.a().create(ast.Node.IntegerLiteral);
     node.* = ast.Node.IntegerLiteral{
-        .base = ast.Node{ .id = .IntegerLiteral },
         .token = token,
     };
     return &node.base;
@@ -1689,7 +1671,6 @@ fn transCreateNodeReturnExpr(c: *Context) !*ast.Node {
     const ltoken = try appendToken(c, .Keyword_return, "return");
     const node = try c.a().create(ast.Node.ControlFlowExpression);
     node.* = ast.Node.ControlFlowExpression{
-        .base = ast.Node{ .id = .ControlFlowExpression },
         .ltoken = ltoken,
         .kind = .Return,
         .rhs = null,
@@ -1701,7 +1682,6 @@ fn transCreateNodeUndefinedLiteral(c: *Context) !*ast.Node {
     const token = try appendToken(c, .Keyword_undefined, "undefined");
     const node = try c.a().create(ast.Node.UndefinedLiteral);
     node.* = ast.Node.UndefinedLiteral{
-        .base = ast.Node{ .id = .UndefinedLiteral },
         .token = token,
     };
     return &node.base;
@@ -1711,7 +1691,6 @@ fn transCreateNodeNullLiteral(c: *Context) !*ast.Node {
     const token = try appendToken(c, .Keyword_null, "null");
     const node = try c.a().create(ast.Node.NullLiteral);
     node.* = ast.Node.NullLiteral{
-        .base = ast.Node{ .id = .NullLiteral },
         .token = token,
     };
     return &node.base;
@@ -1724,7 +1703,6 @@ fn transCreateNodeBoolLiteral(c: *Context, value: bool) !*ast.Node {
         try appendToken(c, .Keyword_false, "false");
     const node = try c.a().create(ast.Node.BoolLiteral);
     node.* = ast.Node.BoolLiteral{
-        .base = ast.Node{ .id = .BoolLiteral },
         .token = token,
     };
     return &node.base;
@@ -1734,7 +1712,6 @@ fn transCreateNodeArrayInitializer(c: *Context, dot_tok: ast.TokenIndex) !*ast.N
     _ = try appendToken(c, .LBrace, "{");
     const node = try c.a().create(ast.Node.SuffixOp);
     node.* = ast.Node.SuffixOp{
-        .base = ast.Node{ .id = .SuffixOp },
         .lhs = .{ .dot = dot_tok },
         .op = .{
             .ArrayInitializer = ast.Node.SuffixOp.Op.InitList.init(c.a()),
@@ -1748,7 +1725,6 @@ fn transCreateNodeInt(c: *Context, int: var) !*ast.Node {
     const token = try appendTokenFmt(c, .IntegerLiteral, "{}", .{int});
     const node = try c.a().create(ast.Node.IntegerLiteral);
     node.* = ast.Node.IntegerLiteral{
-        .base = ast.Node{ .id = .IntegerLiteral },
         .token = token,
     };
     return &node.base;