Commit b2682237db

Andrew Kelley <andrew@ziglang.org>
2021-03-19 06:19:28
stage2: get Module and Sema compiling again
There are some `@panic("TODO")` in there but I'm trying to get the branch to the point where collaborators can jump in. Next is to repair the seam between LazySrcLoc and codegen's expected absolute file offsets.
1 parent f5aca4a
src/codegen/c.zig
@@ -14,6 +14,7 @@ const TypedValue = @import("../TypedValue.zig");
 const C = link.File.C;
 const Decl = Module.Decl;
 const trace = @import("../tracy.zig").trace;
+const LazySrcLoc = Module.LazySrcLoc;
 
 const Mutability = enum { Const, Mut };
 
@@ -145,11 +146,10 @@ pub const DeclGen = struct {
     error_msg: ?*Module.ErrorMsg,
     typedefs: TypedefMap,
 
-    fn fail(dg: *DeclGen, src: usize, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
-        dg.error_msg = try Module.ErrorMsg.create(dg.module.gpa, .{
-            .file_scope = dg.decl.getFileScope(),
-            .byte_offset = src,
-        }, format, args);
+    fn fail(dg: *DeclGen, src: LazySrcLoc, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
+        @setCold(true);
+        const src_loc = src.toSrcLocWithDecl(dg.decl);
+        dg.error_msg = try Module.ErrorMsg.create(dg.module.gpa, src_loc, format, args);
         return error.AnalysisFail;
     }
 
@@ -160,7 +160,7 @@ pub const DeclGen = struct {
         val: Value,
     ) error{ OutOfMemory, AnalysisFail }!void {
         if (val.isUndef()) {
-            return dg.fail(dg.decl.src(), "TODO: C backend: properly handle undefined in all cases (with debug safety?)", .{});
+            return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: properly handle undefined in all cases (with debug safety?)", .{});
         }
         switch (t.zigTypeTag()) {
             .Int => {
@@ -193,7 +193,7 @@ pub const DeclGen = struct {
                     try writer.print("{s}", .{decl.name});
                 },
                 else => |e| return dg.fail(
-                    dg.decl.src(),
+                    .{ .node_offset = 0 },
                     "TODO: C backend: implement Pointer value {s}",
                     .{@tagName(e)},
                 ),
@@ -276,7 +276,7 @@ pub const DeclGen = struct {
                     try writer.writeAll(", .error = 0 }");
                 }
             },
-            else => |e| return dg.fail(dg.decl.src(), "TODO: C backend: implement value {s}", .{
+            else => |e| return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement value {s}", .{
                 @tagName(e),
             }),
         }
@@ -350,7 +350,7 @@ pub const DeclGen = struct {
                                 break;
                             }
                         } else {
-                            return dg.fail(dg.decl.src(), "TODO: C backend: implement integer types larger than 128 bits", .{});
+                            return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement integer types larger than 128 bits", .{});
                         }
                     },
                     else => unreachable,
@@ -358,7 +358,7 @@ pub const DeclGen = struct {
             },
             .Pointer => {
                 if (t.isSlice()) {
-                    return dg.fail(dg.decl.src(), "TODO: C backend: implement slices", .{});
+                    return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement slices", .{});
                 } else {
                     try dg.renderType(w, t.elemType());
                     try w.writeAll(" *");
@@ -431,7 +431,7 @@ pub const DeclGen = struct {
                 dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
             },
             .Null, .Undefined => unreachable, // must be const or comptime
-            else => |e| return dg.fail(dg.decl.src(), "TODO: C backend: implement type {s}", .{
+            else => |e| return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type {s}", .{
                 @tagName(e),
             }),
         }
@@ -575,7 +575,7 @@ pub fn genBody(o: *Object, body: ir.Body) error{ AnalysisFail, OutOfMemory }!voi
             .unwrap_errunion_err_ptr => try genUnwrapErrUnionErr(o, inst.castTag(.unwrap_errunion_err_ptr).?),
             .wrap_errunion_payload => try genWrapErrUnionPay(o, inst.castTag(.wrap_errunion_payload).?),
             .wrap_errunion_err => try genWrapErrUnionErr(o, inst.castTag(.wrap_errunion_err).?),
-            else => |e| return o.dg.fail(o.dg.decl.src(), "TODO: C backend: implement codegen for {}", .{e}),
+            else => |e| return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement codegen for {}", .{e}),
         };
         switch (result_value) {
             .none => {},
@@ -756,7 +756,7 @@ fn genCall(o: *Object, inst: *Inst.Call) !CValue {
         try writer.writeAll(");\n");
         return result_local;
     } else {
-        return o.dg.fail(o.dg.decl.src(), "TODO: C backend: implement function pointers", .{});
+        return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement function pointers", .{});
     }
 }
 
@@ -913,13 +913,13 @@ fn genAsm(o: *Object, as: *Inst.Assembly) !CValue {
             try o.writeCValue(writer, arg_c_value);
             try writer.writeAll(";\n");
         } else {
-            return o.dg.fail(o.dg.decl.src(), "TODO non-explicit inline asm regs", .{});
+            return o.dg.fail(.{ .node_offset = 0 }, "TODO non-explicit inline asm regs", .{});
         }
     }
     const volatile_string: []const u8 = if (as.is_volatile) "volatile " else "";
     try writer.print("__asm {s}(\"{s}\"", .{ volatile_string, as.asm_source });
     if (as.output) |_| {
-        return o.dg.fail(o.dg.decl.src(), "TODO inline asm output", .{});
+        return o.dg.fail(.{ .node_offset = 0 }, "TODO inline asm output", .{});
     }
     if (as.inputs.len > 0) {
         if (as.output == null) {
@@ -945,7 +945,7 @@ fn genAsm(o: *Object, as: *Inst.Assembly) !CValue {
     if (as.base.isUnused())
         return CValue.none;
 
-    return o.dg.fail(o.dg.decl.src(), "TODO: C backend: inline asm expression result used", .{});
+    return o.dg.fail(.{ .node_offset = 0 }, "TODO: C backend: inline asm expression result used", .{});
 }
 
 fn genIsNull(o: *Object, inst: *Inst.UnOp) !CValue {
src/codegen/wasm.zig
@@ -14,6 +14,7 @@ const Type = @import("../type.zig").Type;
 const Value = @import("../value.zig").Value;
 const Compilation = @import("../Compilation.zig");
 const AnyMCValue = @import("../codegen.zig").AnyMCValue;
+const LazySrcLoc = Module.LazySrcLoc;
 
 /// Wasm Value, created when generating an instruction
 const WValue = union(enum) {
@@ -70,11 +71,9 @@ pub const Context = struct {
     }
 
     /// Sets `err_msg` on `Context` and returns `error.CodegenFail` which is caught in link/Wasm.zig
-    fn fail(self: *Context, src: usize, comptime fmt: []const u8, args: anytype) InnerError {
-        self.err_msg = try Module.ErrorMsg.create(self.gpa, .{
-            .file_scope = self.decl.getFileScope(),
-            .byte_offset = src,
-        }, fmt, args);
+    fn fail(self: *Context, src: LazySrcLoc, comptime fmt: []const u8, args: anytype) InnerError {
+        const src_loc = src.toSrcLocWithDecl(self.decl);
+        self.err_msg = try Module.ErrorMsg.create(self.gpa, src_loc, fmt, args);
         return error.CodegenFail;
     }
 
@@ -91,7 +90,7 @@ pub const Context = struct {
     }
 
     /// Using a given `Type`, returns the corresponding wasm value type
-    fn genValtype(self: *Context, src: usize, ty: Type) InnerError!u8 {
+    fn genValtype(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
         return switch (ty.tag()) {
             .f32 => wasm.valtype(.f32),
             .f64 => wasm.valtype(.f64),
@@ -104,7 +103,7 @@ pub const Context = struct {
     /// Using a given `Type`, returns the corresponding wasm value type
     /// Differently from `genValtype` this also allows `void` to create a block
     /// with no return type
-    fn genBlockType(self: *Context, src: usize, ty: Type) InnerError!u8 {
+    fn genBlockType(self: *Context, src: LazySrcLoc, ty: Type) InnerError!u8 {
         return switch (ty.tag()) {
             .void, .noreturn => wasm.block_empty,
             else => self.genValtype(src, ty),
@@ -139,7 +138,7 @@ pub const Context = struct {
             ty.fnParamTypes(params);
             for (params) |param_type| {
                 // Can we maybe get the source index of each param?
-                const val_type = try self.genValtype(self.decl.src(), param_type);
+                const val_type = try self.genValtype(.{ .node_offset = 0 }, param_type);
                 try writer.writeByte(val_type);
             }
         }
@@ -151,7 +150,7 @@ pub const Context = struct {
             else => |ret_type| {
                 try leb.writeULEB128(writer, @as(u32, 1));
                 // Can we maybe get the source index of the return type?
-                const val_type = try self.genValtype(self.decl.src(), return_type);
+                const val_type = try self.genValtype(.{ .node_offset = 0 }, return_type);
                 try writer.writeByte(val_type);
             },
         }
@@ -168,7 +167,7 @@ pub const Context = struct {
         const mod_fn = blk: {
             if (tv.val.castTag(.function)) |func| break :blk func.data;
             if (tv.val.castTag(.extern_fn)) |ext_fn| return; // don't need codegen for extern functions
-            return self.fail(self.decl.src(), "TODO: Wasm codegen for decl type '{s}'", .{tv.ty.tag()});
+            return self.fail(.{ .node_offset = 0 }, "TODO: Wasm codegen for decl type '{s}'", .{tv.ty.tag()});
         };
 
         // Reserve space to write the size after generating the code as well as space for locals count
src/astgen.zig
@@ -58,20 +58,14 @@ pub const ResultLoc = union(enum) {
     };
 };
 
-pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!*zir.Inst {
-    const tree = scope.tree();
-    const token_starts = tree.tokens.items(.start);
+const void_inst: zir.Inst.Ref = @enumToInt(zir.Const.void_value);
 
-    const type_src = token_starts[tree.firstToken(type_node)];
-    const type_type = try addZIRInstConst(mod, scope, type_src, .{
-        .ty = Type.initTag(.type),
-        .val = Value.initTag(.type_type),
-    });
-    const type_rl: ResultLoc = .{ .ty = type_type };
+pub fn typeExpr(mod: *Module, scope: *Scope, type_node: ast.Node.Index) InnerError!zir.Inst.Ref {
+    const type_rl: ResultLoc = .{ .ty = @enumToInt(zir.Const.type_type) };
     return expr(mod, scope, type_rl, type_node);
 }
 
-fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const node_tags = tree.nodes.items(.tag);
     const main_tokens = tree.nodes.items(.main_token);
@@ -265,7 +259,7 @@ fn lvalExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.I
 /// When `rl` is discard, ptr, inferred_ptr, bitcasted_ptr, or inferred_ptr, the
 /// result instruction can be used to inspect whether it is isNoReturn() but that is it,
 /// it must otherwise not be used.
-pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_tags = tree.tokens.items(.tag);
@@ -294,20 +288,62 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
         .asm_output => unreachable, // Handled in `asmExpr`.
         .asm_input => unreachable, // Handled in `asmExpr`.
 
-        .assign => return rvalueVoid(mod, scope, rl, node, try assign(mod, scope, node)),
-        .assign_bit_and => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_and)),
-        .assign_bit_or => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .bit_or)),
-        .assign_bit_shift_left => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shl)),
-        .assign_bit_shift_right => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .shr)),
-        .assign_bit_xor => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .xor)),
-        .assign_div => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .div)),
-        .assign_sub => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .sub)),
-        .assign_sub_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .subwrap)),
-        .assign_mod => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mod_rem)),
-        .assign_add => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .add)),
-        .assign_add_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .addwrap)),
-        .assign_mul => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mul)),
-        .assign_mul_wrap => return rvalueVoid(mod, scope, rl, node, try assignOp(mod, scope, node, .mulwrap)),
+        .assign => {
+            try assign(mod, scope, node);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_bit_and => {
+            try assignOp(mod, scope, node, .bit_and);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_bit_or => {
+            try assignOp(mod, scope, node, .bit_or);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_bit_shift_left => {
+            try assignOp(mod, scope, node, .shl);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_bit_shift_right => {
+            try assignOp(mod, scope, node, .shr);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_bit_xor => {
+            try assignOp(mod, scope, node, .xor);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_div => {
+            try assignOp(mod, scope, node, .div);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_sub => {
+            try assignOp(mod, scope, node, .sub);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_sub_wrap => {
+            try assignOp(mod, scope, node, .subwrap);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_mod => {
+            try assignOp(mod, scope, node, .mod_rem);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_add => {
+            try assignOp(mod, scope, node, .add);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_add_wrap => {
+            try assignOp(mod, scope, node, .addwrap);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_mul => {
+            try assignOp(mod, scope, node, .mul);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
+        .assign_mul_wrap => {
+            try assignOp(mod, scope, node, .mulwrap);
+            return rvalue(mod, scope, rl, void_inst, node);
+        },
 
         .add => return simpleBinOp(mod, scope, rl, node, .add),
         .add_wrap => return simpleBinOp(mod, scope, rl, node, .addwrap),
@@ -336,10 +372,14 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
         .bool_and => return boolBinOp(mod, scope, rl, node, true),
         .bool_or => return boolBinOp(mod, scope, rl, node, false),
 
-        .bool_not => return rvalue(mod, scope, rl, try boolNot(mod, scope, node)),
-        .bit_not => return rvalue(mod, scope, rl, try bitNot(mod, scope, node)),
-        .negation => return rvalue(mod, scope, rl, try negation(mod, scope, node, .sub)),
-        .negation_wrap => return rvalue(mod, scope, rl, try negation(mod, scope, node, .subwrap)),
+        .bool_not => @panic("TODO"),
+        .bit_not => @panic("TODO"),
+        .negation => @panic("TODO"),
+        .negation_wrap => @panic("TODO"),
+        //.bool_not => return rvalue(mod, scope, rl, try boolNot(mod, scope, node)),
+        //.bit_not => return rvalue(mod, scope, rl, try bitNot(mod, scope, node)),
+        //.negation => return rvalue(mod, scope, rl, try negation(mod, scope, node, .sub)),
+        //.negation_wrap => return rvalue(mod, scope, rl, try negation(mod, scope, node, .subwrap)),
 
         .identifier => return identifier(mod, scope, rl, node),
 
@@ -377,6 +417,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
         },
 
         .unreachable_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             return addZIRNoOp(mod, scope, src, .unreachable_safe);
@@ -402,16 +443,19 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
         .slice_sentinel => return sliceExpr(mod, scope, rl, tree.sliceSentinel(node)),
 
         .deref => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const lhs = try expr(mod, scope, .none, node_datas[node].lhs);
             const src = token_starts[main_tokens[node]];
             const result = try addZIRUnOp(mod, scope, src, .deref, lhs);
             return rvalue(mod, scope, rl, result);
         },
         .address_of => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const result = try expr(mod, scope, .ref, node_datas[node].lhs);
             return rvalue(mod, scope, rl, result);
         },
         .undefined_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             const result = try addZIRInstConst(mod, scope, src, .{
@@ -421,6 +465,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .true_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             const result = try addZIRInstConst(mod, scope, src, .{
@@ -430,6 +475,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .false_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             const result = try addZIRInstConst(mod, scope, src, .{
@@ -439,6 +485,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .null_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             const result = try addZIRInstConst(mod, scope, src, .{
@@ -448,12 +495,14 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .optional_type => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const src = token_starts[main_tokens[node]];
             const operand = try typeExpr(mod, scope, node_datas[node].lhs);
             const result = try addZIRUnOp(mod, scope, src, .optional_type, operand);
             return rvalue(mod, scope, rl, result);
         },
         .unwrap_optional => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const src = token_starts[main_tokens[node]];
             switch (rl) {
                 .ref => return addZIRUnOp(
@@ -473,6 +522,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             }
         },
         .block_two, .block_two_semicolon => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const statements = [2]ast.Node.Index{ node_datas[node].lhs, node_datas[node].rhs };
             if (node_datas[node].lhs == 0) {
                 return blockExpr(mod, scope, rl, node, statements[0..0]);
@@ -483,10 +533,12 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             }
         },
         .block, .block_semicolon => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const statements = tree.extra_data[node_datas[node].lhs..node_datas[node].rhs];
             return blockExpr(mod, scope, rl, node, statements);
         },
         .enum_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const ident_token = main_tokens[node];
             const gen_zir = scope.getGenZir();
             const string_bytes = &gen_zir.zir_exec.string_bytes;
@@ -497,6 +549,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .error_value => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const ident_token = node_datas[node].rhs;
             const name = try mod.identifierTokenString(scope, ident_token);
             const src = token_starts[ident_token];
@@ -504,6 +557,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .error_union => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const error_set = try typeExpr(mod, scope, node_datas[node].lhs);
             const payload = try typeExpr(mod, scope, node_datas[node].rhs);
             const src = token_starts[main_tokens[node]];
@@ -511,6 +565,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .merge_error_sets => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const lhs = try typeExpr(mod, scope, node_datas[node].lhs);
             const rhs = try typeExpr(mod, scope, node_datas[node].rhs);
             const src = token_starts[main_tokens[node]];
@@ -518,6 +573,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .anyframe_literal => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const main_token = main_tokens[node];
             const src = token_starts[main_token];
             const result = try addZIRInstConst(mod, scope, src, .{
@@ -527,12 +583,14 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
             return rvalue(mod, scope, rl, result);
         },
         .anyframe_type => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const src = token_starts[node_datas[node].lhs];
             const return_type = try typeExpr(mod, scope, node_datas[node].rhs);
             const result = try addZIRUnOp(mod, scope, src, .anyframe_type, return_type);
             return rvalue(mod, scope, rl, result);
         },
         .@"catch" => {
+            if (true) @panic("TODO update for zir-memory-layout");
             const catch_token = main_tokens[node];
             const payload_token: ?ast.TokenIndex = if (token_tags[catch_token + 1] == .pipe)
                 catch_token + 2
@@ -631,9 +689,11 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) In
         .@"switch", .switch_comma => return switchExpr(mod, scope, rl, node),
 
         .@"nosuspend" => return nosuspendExpr(mod, scope, rl, node),
-        .@"suspend" => return rvalue(mod, scope, rl, try suspendExpr(mod, scope, node)),
+        .@"suspend" => @panic("TODO"),
+        //.@"suspend" => return rvalue(mod, scope, rl, try suspendExpr(mod, scope, node)),
         .@"await" => return awaitExpr(mod, scope, rl, node),
-        .@"resume" => return rvalue(mod, scope, rl, try resumeExpr(mod, scope, node)),
+        .@"resume" => @panic("TODO"),
+        //.@"resume" => return rvalue(mod, scope, rl, try resumeExpr(mod, scope, node)),
 
         .@"defer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .defer", .{}),
         .@"errdefer" => return mod.failNode(scope, node, "TODO implement astgen.expr for .errdefer", .{}),
@@ -673,20 +733,22 @@ pub fn comptimeExpr(
     parent_scope: *Scope,
     rl: ResultLoc,
     node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout branch");
+
     // If we are already in a comptime scope, no need to make another one.
     if (parent_scope.isComptime()) {
         return expr(mod, parent_scope, rl, node);
     }
 
+    const gz = parent_scope.getGenZir();
     const tree = parent_scope.tree();
     const token_starts = tree.tokens.items(.start);
 
     // Make a scope to collect generated instructions in the sub-expression.
     var block_scope: Scope.GenZir = .{
         .parent = parent_scope,
-        .decl = parent_scope.ownerDecl().?,
-        .arena = parent_scope.arena(),
+        .zir_code = gz.zir_code,
         .force_comptime = true,
         .instructions = .{},
     };
@@ -698,7 +760,7 @@ pub fn comptimeExpr(
 
     const src = token_starts[tree.firstToken(node)];
     const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, block_scope.instructions.items),
     });
 
     return &block.base;
@@ -709,7 +771,8 @@ fn breakExpr(
     parent_scope: *Scope,
     rl: ResultLoc,
     node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = parent_scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -787,7 +850,8 @@ fn continueExpr(
     parent_scope: *Scope,
     rl: ResultLoc,
     node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = parent_scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -843,7 +907,7 @@ pub fn blockExpr(
     rl: ResultLoc,
     block_node: ast.Node.Index,
     statements: []const ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     const tracy = trace(@src());
     defer tracy.end();
 
@@ -859,7 +923,7 @@ pub fn blockExpr(
     }
 
     try blockExprStmts(mod, scope, block_node, statements);
-    return rvalueVoid(mod, scope, rl, block_node, {});
+    return rvalue(mod, scope, rl, void_inst, block_node);
 }
 
 fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIndex) !void {
@@ -875,21 +939,18 @@ fn checkLabelRedefinition(mod: *Module, parent_scope: *Scope, label: ast.TokenIn
                         const main_tokens = tree.nodes.items(.main_token);
                         const token_starts = tree.tokens.items(.start);
 
-                        const label_src = token_starts[label];
-                        const prev_label_src = token_starts[prev_label.token];
-
                         const label_name = try mod.identifierTokenString(parent_scope, label);
                         const msg = msg: {
                             const msg = try mod.errMsg(
                                 parent_scope,
-                                label_src,
+                                gen_zir.tokSrcLoc(label),
                                 "redefinition of label '{s}'",
                                 .{label_name},
                             );
                             errdefer msg.destroy(mod.gpa);
                             try mod.errNote(
                                 parent_scope,
-                                prev_label_src,
+                                gen_zir.tokSrcLoc(prev_label.token),
                                 msg,
                                 "previous definition is here",
                                 .{},
@@ -917,7 +978,7 @@ fn labeledBlockExpr(
     block_node: ast.Node.Index,
     statements: []const ast.Node.Index,
     zir_tag: zir.Inst.Tag,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     const tracy = trace(@src());
     defer tracy.end();
 
@@ -1285,6 +1346,7 @@ fn assignOp(
     infix_node: ast.Node.Index,
     op_inst_tag: zir.Inst.Tag,
 ) InnerError!void {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1299,7 +1361,7 @@ fn assignOp(
     _ = try addZIRBinOp(mod, scope, src, .store, lhs_ptr, result);
 }
 
-fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1314,7 +1376,7 @@ fn boolNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.In
     return addZIRUnOp(mod, scope, src, .bool_not, operand);
 }
 
-fn bitNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn bitNot(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1330,7 +1392,7 @@ fn negation(
     scope: *Scope,
     node: ast.Node.Index,
     op_inst_tag: zir.Inst.Tag,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1350,7 +1412,8 @@ fn ptrType(
     scope: *Scope,
     rl: ResultLoc,
     ptr_info: ast.full.PtrType,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);
 
@@ -1394,7 +1457,8 @@ fn ptrType(
     return rvalue(mod, scope, rl, result);
 }
 
-fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
+fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const node_datas = tree.nodes.items(.data);
@@ -1421,7 +1485,8 @@ fn arrayType(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !
     }
 }
 
-fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
+fn arrayTypeSentinel(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_starts = tree.tokens.items(.start);
@@ -1454,7 +1519,8 @@ fn containerDecl(
     scope: *Scope,
     rl: ResultLoc,
     container_decl: ast.full.ContainerDecl,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     return mod.failTok(scope, container_decl.ast.main_token, "TODO implement container decls", .{});
 }
 
@@ -1463,7 +1529,8 @@ fn errorSetDecl(
     scope: *Scope,
     rl: ResultLoc,
     node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_tags = tree.tokens.items(.tag);
@@ -1516,7 +1583,9 @@ fn orelseCatchExpr(
     unwrap_code_op: zir.Inst.Tag,
     rhs: ast.Node.Index,
     payload_token: ?ast.TokenIndex,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
+
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);
 
@@ -1548,7 +1617,7 @@ fn orelseCatchExpr(
     }, .{});
 
     const block = try addZIRInstBlock(mod, scope, src, .block, .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, block_scope.instructions.items),
     });
 
     var then_scope: Scope.GenZir = .{
@@ -1624,11 +1693,11 @@ fn finishThenElseBlock(
     else_body: *zir.Body,
     then_src: usize,
     else_src: usize,
-    then_result: *zir.Inst,
+    then_result: zir.Inst.Ref,
     else_result: ?*zir.Inst,
-    main_block: *zir.Inst.Block,
-    then_break_block: *zir.Inst.Block,
-) InnerError!*zir.Inst {
+    main_block: *zir.Inst.Block,
+    then_break_block: *zir.Inst.Block,
+) InnerError!zir.Inst.Ref {
     // We now have enough information to decide whether the result instruction should
     // be communicated via result location pointer or break instructions.
     const strat = rlStrategy(rl, block_scope);
@@ -1699,7 +1768,8 @@ fn tokenIdentEql(mod: *Module, scope: *Scope, token1: ast.TokenIndex, token2: as
     return mem.eql(u8, ident_name_1, ident_name_2);
 }
 
-pub fn fieldAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+pub fn fieldAccess(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1727,7 +1797,8 @@ fn arrayAccess(
     scope: *Scope,
     rl: ResultLoc,
     node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_starts = tree.tokens.items(.start);
@@ -1756,7 +1827,8 @@ fn sliceExpr(
     scope: *Scope,
     rl: ResultLoc,
     slice: ast.full.Slice,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const token_starts = tree.tokens.items(.start);
 
@@ -1805,7 +1877,8 @@ fn simpleBinOp(
     rl: ResultLoc,
     infix_node: ast.Node.Index,
     op_inst_tag: zir.Inst.Tag,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1824,7 +1897,8 @@ fn boolBinOp(
     rl: ResultLoc,
     infix_node: ast.Node.Index,
     is_bool_and: bool,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -1853,7 +1927,7 @@ fn boolBinOp(
     }, .{});
 
     const block = try addZIRInstBlock(mod, scope, src, .block, .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, block_scope.instructions.items),
     });
 
     var rhs_scope: Scope.GenZir = .{
@@ -1893,15 +1967,15 @@ fn boolBinOp(
         //     break rhs
         // else
         //     break false
-        condbr.positionals.then_body = .{ .instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) };
-        condbr.positionals.else_body = .{ .instructions = try const_scope.arena.dupe(*zir.Inst, const_scope.instructions.items) };
+        condbr.positionals.then_body = .{ .instructions = try rhs_scope.arena.dupe(zir.Inst.Ref, rhs_scope.instructions.items) };
+        condbr.positionals.else_body = .{ .instructions = try const_scope.arena.dupe(zir.Inst.Ref, const_scope.instructions.items) };
     } else {
         // if lhs // OR
         //     break true
         // else
         //     break rhs
-        condbr.positionals.then_body = .{ .instructions = try const_scope.arena.dupe(*zir.Inst, const_scope.instructions.items) };
-        condbr.positionals.else_body = .{ .instructions = try rhs_scope.arena.dupe(*zir.Inst, rhs_scope.instructions.items) };
+        condbr.positionals.then_body = .{ .instructions = try const_scope.arena.dupe(zir.Inst.Ref, const_scope.instructions.items) };
+        condbr.positionals.else_body = .{ .instructions = try rhs_scope.arena.dupe(zir.Inst.Ref, rhs_scope.instructions.items) };
     }
 
     return rvalue(mod, scope, rl, &block.base);
@@ -1912,7 +1986,8 @@ fn ifExpr(
     scope: *Scope,
     rl: ResultLoc,
     if_full: ast.full.If,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     var block_scope: Scope.GenZir = .{
         .parent = scope,
         .decl = scope.ownerDecl().?,
@@ -1951,7 +2026,7 @@ fn ifExpr(
     }, .{});
 
     const block = try addZIRInstBlock(mod, scope, if_src, .block, .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, block_scope.instructions.items),
     });
 
     const then_src = token_starts[tree.lastToken(if_full.ast.then_expr)];
@@ -2016,7 +2091,7 @@ fn ifExpr(
 /// Expects to find exactly 1 .store_to_block_ptr instruction.
 fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZir) !void {
     body.* = .{
-        .instructions = try scope.arena.alloc(*zir.Inst, scope.instructions.items.len - 1),
+        .instructions = try scope.arena.alloc(zir.Inst.Ref, scope.instructions.items.len - 1),
     };
     var dst_index: usize = 0;
     for (scope.instructions.items) |src_inst| {
@@ -2030,7 +2105,7 @@ fn copyBodyWithElidedStoreBlockPtr(body: *zir.Body, scope: Module.Scope.GenZir)
 
 fn copyBodyNoEliding(body: *zir.Body, scope: Module.Scope.GenZir) !void {
     body.* = .{
-        .instructions = try scope.arena.dupe(*zir.Inst, scope.instructions.items),
+        .instructions = try scope.arena.dupe(zir.Inst.Ref, scope.instructions.items),
     };
 }
 
@@ -2039,7 +2114,8 @@ fn whileExpr(
     scope: *Scope,
     rl: ResultLoc,
     while_full: ast.full.While,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     if (while_full.label_token) |label_token| {
         try checkLabelRedefinition(mod, scope, label_token);
     }
@@ -2096,7 +2172,7 @@ fn whileExpr(
         .else_body = undefined, // populated below
     }, .{});
     const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .block, .{
-        .instructions = try loop_scope.arena.dupe(*zir.Inst, continue_scope.instructions.items),
+        .instructions = try loop_scope.arena.dupe(zir.Inst.Ref, continue_scope.instructions.items),
     });
     // TODO avoid emitting the continue expr when there
     // are no jumps to it. This happens when the last statement of a while body is noreturn
@@ -2113,13 +2189,13 @@ fn whileExpr(
         },
         .positionals = .{
             .body = .{
-                .instructions = try scope.arena().dupe(*zir.Inst, loop_scope.instructions.items),
+                .instructions = try scope.arena().dupe(zir.Inst.Ref, loop_scope.instructions.items),
             },
         },
         .kw_args = .{},
     };
     const while_block = try addZIRInstBlock(mod, scope, while_src, .block, .{
-        .instructions = try scope.arena().dupe(*zir.Inst, &[1]*zir.Inst{&loop.base}),
+        .instructions = try scope.arena().dupe(zir.Inst.Ref, &[1]zir.Inst.Ref{&loop.base}),
     });
     loop_scope.break_block = while_block;
     loop_scope.continue_block = cond_block;
@@ -2195,7 +2271,8 @@ fn forExpr(
     scope: *Scope,
     rl: ResultLoc,
     for_full: ast.full.While,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     if (for_full.label_token) |label_token| {
         try checkLabelRedefinition(mod, scope, label_token);
     }
@@ -2258,7 +2335,7 @@ fn forExpr(
         .else_body = undefined, // populated below
     }, .{});
     const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .block, .{
-        .instructions = try loop_scope.arena.dupe(*zir.Inst, cond_scope.instructions.items),
+        .instructions = try loop_scope.arena.dupe(zir.Inst.Ref, cond_scope.instructions.items),
     });
 
     // increment index variable
@@ -2278,13 +2355,13 @@ fn forExpr(
         },
         .positionals = .{
             .body = .{
-                .instructions = try scope.arena().dupe(*zir.Inst, loop_scope.instructions.items),
+                .instructions = try scope.arena().dupe(zir.Inst.Ref, loop_scope.instructions.items),
             },
         },
         .kw_args = .{},
     };
     const for_block = try addZIRInstBlock(mod, scope, for_src, .block, .{
-        .instructions = try scope.arena().dupe(*zir.Inst, &[1]*zir.Inst{&loop.base}),
+        .instructions = try scope.arena().dupe(zir.Inst.Ref, &[1]zir.Inst.Ref{&loop.base}),
     });
     loop_scope.break_block = for_block;
     loop_scope.continue_block = cond_block;
@@ -2407,7 +2484,8 @@ fn switchExpr(
     scope: *Scope,
     rl: ResultLoc,
     switch_node: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -2432,7 +2510,7 @@ fn switchExpr(
     setBlockResultLoc(&block_scope, rl);
     defer block_scope.instructions.deinit(mod.gpa);
 
-    var items = std.ArrayList(*zir.Inst).init(mod.gpa);
+    var items = std.ArrayList(zir.Inst.Ref).init(mod.gpa);
     defer items.deinit();
 
     // First we gather all the switch items and check else/'_' prongs.
@@ -2549,13 +2627,13 @@ fn switchExpr(
     const switch_inst = try addZirInstT(mod, &block_scope.base, switch_src, zir.Inst.SwitchBr, rl_and_tag.tag, .{
         .target = target,
         .cases = cases,
-        .items = try block_scope.arena.dupe(*zir.Inst, items.items),
+        .items = try block_scope.arena.dupe(zir.Inst.Ref, items.items),
         .else_body = undefined, // populated below
         .range = first_range,
         .special_prong = special_prong,
     });
     const block = try addZIRInstBlock(mod, scope, switch_src, .block, .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, block_scope.instructions.items),
     });
 
     var case_scope: Scope.GenZir = .{
@@ -2611,7 +2689,7 @@ fn switchExpr(
 
             cases[case_index] = .{
                 .item = item,
-                .body = .{ .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items) },
+                .body = .{ .instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items) },
             };
             case_index += 1;
             continue;
@@ -2658,14 +2736,14 @@ fn switchExpr(
             .else_body = undefined, // populated below
         }, .{});
         const cond_block = try addZIRInstBlock(mod, &else_scope.base, case_src, .block, .{
-            .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
+            .instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
         });
 
         // reset cond_scope for then_body
         case_scope.instructions.items.len = 0;
         try switchCaseExpr(mod, &case_scope.base, block_scope.break_result_loc, block, case, target);
         condbr.positionals.then_body = .{
-            .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
+            .instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
         };
 
         // reset cond_scope for else_body
@@ -2674,7 +2752,7 @@ fn switchExpr(
             .block = cond_block,
         }, .{});
         condbr.positionals.else_body = .{
-            .instructions = try scope.arena().dupe(*zir.Inst, case_scope.instructions.items),
+            .instructions = try scope.arena().dupe(zir.Inst.Ref, case_scope.instructions.items),
         };
     }
 
@@ -2686,7 +2764,7 @@ fn switchExpr(
         _ = try addZIRNoOp(mod, &else_scope.base, switch_src, .unreachable_unsafe);
     }
     switch_inst.positionals.else_body = .{
-        .instructions = try block_scope.arena.dupe(*zir.Inst, else_scope.instructions.items),
+        .instructions = try block_scope.arena.dupe(zir.Inst.Ref, else_scope.instructions.items),
     };
 
     return &block.base;
@@ -2698,7 +2776,7 @@ fn switchCaseExpr(
     rl: ResultLoc,
     block: *zir.Inst.Block,
     case: ast.full.SwitchCase,
-    target: *zir.Inst,
+    target: zir.Inst.Ref,
 ) !void {
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
@@ -2733,27 +2811,22 @@ fn switchCaseExpr(
     }
 }
 
-fn ret(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn ret(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
-    const token_starts = tree.tokens.items(.start);
 
-    const src = token_starts[main_tokens[node]];
-    const rhs_node = node_datas[node].lhs;
-    if (rhs_node != 0) {
-        if (nodeMayNeedMemoryLocation(scope, rhs_node)) {
-            const ret_ptr = try addZIRNoOp(mod, scope, src, .ret_ptr);
-            const operand = try expr(mod, scope, .{ .ptr = ret_ptr }, rhs_node);
-            return addZIRUnOp(mod, scope, src, .@"return", operand);
-        } else {
-            const fn_ret_ty = try addZIRNoOp(mod, scope, src, .ret_type);
-            const operand = try expr(mod, scope, .{ .ty = fn_ret_ty }, rhs_node);
-            return addZIRUnOp(mod, scope, src, .@"return", operand);
-        }
-    } else {
-        return addZIRNoOp(mod, scope, src, .return_void);
-    }
+    const operand_node = node_datas[node].lhs;
+    const gz = scope.getGenZir();
+    const operand: zir.Inst.Ref = if (operand_node != 0) operand: {
+        const rl: ResultLoc = if (nodeMayNeedMemoryLocation(scope, operand_node)) .{
+            .ptr = try gz.addNode(.ret_ptr, node),
+        } else .{
+            .ty = try gz.addNode(.ret_type, node),
+        };
+        break :operand try expr(mod, scope, rl, operand_node);
+    } else void_inst;
+    return gz.addUnNode(.ret_node, operand, node);
 }
 
 fn identifier(
@@ -2761,7 +2834,8 @@ fn identifier(
     scope: *Scope,
     rl: ResultLoc,
     ident: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tracy = trace(@src());
     defer tracy.end();
 
@@ -2882,7 +2956,8 @@ fn stringLiteral(
     scope: *Scope,
     rl: ResultLoc,
     str_lit: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_starts = tree.tokens.items(.start);
@@ -2899,7 +2974,8 @@ fn multilineStringLiteral(
     scope: *Scope,
     rl: ResultLoc,
     str_lit: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const node_datas = tree.nodes.items(.data);
     const main_tokens = tree.nodes.items(.main_token);
@@ -2943,7 +3019,8 @@ fn multilineStringLiteral(
     return rvalue(mod, scope, rl, str_inst);
 }
 
-fn charLiteral(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !*zir.Inst {
+fn charLiteral(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) !zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const main_token = main_tokens[node];
@@ -2970,11 +3047,11 @@ fn integerLiteral(
     mod: *Module,
     scope: *Scope,
     rl: ResultLoc,
-    int_lit: ast.Node.Index,
-) InnerError!*zir.Inst {
+    node: ast.Node.Index,
+) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
-    const int_token = main_tokens[int_lit];
+    const int_token = main_tokens[node];
     const prefixed_bytes = tree.tokenSlice(int_token);
     const gz = scope.getGenZir();
     if (std.fmt.parseInt(u64, prefixed_bytes, 0)) |small_int| {
@@ -2983,9 +3060,9 @@ fn integerLiteral(
             1 => @enumToInt(zir.Const.one),
             else => try gz.addInt(small_int),
         };
-        return rvalue(mod, scope, rl, result);
+        return rvalue(mod, scope, rl, result, node);
     } else |err| {
-        return mod.failTok(scope, int_token, "TODO implement int literals that don't fit in a u64", .{});
+        return mod.failNode(scope, node, "TODO implement int literals that don't fit in a u64", .{});
     }
 }
 
@@ -2994,7 +3071,8 @@ fn floatLiteral(
     scope: *Scope,
     rl: ResultLoc,
     float_lit: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const arena = scope.arena();
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
@@ -3016,7 +3094,8 @@ fn floatLiteral(
     return rvalue(mod, scope, rl, result);
 }
 
-fn asmExpr(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!*zir.Inst {
+fn asmExpr(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const arena = scope.arena();
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
@@ -3028,7 +3107,7 @@ fn asmExpr(mod: *Module, scope: *Scope, rl: ResultLoc, full: ast.full.Asm) Inner
     }
 
     const inputs = try arena.alloc([]const u8, full.inputs.len);
-    const args = try arena.alloc(*zir.Inst, full.inputs.len);
+    const args = try arena.alloc(zir.Inst.Ref, full.inputs.len);
 
     const src = token_starts[full.ast.asm_token];
     const str_type = try addZIRInstConst(mod, scope, src, .{
@@ -3068,7 +3147,7 @@ fn as(
     src: usize,
     lhs: ast.Node.Index,
     rhs: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     const dest_type = try typeExpr(mod, scope, lhs);
     switch (rl) {
         .none, .discard, .ref, .ty => {
@@ -3099,10 +3178,10 @@ fn asRlPtr(
     scope: *Scope,
     rl: ResultLoc,
     src: usize,
-    result_ptr: *zir.Inst,
+    result_ptr: zir.Inst.Ref,
     operand_node: ast.Node.Index,
-    dest_type: *zir.Inst,
-) InnerError!*zir.Inst {
+    dest_type: zir.Inst.Ref,
+) InnerError!zir.Inst.Ref {
     // Detect whether this expr() call goes into rvalue() to store the result into the
     // result location. If it does, elide the coerce_result_ptr instruction
     // as well as the store instruction, instead passing the result as an rvalue.
@@ -3146,7 +3225,7 @@ fn bitCast(
     src: usize,
     lhs: ast.Node.Index,
     rhs: ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     const dest_type = try typeExpr(mod, scope, lhs);
     switch (rl) {
         .none => {
@@ -3193,7 +3272,7 @@ fn typeOf(
     builtin_token: ast.TokenIndex,
     src: usize,
     params: []const ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     if (params.len < 1) {
         return mod.failTok(scope, builtin_token, "expected at least 1 argument, found 0", .{});
     }
@@ -3201,7 +3280,7 @@ fn typeOf(
         return rvalue(mod, scope, rl, try addZIRUnOp(mod, scope, src, .typeof, try expr(mod, scope, .none, params[0])));
     }
     const arena = scope.arena();
-    var items = try arena.alloc(*zir.Inst, params.len);
+    var items = try arena.alloc(zir.Inst.Ref, params.len);
     for (params) |param, param_i|
         items[param_i] = try expr(mod, scope, .none, param);
     return rvalue(mod, scope, rl, try addZIRInst(mod, scope, src, zir.Inst.TypeOfPeer, .{ .items = items }, .{}));
@@ -3213,7 +3292,8 @@ fn builtinCall(
     rl: ResultLoc,
     call: ast.Node.Index,
     params: []const ast.Node.Index,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const main_tokens = tree.nodes.items(.main_token);
     const token_starts = tree.tokens.items(.start);
@@ -3284,7 +3364,7 @@ fn builtinCall(
         },
         .compile_log => {
             const arena = scope.arena();
-            var targets = try arena.alloc(*zir.Inst, params.len);
+            var targets = try arena.alloc(zir.Inst.Ref, params.len);
             for (params) |param, param_i|
                 targets[param_i] = try expr(mod, scope, .none, param);
             const result = try addZIRInst(mod, scope, src, zir.Inst.CompileLog, .{ .to_log = targets }, .{});
@@ -3414,7 +3494,7 @@ fn callExpr(
     rl: ResultLoc,
     node: ast.Node.Index,
     call: ast.full.Call,
-) InnerError!*zir.Inst {
+) InnerError!zir.Inst.Ref {
     if (true) {
         @panic("TODO update for zir-memory-layout branch");
     }
@@ -3459,7 +3539,7 @@ fn callExpr(
     return rvalue(mod, scope, rl, result); // TODO function call with result location
 }
 
-fn suspendExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn suspendExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const src = tree.tokens.items(.start)[tree.nodes.items(.main_token)[node]];
 
@@ -3504,12 +3584,13 @@ fn suspendExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zi
     }
 
     const block = try addZIRInstBlock(mod, scope, src, .suspend_block, .{
-        .instructions = try scope.arena().dupe(*zir.Inst, suspend_scope.instructions.items),
+        .instructions = try scope.arena().dupe(zir.Inst.Ref, suspend_scope.instructions.items),
     });
     return &block.base;
 }
 
-fn nosuspendExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+fn nosuspendExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     var child_scope = Scope.Nosuspend{
         .parent = scope,
@@ -3520,7 +3601,8 @@ fn nosuspendExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Inde
     return expr(mod, &child_scope.base, rl, tree.nodes.items(.data)[node].lhs);
 }
 
-fn awaitExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!*zir.Inst {
+fn awaitExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) InnerError!zir.Inst.Ref {
+    if (true) @panic("TODO update for zir-memory-layout");
     const tree = scope.tree();
     const src = tree.tokens.items(.start)[tree.nodes.items(.main_token)[node]];
     const is_nosuspend = scope.getNosuspend() != null;
@@ -3542,7 +3624,7 @@ fn awaitExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: ast.Node.Index) I
     return addZIRUnOp(mod, scope, src, if (is_nosuspend) .nosuspend_await else .@"await", operand);
 }
 
-fn resumeExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!*zir.Inst {
+fn resumeExpr(mod: *Module, scope: *Scope, node: ast.Node.Index) InnerError!zir.Inst.Ref {
     const tree = scope.tree();
     const src = tree.tokens.items(.start)[tree.nodes.items(.main_token)[node]];
 
@@ -3828,7 +3910,7 @@ fn rvalue(
             // We need a pointer but we have a value.
             const tree = scope.tree();
             const src_token = tree.firstToken(src_node);
-            return gz.addUnTok(.ref, result, src_tok);
+            return gz.addUnTok(.ref, result, src_token);
         },
         .ty => |ty_inst| return gz.addBin(.as, ty_inst, result),
         .ptr => |ptr_inst| {
@@ -3844,31 +3926,12 @@ fn rvalue(
         },
         .block_ptr => |block_scope| {
             block_scope.rvalue_rl_count += 1;
-            _ = try gz.addBin(.store_to_block_ptr, block_scope.rl_ptr.?, result);
+            _ = try gz.addBin(.store_to_block_ptr, block_scope.rl_ptr, result);
             return result;
         },
     }
 }
 
-/// TODO when reworking ZIR memory layout, make the void value correspond to a hard coded
-/// index; that way this does not actually need to allocate anything.
-fn rvalueVoid(
-    mod: *Module,
-    scope: *Scope,
-    rl: ResultLoc,
-    node: ast.Node.Index,
-    result: void,
-) InnerError!*zir.Inst {
-    const tree = scope.tree();
-    const main_tokens = tree.nodes.items(.main_token);
-    const src = tree.tokens.items(.start)[tree.firstToken(node)];
-    const void_inst = try addZIRInstConst(mod, scope, src, .{
-        .ty = Type.initTag(.void),
-        .val = Value.initTag(.void_value),
-    });
-    return rvalue(mod, scope, rl, void_inst);
-}
-
 fn rlStrategy(rl: ResultLoc, block_scope: *Scope.GenZir) ResultLoc.Strategy {
     var elide_store_to_block_ptr_instructions = false;
     switch (rl) {
@@ -3953,190 +4016,3 @@ fn setBlockResultLoc(block_scope: *Scope.GenZir, parent_rl: ResultLoc) void {
         },
     }
 }
-
-pub fn addZirInstTag(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    comptime tag: zir.Inst.Tag,
-    positionals: std.meta.fieldInfo(tag.Type(), .positionals).field_type,
-) !*zir.Inst {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(tag.Type());
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = positionals,
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return &inst.base;
-}
-
-pub fn addZirInstT(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    comptime T: type,
-    tag: zir.Inst.Tag,
-    positionals: std.meta.fieldInfo(T, .positionals).field_type,
-) !*T {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(T);
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = positionals,
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return inst;
-}
-
-pub fn addZIRInstSpecial(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    comptime T: type,
-    positionals: std.meta.fieldInfo(T, .positionals).field_type,
-    kw_args: std.meta.fieldInfo(T, .kw_args).field_type,
-) !*T {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(T);
-    inst.* = .{
-        .base = .{
-            .tag = T.base_tag,
-            .src = src,
-        },
-        .positionals = positionals,
-        .kw_args = kw_args,
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return inst;
-}
-
-pub fn addZIRNoOpT(mod: *Module, scope: *Scope, src: usize, tag: zir.Inst.Tag) !*zir.Inst.NoOp {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(zir.Inst.NoOp);
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = .{},
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return inst;
-}
-
-pub fn addZIRNoOp(mod: *Module, scope: *Scope, src: usize, tag: zir.Inst.Tag) !*zir.Inst {
-    const inst = try addZIRNoOpT(mod, scope, src, tag);
-    return &inst.base;
-}
-
-pub fn addZIRUnOp(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    tag: zir.Inst.Tag,
-    operand: *zir.Inst,
-) !*zir.Inst {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(zir.Inst.UnOp);
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = .{
-            .operand = operand,
-        },
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return &inst.base;
-}
-
-pub fn addZIRBinOp(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    tag: zir.Inst.Tag,
-    lhs: *zir.Inst,
-    rhs: *zir.Inst,
-) !*zir.Inst {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(zir.Inst.BinOp);
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = .{
-            .lhs = lhs,
-            .rhs = rhs,
-        },
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return &inst.base;
-}
-
-pub fn addZIRInstBlock(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    tag: zir.Inst.Tag,
-    body: zir.Body,
-) !*zir.Inst.Block {
-    const gen_zir = scope.getGenZir();
-    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
-    const inst = try gen_zir.arena.create(zir.Inst.Block);
-    inst.* = .{
-        .base = .{
-            .tag = tag,
-            .src = src,
-        },
-        .positionals = .{
-            .body = body,
-        },
-        .kw_args = .{},
-    };
-    gen_zir.instructions.appendAssumeCapacity(&inst.base);
-    return inst;
-}
-
-pub fn addZIRInst(
-    mod: *Module,
-    scope: *Scope,
-    src: usize,
-    comptime T: type,
-    positionals: std.meta.fieldInfo(T, .positionals).field_type,
-    kw_args: std.meta.fieldInfo(T, .kw_args).field_type,
-) !*zir.Inst {
-    const inst_special = try addZIRInstSpecial(mod, scope, src, T, positionals, kw_args);
-    return &inst_special.base;
-}
-
-/// TODO The existence of this function is a workaround for a bug in stage1.
-pub fn addZIRInstConst(mod: *Module, scope: *Scope, src: usize, typed_value: TypedValue) !*zir.Inst {
-    const P = std.meta.fieldInfo(zir.Inst.Const, .positionals).field_type;
-    return addZIRInst(mod, scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{});
-}
-
-/// TODO The existence of this function is a workaround for a bug in stage1.
-pub fn addZIRInstLoop(mod: *Module, scope: *Scope, src: usize, body: zir.Body) !*zir.Inst.Loop {
-    const P = std.meta.fieldInfo(zir.Inst.Loop, .positionals).field_type;
-    return addZIRInstSpecial(mod, scope, src, zir.Inst.Loop, P{ .body = body }, .{});
-}
src/codegen.zig
@@ -499,7 +499,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
             defer function.stack.deinit(bin_file.allocator);
             defer function.exitlude_jump_relocs.deinit(bin_file.allocator);
 
-            var call_info = function.resolveCallingConventionValues(src_loc.byte_offset, fn_type) catch |err| switch (err) {
+            var call_info = function.resolveCallingConventionValues(src_loc.lazy, fn_type) catch |err| switch (err) {
                 error.CodegenFail => return Result{ .fail = function.err_msg.? },
                 else => |e| return e,
             };
@@ -2850,7 +2850,7 @@ fn Function(comptime arch: std.Target.Cpu.Arch) type {
                         return self.fail(inst.base.src, "TODO implement support for more x86 assembly instructions", .{});
                     }
 
-                    if (inst.output) |output| {
+                    if (inst.output_name) |output| {
                         if (output.len < 4 or output[0] != '=' or output[1] != '{' or output[output.len - 1] != '}') {
                             return self.fail(inst.base.src, "unrecognized asm output constraint: '{s}'", .{output});
                         }
src/Module.zig
@@ -462,11 +462,11 @@ pub const Scope = struct {
         switch (scope.tag) {
             .file => return &scope.cast(File).?.tree,
             .block => return &scope.cast(Block).?.src_decl.container.file_scope.tree,
-            .gen_zir => return &scope.cast(GenZir).?.decl.container.file_scope.tree,
+            .gen_zir => return &scope.cast(GenZir).?.zir_code.decl.container.file_scope.tree,
             .local_val => return &scope.cast(LocalVal).?.gen_zir.zir_code.decl.container.file_scope.tree,
             .local_ptr => return &scope.cast(LocalPtr).?.gen_zir.zir_code.decl.container.file_scope.tree,
             .container => return &scope.cast(Container).?.file_scope.tree,
-            .gen_suspend => return &scope.cast(GenZir).?.decl.container.file_scope.tree,
+            .gen_suspend => return &scope.cast(GenZir).?.zir_code.decl.container.file_scope.tree,
             .gen_nosuspend => return &scope.cast(Nosuspend).?.gen_zir.zir_code.decl.container.file_scope.tree,
             .decl_ref => return &scope.cast(DeclRef).?.decl.container.file_scope.tree,
         }
@@ -968,18 +968,42 @@ pub const Scope = struct {
             used: bool = false,
         };
 
+        /// Only valid to call on the top of the `GenZir` stack. Completes the
+        /// `WipZirCode` into a `zir.Code`. Leaves the `WipZirCode` in an
+        /// initialized, but empty, state.
+        pub fn finish(gz: *GenZir) !zir.Code {
+            const gpa = gz.zir_code.gpa;
+            const root_start = @intCast(u32, gz.zir_code.extra.items.len);
+            const root_len = @intCast(u32, gz.instructions.items.len);
+            try gz.zir_code.extra.appendSlice(gpa, gz.instructions.items);
+            return zir.Code{
+                .instructions = gz.zir_code.instructions.toOwnedSlice(),
+                .string_bytes = gz.zir_code.string_bytes.toOwnedSlice(gpa),
+                .extra = gz.zir_code.extra.toOwnedSlice(gpa),
+                .root_start = root_start,
+                .root_len = root_len,
+            };
+        }
+
+        pub fn tokSrcLoc(gz: *GenZir, token_index: ast.TokenIndex) LazySrcLoc {
+            const decl_token = gz.zir_code.decl.srcToken();
+            return .{ .token_offset = token_index - decl_token };
+        }
+
         pub fn addFnTypeCc(gz: *GenZir, args: struct {
             param_types: []const zir.Inst.Ref,
             ret_ty: zir.Inst.Ref,
             cc: zir.Inst.Ref,
         }) !zir.Inst.Index {
+            assert(args.ret_ty != 0);
+            assert(args.cc != 0);
             const gpa = gz.zir_code.gpa;
             try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
             try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-            try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.len +
+            try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.items.len +
                 @typeInfo(zir.Inst.FnTypeCc).Struct.fields.len + args.param_types.len);
 
-            const payload_index = gz.addExtra(zir.Inst.FnTypeCc, .{
+            const payload_index = gz.zir_code.addExtra(zir.Inst.FnTypeCc{
                 .cc = args.cc,
                 .param_types_len = @intCast(u32, args.param_types.len),
             }) catch unreachable; // Capacity is ensured above.
@@ -989,7 +1013,7 @@ pub const Scope = struct {
             gz.zir_code.instructions.appendAssumeCapacity(.{
                 .tag = .fn_type_cc,
                 .data = .{ .fn_type = .{
-                    .return_type = ret_ty,
+                    .return_type = args.ret_ty,
                     .payload_index = payload_index,
                 } },
             });
@@ -1003,13 +1027,14 @@ pub const Scope = struct {
             ret_ty: zir.Inst.Ref,
             param_types: []const zir.Inst.Ref,
         ) !zir.Inst.Index {
+            assert(ret_ty != 0);
             const gpa = gz.zir_code.gpa;
             try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
             try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-            try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.len +
+            try gz.zir_code.extra.ensureCapacity(gpa, gz.zir_code.extra.items.len +
                 @typeInfo(zir.Inst.FnType).Struct.fields.len + param_types.len);
 
-            const payload_index = gz.addExtra(zir.Inst.FnTypeCc, .{
+            const payload_index = gz.zir_code.addExtra(zir.Inst.FnType{
                 .param_types_len = @intCast(u32, param_types.len),
             }) catch unreachable; // Capacity is ensured above.
             gz.zir_code.extra.appendSliceAssumeCapacity(param_types);
@@ -1027,42 +1052,11 @@ pub const Scope = struct {
             return result;
         }
 
-        pub fn addRetTok(
-            gz: *GenZir,
-            operand: zir.Inst.Ref,
-            /// Absolute token index. This function does the conversion to Decl offset.
-            abs_tok_index: ast.TokenIndex,
-        ) !zir.Inst.Index {
-            const gpa = gz.zir_code.gpa;
-            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-
-            const new_index = gz.zir_code.instructions.len;
-            gz.zir_code.instructions.appendAssumeCapacity(.{
-                .tag = .ret_tok,
-                .data = .{ .fn_type = .{
-                    .operand = operand,
-                    .src_tok = abs_tok_index - gz.zir_code.decl.srcToken(),
-                } },
-            });
-            const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
-            gz.instructions.appendAssumeCapacity(result);
-            return result;
-        }
-
         pub fn addInt(gz: *GenZir, integer: u64) !zir.Inst.Index {
-            const gpa = gz.zir_code.gpa;
-            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-
-            const new_index = gz.zir_code.instructions.len;
-            gz.zir_code.instructions.appendAssumeCapacity(.{
+            return gz.add(.{
                 .tag = .int,
                 .data = .{ .int = integer },
             });
-            const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
-            gz.instructions.appendAssumeCapacity(result);
-            return result;
         }
 
         pub fn addUnNode(
@@ -1072,21 +1066,14 @@ pub const Scope = struct {
             /// Absolute node index. This function does the conversion to offset from Decl.
             abs_node_index: ast.Node.Index,
         ) !zir.Inst.Ref {
-            const gpa = gz.zir_code.gpa;
-            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-
-            const new_index = gz.zir_code.instructions.len;
-            gz.zir_code.instructions.appendAssumeCapacity(.{
+            assert(operand != 0);
+            return gz.add(.{
                 .tag = tag,
                 .data = .{ .un_node = .{
                     .operand = operand,
                     .src_node = abs_node_index - gz.zir_code.decl.srcNode(),
                 } },
             });
-            const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
-            gz.instructions.appendAssumeCapacity(result);
-            return result;
         }
 
         pub fn addUnTok(
@@ -1096,21 +1083,14 @@ pub const Scope = struct {
             /// Absolute token index. This function does the conversion to Decl offset.
             abs_tok_index: ast.TokenIndex,
         ) !zir.Inst.Ref {
-            const gpa = gz.zir_code.gpa;
-            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-
-            const new_index = gz.zir_code.instructions.len;
-            gz.zir_code.instructions.appendAssumeCapacity(.{
+            assert(operand != 0);
+            return gz.add(.{
                 .tag = tag,
                 .data = .{ .un_tok = .{
                     .operand = operand,
                     .src_tok = abs_tok_index - gz.zir_code.decl.srcToken(),
                 } },
             });
-            const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
-            gz.instructions.appendAssumeCapacity(result);
-            return result;
         }
 
         pub fn addBin(
@@ -1119,18 +1099,52 @@ pub const Scope = struct {
             lhs: zir.Inst.Ref,
             rhs: zir.Inst.Ref,
         ) !zir.Inst.Ref {
-            const gpa = gz.zir_code.gpa;
-            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
-            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
-
-            const new_index = gz.zir_code.instructions.len;
-            gz.zir_code.instructions.appendAssumeCapacity(.{
+            assert(lhs != 0);
+            assert(rhs != 0);
+            return gz.add(.{
                 .tag = tag,
                 .data = .{ .bin = .{
                     .lhs = lhs,
                     .rhs = rhs,
                 } },
             });
+        }
+
+        pub fn addNode(
+            gz: *GenZir,
+            tag: zir.Inst.Tag,
+            /// Absolute node index. This function does the conversion to offset from Decl.
+            abs_node_index: ast.Node.Index,
+        ) !zir.Inst.Ref {
+            return gz.add(.{
+                .tag = tag,
+                .data = .{ .node = abs_node_index - gz.zir_code.decl.srcNode() },
+            });
+        }
+
+        /// `str` must be 8 or fewer bytes; longer input is not supported.
+        pub fn addSmallStr(
+            gz: *GenZir,
+            tag: zir.Inst.Tag,
+            str: []const u8,
+        ) !zir.Inst.Ref {
+            var buf: [9]u8 = undefined;
+            mem.copy(u8, &buf, str);
+            buf[str.len] = 0;
+
+            return gz.add(.{
+                .tag = tag,
+                .data = .{ .small_str = .{ .bytes = buf[0..8].* } },
+            });
+        }
+
+        fn add(gz: *GenZir, inst: zir.Inst) !zir.Inst.Ref {
+            const gpa = gz.zir_code.gpa;
+            try gz.instructions.ensureCapacity(gpa, gz.instructions.items.len + 1);
+            try gz.zir_code.instructions.ensureCapacity(gpa, gz.zir_code.instructions.len + 1);
+
+            const new_index = gz.zir_code.instructions.len;
+            gz.zir_code.instructions.appendAssumeCapacity(inst);
             const result = @intCast(zir.Inst.Ref, new_index + gz.zir_code.ref_start_index);
             gz.instructions.appendAssumeCapacity(result);
             return result;
@@ -1183,6 +1197,7 @@ pub const Scope = struct {
 /// A Work-In-Progress `zir.Code`. This is a shared parent of all
 /// `GenZir` scopes. Once the `zir.Code` is produced, this struct
 /// is deinitialized.
+/// The `GenZir.finish` function converts this to a `zir.Code`.
 pub const WipZirCode = struct {
     instructions: std.MultiArrayList(zir.Inst) = .{},
     string_bytes: std.ArrayListUnmanaged(u8) = .{},
@@ -1194,9 +1209,20 @@ pub const WipZirCode = struct {
     gpa: *Allocator,
     arena: *Allocator,
 
-    fn deinit(wip_zir_code: *WipZirCode) void {
-        wip_zir_code.instructions.deinit(wip_zir_code.gpa);
-        wip_zir_code.extra.deinit(wip_zir_code.gpa);
+    pub fn addExtra(wzc: *WipZirCode, extra: anytype) Allocator.Error!u32 {
+        const fields = std.meta.fields(@TypeOf(extra));
+        try wzc.extra.ensureCapacity(wzc.gpa, wzc.extra.items.len + fields.len);
+        const result = @intCast(u32, wzc.extra.items.len);
+        inline for (fields) |field| {
+            comptime assert(field.field_type == u32);
+            wzc.extra.appendAssumeCapacity(@field(extra, field.name));
+        }
+        return result;
+    }
+
+    pub fn deinit(wzc: *WipZirCode) void {
+        wzc.instructions.deinit(wzc.gpa);
+        wzc.extra.deinit(wzc.gpa);
     }
 };
 
@@ -1763,18 +1789,22 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
                     .gpa = mod.gpa,
                 };
                 defer wip_zir_code.deinit();
+
                 var gen_scope: Scope.GenZir = .{
                     .force_comptime = true,
                     .parent = &decl.container.base,
                     .zir_code = &wip_zir_code,
                 };
+                defer gen_scope.instructions.deinit(mod.gpa);
 
                 const block_expr = node_datas[decl_node].lhs;
                 _ = try astgen.comptimeExpr(mod, &gen_scope.base, .none, block_expr);
+
+                const code = try gen_scope.finish();
                 if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
-                    zir.dumpZir(mod.gpa, "comptime_block", decl.name, gen_scope.instructions.items) catch {};
+                    zir.dumpZir(mod.gpa, "comptime_block", decl.name, code) catch {};
                 }
-                break :blk wip_zir_code.finish();
+                break :blk code;
             };
 
             var sema: Sema = .{
@@ -1836,11 +1866,13 @@ fn astgenAndSemaFn(
         .gpa = mod.gpa,
     };
     defer fn_type_wip_zir_exec.deinit();
+
     var fn_type_scope: Scope.GenZir = .{
         .force_comptime = true,
         .parent = &decl.container.base,
         .zir_code = &fn_type_wip_zir_exec,
     };
+    defer fn_type_scope.instructions.deinit(mod.gpa);
 
     decl.is_pub = fn_proto.visib_token != null;
 
@@ -1855,7 +1887,7 @@ fn astgenAndSemaFn(
         }
         break :blk count;
     };
-    const param_types = try fn_type_scope_arena.allocator.alloc(zir.Inst.Index, param_count);
+    const param_types = try fn_type_scope_arena.allocator.alloc(zir.Inst.Ref, param_count);
     const type_type_rl: astgen.ResultLoc = .{ .ty = @enumToInt(zir.Const.type_type) };
 
     var is_var_args = false;
@@ -1970,11 +2002,11 @@ fn astgenAndSemaFn(
             .ty = @enumToInt(zir.Const.enum_literal_type),
         }, fn_proto.ast.callconv_expr)
     else if (is_extern) // note: https://github.com/ziglang/zig/issues/5269
-        try fn_type_scope.addStrBytes(.enum_literal, "C")
+        try fn_type_scope.addSmallStr(.enum_literal_small, "C")
     else
         0;
 
-    const fn_type_inst: zir.Inst.Index = if (cc != 0) fn_type: {
+    const fn_type_inst: zir.Inst.Ref = if (cc != 0) fn_type: {
         const tag: zir.Inst.Tag = if (is_var_args) .fn_type_cc_var_args else .fn_type_cc;
         break :fn_type try fn_type_scope.addFnTypeCc(.{
             .ret_ty = return_type_inst,
@@ -1983,22 +2015,19 @@ fn astgenAndSemaFn(
         });
     } else fn_type: {
         const tag: zir.Inst.Tag = if (is_var_args) .fn_type_var_args else .fn_type;
-        break :fn_type try fn_type_scope.addFnType(.{
-            .ret_ty = return_type_inst,
-            .param_types = param_types,
-        });
+        break :fn_type try fn_type_scope.addFnType(return_type_inst, param_types);
     };
 
-    if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
-        zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_scope.instructions.items) catch {};
-    }
-
     // We need the memory for the Type to go into the arena for the Decl
     var decl_arena = std.heap.ArenaAllocator.init(mod.gpa);
     errdefer decl_arena.deinit();
     const decl_arena_state = try decl_arena.allocator.create(std.heap.ArenaAllocator.State);
 
-    const fn_type_code = fn_type_wip_zir_exec.finish();
+    const fn_type_code = try fn_type_scope.finish();
+    if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
+        zir.dumpZir(mod.gpa, "fn_type", decl.name, fn_type_code) catch {};
+    }
+
     var fn_type_sema: Sema = .{
         .mod = mod,
         .gpa = mod.gpa,
@@ -2021,7 +2050,7 @@ fn astgenAndSemaFn(
     };
     defer block_scope.instructions.deinit(mod.gpa);
 
-    const fn_type = try fn_type_sema.rootAsType(mod, &block_scope, fn_type_inst);
+    const fn_type = try fn_type_sema.rootAsType(&block_scope, fn_type_inst);
     if (body_node == 0) {
         if (!is_extern) {
             return mod.failNode(&block_scope.base, fn_proto.ast.fn_token, "non-extern function has no body", .{});
@@ -2063,13 +2092,12 @@ fn astgenAndSemaFn(
     const new_func = try decl_arena.allocator.create(Fn);
     const fn_payload = try decl_arena.allocator.create(Value.Payload.Function);
 
-    const fn_zir: zir.Body = blk: {
+    const fn_zir: zir.Code = blk: {
         // We put the ZIR inside the Decl arena.
         var wip_zir_code: WipZirCode = .{
             .decl = decl,
             .arena = &decl_arena.allocator,
             .gpa = mod.gpa,
-            .arg_count = param_count,
         };
         defer wip_zir_code.deinit();
 
@@ -2078,6 +2106,8 @@ fn astgenAndSemaFn(
             .parent = &decl.container.base,
             .zir_code = &wip_zir_code,
         };
+        defer gen_scope.instructions.deinit(mod.gpa);
+
         // Iterate over the parameters. We put the param names as the first N
         // items inside `extra` so that debug info later can refer to the parameter names
         // even while the respective source code is unloaded.
@@ -2095,7 +2125,7 @@ fn astgenAndSemaFn(
                 .gen_zir = &gen_scope,
                 .name = param_name,
                 // Implicit const list first, then implicit arg list.
-                .inst = zir.const_inst_list.len + i,
+                .inst = @intCast(u32, zir.const_inst_list.len + i),
             };
             params_scope = &sub_scope.base;
 
@@ -2111,18 +2141,19 @@ fn astgenAndSemaFn(
         _ = try astgen.expr(mod, params_scope, .none, body_node);
 
         if (gen_scope.instructions.items.len == 0 or
-            !gen_scope.instructions.items[gen_scope.instructions.items.len - 1].tag.isNoReturn())
+            !wip_zir_code.instructions.items(.tag)[gen_scope.instructions.items.len - 1]
+            .isNoReturn())
         {
-            _ = try gen_scope.addRetTok(@enumToInt(zir.Const.void_value), tree.lastToken(body_node));
+            const void_operand = @enumToInt(zir.Const.void_value);
+            _ = try gen_scope.addUnTok(.ret_tok, void_operand, tree.lastToken(body_node));
         }
 
+        const code = try gen_scope.finish();
         if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
-            zir.dumpZir(mod.gpa, "fn_body", decl.name, gen_scope.instructions.items) catch {};
+            zir.dumpZir(mod.gpa, "fn_body", decl.name, code) catch {};
         }
 
-        break :blk .{
-            .instructions = try gen_scope.arena.dupe(*zir.Inst, gen_scope.instructions.items),
-        };
+        break :blk code;
     };
 
     const is_inline = fn_type.fnCallingConvention() == .Inline;
@@ -2190,7 +2221,8 @@ fn astgenAndSemaFn(
                     .{},
                 );
             }
-            const export_src = token_starts[maybe_export_token];
+            // TODO use a Decl-local source location instead.
+            const export_src: LazySrcLoc = .{ .token_abs = maybe_export_token };
             const name = tree.tokenSlice(fn_proto.name_token.?); // TODO identifierTokenString
             // The scope needs to have the decl in it.
             try mod.analyzeExport(&block_scope.base, export_src, name, decl);
@@ -2294,7 +2326,7 @@ fn astgenAndSemaVarDecl(
             init_result_loc,
             var_decl.ast.init_node,
         );
-        const code = wip_zir_code.finish();
+        const code = try gen_scope.finish();
         if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
             zir.dumpZir(mod.gpa, "var_init", decl.name, code) catch {};
         }
@@ -2324,13 +2356,13 @@ fn astgenAndSemaVarDecl(
         try sema.root(&block_scope);
 
         // The result location guarantees the type coercion.
-        const analyzed_init_inst = sema.resolveInst(&block_scope, init_inst);
+        const analyzed_init_inst = try sema.resolveInst(init_inst);
         // The is_comptime in the Scope.Block guarantees the result is comptime-known.
         const val = analyzed_init_inst.value().?;
 
         break :vi .{
-            .ty = try analyzed_init_inst.ty.copy(decl_arena),
-            .val = try val.copy(decl_arena),
+            .ty = try analyzed_init_inst.ty.copy(&decl_arena.allocator),
+            .val = try val.copy(&decl_arena.allocator),
         };
     } else if (!is_extern) {
         return mod.failTok(
@@ -2358,7 +2390,7 @@ fn astgenAndSemaVarDecl(
         defer type_scope.instructions.deinit(mod.gpa);
 
         const var_type = try astgen.typeExpr(mod, &type_scope.base, var_decl.ast.type_node);
-        const code = wip_zir_code.finish();
+        const code = try type_scope.finish();
         if (std.builtin.mode == .Debug and mod.comp.verbose_ir) {
             zir.dumpZir(mod.gpa, "var_type", decl.name, code) catch {};
         }
@@ -2388,7 +2420,7 @@ fn astgenAndSemaVarDecl(
         const ty = try sema.rootAsType(&block_scope, var_type);
 
         break :vi .{
-            .ty = try ty.copy(decl_arena),
+            .ty = try ty.copy(&decl_arena.allocator),
             .val = null,
         };
     } else {
@@ -2441,7 +2473,8 @@ fn astgenAndSemaVarDecl(
 
     if (var_decl.extern_export_token) |maybe_export_token| {
         if (token_tags[maybe_export_token] == .keyword_export) {
-            const export_src = token_starts[maybe_export_token];
+            // TODO make this src relative to containing Decl
+            const export_src: LazySrcLoc = .{ .token_abs = maybe_export_token };
             const name_token = var_decl.ast.mut_token + 1;
             const name = tree.tokenSlice(name_token); // TODO identifierTokenString
             // The scope needs to have the decl in it.
src/Sema.zig
@@ -12,7 +12,7 @@ gpa: *Allocator,
 arena: *Allocator,
 code: zir.Code,
 /// Maps ZIR to TZIR.
-inst_map: []*const Inst,
+inst_map: []*Inst,
 /// When analyzing an inline function call, owner_decl is the Decl of the caller
 /// and `src_decl` of `Scope.Block` is the `Decl` of the callee.
 /// This `Decl` owns the arena memory of this `Sema`.
@@ -58,15 +58,10 @@ pub fn root(sema: *Sema, root_block: *Scope.Block) !void {
     return sema.analyzeBody(root_block, root_body);
 }
 
-pub fn rootAsType(
-    sema: *Sema,
-    root_block: *Scope.Block,
-    zir_result_inst: zir.Inst.Index,
-) !Type {
+pub fn rootAsType(sema: *Sema, root_block: *Scope.Block, result_inst: zir.Inst.Ref) !Type {
     const root_body = sema.code.extra[sema.code.root_start..][0..sema.code.root_len];
     try sema.analyzeBody(root_block, root_body);
 
-    const result_inst = sema.inst_map[zir_result_inst];
     // Source location is unneeded because resolveConstValue must have already
     // been successfully called when coercing the value to a type, from the
     // result location.
@@ -203,6 +198,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
             .array_type => try sema.zirArrayType(block, zir_inst),
             .array_type_sentinel => try sema.zirArrayTypeSentinel(block, zir_inst),
             .enum_literal => try sema.zirEnumLiteral(block, zir_inst),
+            .enum_literal_small => try sema.zirEnumLiteralSmall(block, zir_inst),
             .merge_error_sets => try sema.zirMergeErrorSets(block, zir_inst),
             .error_union_type => try sema.zirErrorUnionType(block, zir_inst),
             .anyframe_type => try sema.zirAnyframeType(block, zir_inst),
@@ -232,7 +228,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
 
 /// TODO when we rework TZIR memory layout, this function will no longer have a possible error.
 pub fn resolveInst(sema: *Sema, zir_ref: zir.Inst.Ref) error{OutOfMemory}!*ir.Inst {
-    var i = zir_ref;
+    var i: usize = zir_ref;
 
     // First section of indexes correspond to a set number of constant values.
     if (i < zir.const_inst_list.len) {
@@ -1435,6 +1431,19 @@ fn zirEnumLiteral(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerE
     });
 }
 
+fn zirEnumLiteralSmall(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const name = sema.code.instructions.items(.data)[inst].small_str.get();
+    const src: LazySrcLoc = .unneeded;
+    const duped_name = try sema.arena.dupe(u8, name);
+    return sema.mod.constInst(sema.arena, src, .{
+        .ty = Type.initTag(.enum_literal),
+        .val = try Value.Tag.enum_literal.create(sema.arena, duped_name),
+    });
+}
+
 /// Pointer in, pointer out.
 fn zirOptionalPayloadPtr(
     sema: *Sema,
src/type.zig
@@ -3150,7 +3150,7 @@ pub const Type = extern union {
             => unreachable,
 
             .empty_struct => self.castTag(.empty_struct).?.data,
-            .@"opaque" => &self.castTag(.@"opaque").?.scope,
+            .@"opaque" => &self.castTag(.@"opaque").?.data,
         };
     }
 
src/zir.zig
@@ -35,7 +35,7 @@ pub const Code = struct {
     extra: []u32,
     /// First ZIR instruction in this `Code`.
     /// `extra` at this index contains a `Ref` for every root member.
-    root_start: Inst.Index,
+    root_start: u32,
     /// Number of ZIR instructions in the implicit root block of the `Code`.
     root_len: u32,
 
@@ -138,204 +138,205 @@ pub const Const = enum {
     bool_false,
 };
 
-pub const const_inst_list = enumArray(Const, .{
-    .u8_type = @as(TypedValue, .{
+pub const const_inst_list = std.enums.directEnumArray(Const, TypedValue, 0, .{
+    .unused = undefined,
+    .u8_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u8_type),
-    }),
-    .i8_type = @as(TypedValue, .{
+    },
+    .i8_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.i8_type),
-    }),
-    .u16_type = @as(TypedValue, .{
+    },
+    .u16_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u16_type),
-    }),
-    .i16_type = @as(TypedValue, .{
+    },
+    .i16_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.i16_type),
-    }),
-    .u32_type = @as(TypedValue, .{
+    },
+    .u32_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u32_type),
-    }),
-    .i32_type = @as(TypedValue, .{
+    },
+    .i32_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.i32_type),
-    }),
-    .u64_type = @as(TypedValue, .{
+    },
+    .u64_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.u64_type),
-    }),
-    .i64_type = @as(TypedValue, .{
+    },
+    .i64_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.i64_type),
-    }),
-    .usize_type = @as(TypedValue, .{
+    },
+    .usize_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.usize_type),
-    }),
-    .isize_type = @as(TypedValue, .{
+    },
+    .isize_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.isize_type),
-    }),
-    .c_short_type = @as(TypedValue, .{
+    },
+    .c_short_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_short_type),
-    }),
-    .c_ushort_type = @as(TypedValue, .{
+    },
+    .c_ushort_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_ushort_type),
-    }),
-    .c_int_type = @as(TypedValue, .{
+    },
+    .c_int_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_int_type),
-    }),
-    .c_uint_type = @as(TypedValue, .{
+    },
+    .c_uint_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_uint_type),
-    }),
-    .c_long_type = @as(TypedValue, .{
+    },
+    .c_long_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_long_type),
-    }),
-    .c_ulong_type = @as(TypedValue, .{
+    },
+    .c_ulong_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_ulong_type),
-    }),
-    .c_longlong_type = @as(TypedValue, .{
+    },
+    .c_longlong_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_longlong_type),
-    }),
-    .c_ulonglong_type = @as(TypedValue, .{
+    },
+    .c_ulonglong_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_ulonglong_type),
-    }),
-    .c_longdouble_type = @as(TypedValue, .{
+    },
+    .c_longdouble_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_longdouble_type),
-    }),
-    .f16_type = @as(TypedValue, .{
+    },
+    .f16_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.f16_type),
-    }),
-    .f32_type = @as(TypedValue, .{
+    },
+    .f32_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.f32_type),
-    }),
-    .f64_type = @as(TypedValue, .{
+    },
+    .f64_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.f64_type),
-    }),
-    .f128_type = @as(TypedValue, .{
+    },
+    .f128_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.f128_type),
-    }),
-    .c_void_type = @as(TypedValue, .{
+    },
+    .c_void_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.c_void_type),
-    }),
-    .bool_type = @as(TypedValue, .{
+    },
+    .bool_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.bool_type),
-    }),
-    .void_type = @as(TypedValue, .{
+    },
+    .void_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.void_type),
-    }),
-    .type_type = @as(TypedValue, .{
+    },
+    .type_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.type_type),
-    }),
-    .anyerror_type = @as(TypedValue, .{
+    },
+    .anyerror_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.anyerror_type),
-    }),
-    .comptime_int_type = @as(TypedValue, .{
+    },
+    .comptime_int_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.comptime_int_type),
-    }),
-    .comptime_float_type = @as(TypedValue, .{
+    },
+    .comptime_float_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.comptime_float_type),
-    }),
-    .noreturn_type = @as(TypedValue, .{
+    },
+    .noreturn_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.noreturn_type),
-    }),
-    .null_type = @as(TypedValue, .{
+    },
+    .null_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.null_type),
-    }),
-    .undefined_type = @as(TypedValue, .{
+    },
+    .undefined_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.undefined_type),
-    }),
-    .fn_noreturn_no_args_type = @as(TypedValue, .{
+    },
+    .fn_noreturn_no_args_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.fn_noreturn_no_args_type),
-    }),
-    .fn_void_no_args_type = @as(TypedValue, .{
+    },
+    .fn_void_no_args_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.fn_void_no_args_type),
-    }),
-    .fn_naked_noreturn_no_args_type = @as(TypedValue, .{
+    },
+    .fn_naked_noreturn_no_args_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.fn_naked_noreturn_no_args_type),
-    }),
-    .fn_ccc_void_no_args_type = @as(TypedValue, .{
+    },
+    .fn_ccc_void_no_args_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.fn_ccc_void_no_args_type),
-    }),
-    .single_const_pointer_to_comptime_int_type = @as(TypedValue, .{
+    },
+    .single_const_pointer_to_comptime_int_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.single_const_pointer_to_comptime_int_type),
-    }),
-    .const_slice_u8_type = @as(TypedValue, .{
+    },
+    .const_slice_u8_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.const_slice_u8_type),
-    }),
-    .enum_literal_type = @as(TypedValue, .{
+    },
+    .enum_literal_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.enum_literal_type),
-    }),
-    .anyframe_type = @as(TypedValue, .{
+    },
+    .anyframe_type = .{
         .ty = Type.initTag(.type),
         .val = Value.initTag(.anyframe_type),
-    }),
+    },
 
-    .undef = @as(TypedValue, .{
+    .undef = .{
         .ty = Type.initTag(.@"undefined"),
         .val = Value.initTag(.undef),
-    }),
-    .zero = @as(TypedValue, .{
+    },
+    .zero = .{
         .ty = Type.initTag(.comptime_int),
         .val = Value.initTag(.zero),
-    }),
-    .one = @as(TypedValue, .{
+    },
+    .one = .{
         .ty = Type.initTag(.comptime_int),
         .val = Value.initTag(.one),
-    }),
-    .void_value = @as(TypedValue, .{
+    },
+    .void_value = .{
         .ty = Type.initTag(.void),
         .val = Value.initTag(.void_value),
-    }),
-    .unreachable_value = @as(TypedValue, .{
+    },
+    .unreachable_value = .{
         .ty = Type.initTag(.noreturn),
         .val = Value.initTag(.unreachable_value),
-    }),
-    .null_value = @as(TypedValue, .{
+    },
+    .null_value = .{
         .ty = Type.initTag(.@"null"),
         .val = Value.initTag(.null_value),
-    }),
-    .bool_true = @as(TypedValue, .{
+    },
+    .bool_true = .{
         .ty = Type.initTag(.bool),
         .val = Value.initTag(.bool_true),
-    }),
-    .bool_false = @as(TypedValue, .{
+    },
+    .bool_false = .{
         .ty = Type.initTag(.bool),
         .val = Value.initTag(.bool_false),
-    }),
+    },
 });
 
 /// These are untyped instructions generated from an Abstract Syntax Tree.
@@ -633,7 +634,7 @@ pub const Inst = struct {
         /// Sends control flow back to the function's callee.
         /// Includes an operand as the return value.
         /// Includes a token source location.
-        /// Uses the un_tok union field.
+        /// Uses the `un_tok` union field.
         ret_tok,
         /// Changes the maximum number of backwards branches that compile-time
         /// code execution can use before giving up and making a compile error.
@@ -755,6 +756,9 @@ pub const Inst = struct {
         ensure_err_payload_void,
         /// An enum literal. Uses the `str_tok` union field.
         enum_literal,
+        /// An enum literal of 8 or fewer bytes. No source location.
+        /// Uses the `small_str` field.
+        enum_literal_small,
         /// Suspend an async function. The suspend block has 0 or 1 statements in it.
         /// Uses the `un_node` union field.
         suspend_block_one,
@@ -816,6 +820,7 @@ pub const Inst = struct {
                 .indexable_ptr_len,
                 .as,
                 .@"asm",
+                .asm_volatile,
                 .bit_and,
                 .bitcast,
                 .bitcast_ref,
@@ -831,12 +836,9 @@ pub const Inst = struct {
                 .breakpoint,
                 .call,
                 .call_async_kw,
-                .call_never_tail,
-                .call_never_inline,
                 .call_no_async,
-                .call_always_tail,
-                .call_always_inline,
                 .call_compile_time,
+                .call_none,
                 .cmp_lt,
                 .cmp_lte,
                 .cmp_eq,
@@ -845,13 +847,15 @@ pub const Inst = struct {
                 .cmp_neq,
                 .coerce_result_ptr,
                 .@"const",
-                .dbg_stmt,
+                .dbg_stmt_node,
                 .decl_ref,
                 .decl_val,
                 .deref_node,
                 .div,
                 .elem_ptr,
                 .elem_val,
+                .elem_ptr_node,
+                .elem_val_node,
                 .ensure_result_used,
                 .ensure_result_non_error,
                 .floatcast,
@@ -882,14 +886,6 @@ pub const Inst = struct {
                 .ret_type,
                 .shl,
                 .shr,
-                .single_const_ptr_type,
-                .single_mut_ptr_type,
-                .many_const_ptr_type,
-                .many_mut_ptr_type,
-                .c_const_ptr_type,
-                .c_mut_ptr_type,
-                .mut_slice_type,
-                .const_slice_type,
                 .store,
                 .store_to_block_ptr,
                 .store_to_inferred_ptr,
@@ -914,20 +910,21 @@ pub const Inst = struct {
                 .ptr_type_simple,
                 .ensure_err_payload_void,
                 .enum_literal,
+                .enum_literal_small,
                 .merge_error_sets,
                 .anyframe_type,
                 .error_union_type,
                 .bit_not,
                 .error_set,
                 .error_value,
-                .slice,
                 .slice_start,
+                .slice_end,
+                .slice_sentinel,
                 .import,
                 .typeof_peer,
                 .resolve_inferred_alloc,
                 .set_eval_branch_quota,
                 .compile_log,
-                .switch_range,
                 .@"resume",
                 .@"await",
                 .nosuspend_await,
@@ -942,11 +939,8 @@ pub const Inst = struct {
                 .unreachable_unsafe,
                 .unreachable_safe,
                 .loop,
-                .container_field_named,
-                .container_field_typed,
-                .container_field,
-                .@"suspend",
                 .suspend_block,
+                .suspend_block_one,
                 => true,
             };
         }
@@ -1017,6 +1011,17 @@ pub const Inst = struct {
                 return code.string_bytes[self.start..][0..self.len];
             }
         },
+        /// Strings of 8 or fewer bytes, which must not contain null bytes.
+        small_str: struct {
+            bytes: [8]u8,
+
+            pub fn get(self: @This()) []const u8 {
+                const end = for (self.bytes) |byte, i| {
+                    if (byte == 0) break i;
+                } else self.bytes.len;
+                return self.bytes[0..end];
+            }
+        },
         str_tok: struct {
             /// Offset into `string_bytes`. Null-terminated.
             start: u32,
@@ -1205,7 +1210,8 @@ pub const Inst = struct {
 };
 
 /// For debugging purposes, like dumpFn but for unanalyzed zir blocks
-pub fn dumpZir(gpa: *Allocator, kind: []const u8, decl_name: [*:0]const u8, instructions: []*Inst) !void {
+pub fn dumpZir(gpa: *Allocator, kind: []const u8, decl_name: [*:0]const u8, code: Code) !void {
+    if (true) @panic("TODO fix this function for zir-memory-layout branch");
     var fib = std.heap.FixedBufferAllocator.init(&[_]u8{});
     var module = Module{
         .decls = &[_]*Module.Decl{},
BRANCH_TODO
@@ -27,6 +27,8 @@ Performance optimizations to look into:
    and have it reference source code bytes. Another idea: null terminated
    string variants which avoid having to store the length.
    - Look into this for enum literals too
+ * make ret_type and ret_ptr instructions be implied indexes; no need to have
+   tags associated with them.
 
 
 Random snippets of code that I deleted and need to make sure get