Commit 13ced07f23
Changed files (5)
src/astgen.zig
@@ -1055,6 +1055,7 @@ fn blockExprStmts(
.bit_or,
.block,
.block_comptime,
+ .loop,
.bool_br_and,
.bool_br_or,
.bool_not,
@@ -1156,12 +1157,13 @@ fn blockExprStmts(
.ret_tok,
.ret_coerce,
.@"unreachable",
- .loop,
.elided,
.store,
.store_to_block_ptr,
.store_to_inferred_ptr,
.resolve_inferred_alloc,
+ .repeat,
+ .repeat_inline,
=> break :b true,
}
} else switch (maybe_unused_result) {
@@ -2145,20 +2147,16 @@ fn whileExpr(
node: ast.Node.Index,
while_full: ast.full.While,
) InnerError!zir.Inst.Ref {
- if (true) @panic("TODO update for zir-memory-layout");
if (while_full.label_token) |label_token| {
try checkLabelRedefinition(mod, scope, label_token);
}
- if (while_full.inline_token) |inline_token| {
- return mod.failTok(scope, inline_token, "TODO inline while", .{});
- }
-
const parent_gz = scope.getGenZir();
+ const loop_block = try parent_gz.addBlock(.loop, node);
+ try parent_gz.instructions.append(mod.gpa, loop_block);
var loop_scope: Scope.GenZir = .{
.parent = scope,
- .decl = scope.ownerDecl().?,
- .arena = scope.arena(),
+ .zir_code = parent_gz.zir_code,
.force_comptime = parent_gz.force_comptime,
.instructions = .{},
};
@@ -2167,21 +2165,12 @@ fn whileExpr(
var continue_scope: Scope.GenZir = .{
.parent = &loop_scope.base,
- .decl = loop_scope.decl,
- .arena = loop_scope.arena,
+ .zir_code = parent_gz.zir_code,
.force_comptime = loop_scope.force_comptime,
.instructions = .{},
};
defer continue_scope.instructions.deinit(mod.gpa);
- const tree = gz.tree();
- const main_tokens = tree.nodes.items(.main_token);
-
- const while_src = token_starts[while_full.ast.while_token];
- const void_type = try addZIRInstConst(mod, scope, while_src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.void_type),
- });
const cond = c: {
// TODO https://github.com/ziglang/zig/issues/7929
if (while_full.error_token) |error_token| {
@@ -2189,59 +2178,41 @@ fn whileExpr(
} else if (while_full.payload_token) |payload_token| {
return mod.failTok(scope, payload_token, "TODO implement while optional", .{});
} else {
- const bool_type = try addZIRInstConst(mod, &continue_scope.base, while_src, .{
- .ty = Type.initTag(.type),
- .val = Value.initTag(.bool_type),
- });
- break :c try expr(mod, &continue_scope.base, .{ .ty = bool_type }, while_full.ast.cond_expr);
+ const bool_type_rl: ResultLoc = .{ .ty = @enumToInt(zir.Const.bool_type) };
+ break :c try expr(mod, &continue_scope.base, bool_type_rl, while_full.ast.cond_expr);
}
};
- const condbr = try addZIRInstSpecial(mod, &continue_scope.base, while_src, zir.Inst.CondBr, .{
- .condition = cond,
- .then_body = undefined, // populated below
- .else_body = undefined, // populated below
- }, .{});
- const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .block, .{
- .instructions = try loop_scope.arena.dupe(zir.Inst.Ref, continue_scope.instructions.items),
- });
+ const condbr = try continue_scope.addCondBr(node);
+ const cond_block = try loop_scope.addBlock(.block, node);
+ try loop_scope.instructions.append(mod.gpa, cond_block);
+ try continue_scope.setBlockBody(cond_block);
+
// TODO avoid emitting the continue expr when there
// are no jumps to it. This happens when the last statement of a while body is noreturn
// and there are no `continue` statements.
// The "repeat" at the end of a loop body is implied.
if (while_full.ast.cont_expr != 0) {
- _ = try expr(mod, &loop_scope.base, .{ .ty = void_type }, while_full.ast.cont_expr);
+ const void_type_rl: ResultLoc = .{ .ty = @enumToInt(zir.Const.void_type) };
+ _ = try expr(mod, &loop_scope.base, void_type_rl, while_full.ast.cont_expr);
}
- const loop = try scope.arena().create(zir.Inst.Loop);
- loop.* = .{
- .base = .{
- .tag = .loop,
- .src = while_src,
- },
- .positionals = .{
- .body = .{
- .instructions = try scope.arena().dupe(zir.Inst.Ref, loop_scope.instructions.items),
- },
- },
- .kw_args = .{},
- };
- const while_block = try addZIRInstBlock(mod, scope, while_src, .block, .{
- .instructions = try scope.arena().dupe(zir.Inst.Ref, &[1]zir.Inst.Ref{&loop.base}),
- });
- loop_scope.break_block = while_block;
+ const is_inline = while_full.inline_token != null;
+ const repeat_tag: zir.Inst.Tag = if (is_inline) .repeat_inline else .repeat;
+ _ = try loop_scope.addNode(repeat_tag, node);
+
+ try loop_scope.setBlockBody(loop_block);
+ loop_scope.break_block = loop_block;
loop_scope.continue_block = cond_block;
if (while_full.label_token) |label_token| {
loop_scope.label = @as(?Scope.GenZir.Label, Scope.GenZir.Label{
.token = label_token,
- .block_inst = while_block,
+ .block_inst = loop_block,
});
}
- const then_src = token_starts[tree.lastToken(while_full.ast.then_expr)];
var then_scope: Scope.GenZir = .{
.parent = &continue_scope.base,
- .decl = continue_scope.decl,
- .arena = continue_scope.arena,
+ .zir_code = parent_gz.zir_code,
.force_comptime = continue_scope.force_comptime,
.instructions = .{},
};
@@ -2254,29 +2225,31 @@ fn whileExpr(
var else_scope: Scope.GenZir = .{
.parent = &continue_scope.base,
- .decl = continue_scope.decl,
- .arena = continue_scope.arena,
+ .zir_code = parent_gz.zir_code,
.force_comptime = continue_scope.force_comptime,
.instructions = .{},
};
defer else_scope.instructions.deinit(mod.gpa);
const else_node = while_full.ast.else_expr;
- const else_info: struct { src: usize, result: ?*zir.Inst } = if (else_node != 0) blk: {
+ const else_info: struct {
+ src: ast.Node.Index,
+ result: zir.Inst.Ref,
+ } = if (else_node != 0) blk: {
loop_scope.break_count += 1;
const sub_scope = &else_scope.base;
break :blk .{
- .src = token_starts[tree.lastToken(else_node)],
+ .src = else_node,
.result = try expr(mod, sub_scope, loop_scope.break_result_loc, else_node),
};
} else .{
- .src = token_starts[tree.lastToken(while_full.ast.then_expr)],
- .result = null,
+ .src = while_full.ast.then_expr,
+ .result = 0,
};
if (loop_scope.label) |some| {
if (!some.used) {
- return mod.fail(scope, token_starts[some.token], "unused while loop label", .{});
+ return mod.failTok(scope, some.token, "unused while loop label", .{});
}
}
return finishThenElseBlock(
@@ -2289,11 +2262,11 @@ fn whileExpr(
&else_scope,
condbr,
cond,
- then_src,
+ while_full.ast.then_expr,
else_info.src,
then_result,
else_info.result,
- while_block,
+ loop_block,
cond_block,
);
}
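For context, a minimal sketch (hypothetical source, not part of the commit; function names are illustrative) of the two loop forms this `whileExpr` rewrite now handles through a single path, instead of rejecting `inline while` with a "TODO inline while" error:

```zig
// Runtime loop: the branch back to the condition at the end of the body is
// the new `repeat` ZIR instruction.
fn countUp(limit: u32) u32 {
    var i: u32 = 0;
    while (i < limit) : (i += 1) {}
    return i;
}

// Inline loop: same lowering, but the body ends with `repeat_inline`, which
// Sema treats as comptime control flow (see the Sema.zig hunks below).
fn countUpInline(comptime limit: u32) u32 {
    comptime var i: u32 = 0;
    inline while (i < limit) : (i += 1) {}
    return i;
}
```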
src/ir.zig
@@ -80,22 +80,24 @@ pub const Inst = struct {
condbr,
constant,
dbg_stmt,
- // ?T => bool
+ /// ?T => bool
is_null,
- // ?T => bool (inverted logic)
+ /// ?T => bool (inverted logic)
is_non_null,
- // *?T => bool
+ /// *?T => bool
is_null_ptr,
- // *?T => bool (inverted logic)
+ /// *?T => bool (inverted logic)
is_non_null_ptr,
- // E!T => bool
+ /// E!T => bool
is_err,
- // *E!T => bool
+ /// *E!T => bool
is_err_ptr,
bool_and,
bool_or,
/// Read a value from a pointer.
load,
+ /// A labeled block of code that loops forever. At the end of the body it is implied
+ /// to repeat; no explicit "repeat" instruction terminates loop bodies.
loop,
ptrtoint,
ref,
@@ -112,9 +114,9 @@ pub const Inst = struct {
not,
floatcast,
intcast,
- // ?T => T
+ /// ?T => T
optional_payload,
- // *?T => *T
+ /// *?T => *T
optional_payload_ptr,
wrap_optional,
/// E!T -> T
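A hedged illustration (my own example, not from the commit) of why Sema pairs the TZIR `loop` with an enclosing `block`: the loop itself loops forever, so any `break`, including a labeled break with a value, has to branch to the surrounding block, whose merges collect the results.

```zig
// `break :search items[i]` cannot target the TZIR loop (it never terminates);
// it targets the enclosing block, which also serves as the labeled break target.
fn firstEven(items: []const u32) ?u32 {
    var i: usize = 0;
    return search: while (i < items.len) : (i += 1) {
        if (items[i] % 2 == 0) break :search items[i];
    } else null;
}
```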
src/Module.zig
@@ -700,14 +700,7 @@ pub const Scope = struct {
/// It is shared among all the blocks in an inline or comptime called
/// function.
pub const Inlining = struct {
- /// Shared state among the entire inline/comptime call stack.
- shared: *Shared,
merges: Merges,
-
- pub const Shared = struct {
- caller: ?*Fn,
- branch_count: u32,
- };
};
pub const Merges = struct {
@@ -2015,6 +2008,7 @@ fn astgenAndSemaDecl(mod: *Module, decl: *Decl) !bool {
.inst_map = try analysis_arena.allocator.alloc(*ir.Inst, code.instructions.len),
.owner_decl = decl,
.func = null,
+ .owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
@@ -2236,6 +2230,7 @@ fn astgenAndSemaFn(
.inst_map = try fn_type_scope_arena.allocator.alloc(*ir.Inst, fn_type_code.instructions.len),
.owner_decl = decl,
.func = null,
+ .owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
@@ -2544,6 +2539,7 @@ fn astgenAndSemaVarDecl(
.inst_map = try gen_scope_arena.allocator.alloc(*ir.Inst, code.instructions.len),
.owner_decl = decl,
.func = null,
+ .owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
@@ -2608,6 +2604,7 @@ fn astgenAndSemaVarDecl(
.inst_map = try type_scope_arena.allocator.alloc(*ir.Inst, code.instructions.len),
.owner_decl = decl,
.func = null,
+ .owner_func = null,
.param_inst_list = &.{},
};
var block_scope: Scope.Block = .{
@@ -3192,6 +3189,7 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn) !void {
.inst_map = try mod.gpa.alloc(*ir.Inst, func.zir.instructions.len),
.owner_decl = decl,
.func = func,
+ .owner_func = func,
.param_inst_list = param_inst_list,
};
defer mod.gpa.free(sema.inst_map);
@@ -3681,20 +3679,11 @@ pub fn failWithOwnedErrorMsg(mod: *Module, scope: *Scope, err_msg: *ErrorMsg) In
switch (scope.tag) {
.block => {
const block = scope.cast(Scope.Block).?;
- if (block.inlining) |inlining| {
- if (inlining.shared.caller) |func| {
- func.state = .sema_failure;
- } else {
- block.sema.owner_decl.analysis = .sema_failure;
- block.sema.owner_decl.generation = mod.generation;
- }
+ if (block.sema.owner_func) |func| {
+ func.state = .sema_failure;
} else {
- if (block.sema.func) |func| {
- func.state = .sema_failure;
- } else {
- block.sema.owner_decl.analysis = .sema_failure;
- block.sema.owner_decl.generation = mod.generation;
- }
+ block.sema.owner_decl.analysis = .sema_failure;
+ block.sema.owner_decl.generation = mod.generation;
}
mod.failed_decls.putAssumeCapacityNoClobber(block.sema.owner_decl, err_msg);
},
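A rough sketch (hypothetical example, not in the commit) of the failure-attribution change above: during an inline or comptime call, `owner_func` remains the root function that contains the callsite, so a Sema failure inside the callee marks that root function as `sema_failure` instead of consulting a shared inlining stack.

```zig
fn callee() u8 {
    // Out of range for u8: Sema errors while analyzing this body as part of
    // the comptime call below.
    return 300;
}

fn caller() u8 {
    // `owner_func` stays set to caller's Fn across the inline Sema, so
    // caller() is the function that gets marked sema_failure.
    return comptime callee();
}
```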
src/Sema.zig
@@ -17,6 +17,12 @@ inst_map: []*Inst,
/// and `src_decl` of `Scope.Block` is the `Decl` of the callee.
/// This `Decl` owns the arena memory of this `Sema`.
owner_decl: *Decl,
+/// For an inline or comptime function call, this will be the root parent function
+/// which contains the callsite. Corresponds to `owner_decl`.
+owner_func: ?*Module.Fn,
+/// The function this ZIR code is the body of, according to the source code.
+/// This starts out the same as `owner_func` and then diverges in the case of
+/// an inline or comptime function call.
func: ?*Module.Fn,
/// For now, TZIR requires arg instructions to be the first N instructions in the
/// TZIR code. We store references here for the purpose of `resolveInst`.
@@ -26,6 +32,7 @@ func: ?*Module.Fn,
/// > param_count: u32
param_inst_list: []const *ir.Inst,
branch_quota: u32 = 1000,
+branch_count: u32 = 0,
/// This field is updated when a new source location becomes active, so that
/// instructions which do not have explicitly mapped source locations still have
/// access to the source location set by the previous instruction which did
@@ -86,6 +93,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
const map = block.sema.inst_map;
const tags = block.sema.code.instructions.items(.tag);
+ const datas = block.sema.code.instructions.items(.data);
// We use a while(true) loop here to avoid a redundant way of breaking out of
// the loop. The only way to break out of the loop is with a `noreturn`
@@ -178,6 +186,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
.is_non_null_ptr => try sema.zirIsNullPtr(block, inst, true),
.is_null => try sema.zirIsNull(block, inst, false),
.is_null_ptr => try sema.zirIsNullPtr(block, inst, false),
+ .loop => try sema.zirLoop(block, inst),
.merge_error_sets => try sema.zirMergeErrorSets(block, inst),
.mod_rem => try sema.zirArithmetic(block, inst),
.mul => try sema.zirArithmetic(block, inst),
@@ -225,7 +234,7 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
.ret_node => return sema.zirRetNode(block, inst),
.ret_tok => return sema.zirRetTok(block, inst, false),
.@"unreachable" => return sema.zirUnreachable(block, inst),
- .loop => return sema.zirLoop(block, inst),
+ .repeat => return sema.zirRepeat(block, inst),
// Instructions that we know can *never* be noreturn based solely on
// their tag. We avoid needlessly checking if they are noreturn and
@@ -276,6 +285,14 @@ pub fn analyzeBody(sema: *Sema, block: *Scope.Block, body: []const zir.Inst.Inde
try sema.zirResolveInferredAlloc(block, inst);
continue;
},
+
+ // Special case: send comptime control flow back to the beginning of this block.
+ .repeat_inline => {
+ const src: LazySrcLoc = .{ .node_offset = datas[inst].node };
+ try sema.emitBackwardBranch(block, src);
+ i = 0;
+ continue;
+ },
};
if (map[inst].ty.isNoReturn())
return always_noreturn;
@@ -764,14 +781,50 @@ fn zirCompileLog(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerEr
}
}
-fn zirLoop(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) InnerError!zir.Inst.Ref {
+fn zirRepeat(sema: *Sema, block: *Scope.Block, inst: zir.Inst.Index) InnerError!zir.Inst.Ref {
+ const tracy = trace(@src());
+ defer tracy.end();
+
+ const src_node = sema.code.instructions.items(.data)[inst].node;
+ const src: LazySrcLoc = .{ .node_offset = src_node };
+ try sema.requireRuntimeBlock(block, src);
+ return always_noreturn;
+}
+
+fn zirLoop(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
const inst_data = sema.code.instructions.items(.data)[inst].pl_node;
const src = inst_data.src();
- const extra = sema.code.extraData(zir.Inst.MultiOp, inst_data.payload_index);
- const body = sema.code.extra[extra.end..][0..extra.data.operands_len];
+ const extra = sema.code.extraData(zir.Inst.Block, inst_data.payload_index);
+ const body = sema.code.extra[extra.end..][0..extra.data.body_len];
+
+ // TZIR expects a block outside the loop block too.
+ const block_inst = try sema.arena.create(Inst.Block);
+ block_inst.* = .{
+ .base = .{
+ .tag = Inst.Block.base_tag,
+ .ty = undefined,
+ .src = src,
+ },
+ .body = undefined,
+ };
+
+ var child_block = parent_block.makeSubBlock();
+ child_block.label = Scope.Block.Label{
+ .zir_block = inst,
+ .merges = .{
+ .results = .{},
+ .br_list = .{},
+ .block_inst = block_inst,
+ },
+ };
+ const merges = &child_block.label.?.merges;
+
+ defer child_block.instructions.deinit(sema.gpa);
+ defer merges.results.deinit(sema.gpa);
+ defer merges.br_list.deinit(sema.gpa);
// Reserve space for a Loop instruction so that generated Break instructions can
// point to it, even if it doesn't end up getting used because the code ends up being
@@ -786,23 +839,17 @@ fn zirLoop(sema: *Sema, parent_block: *Scope.Block, inst: zir.Inst.Index) InnerE
.body = undefined,
};
- var child_block: Scope.Block = .{
- .parent = parent_block,
- .sema = sema,
- .src_decl = parent_block.src_decl,
- .instructions = .{},
- .inlining = parent_block.inlining,
- .is_comptime = parent_block.is_comptime,
- };
- defer child_block.instructions.deinit(sema.gpa);
+ var loop_block = child_block.makeSubBlock();
+ defer loop_block.instructions.deinit(sema.gpa);
- _ = try sema.analyzeBody(&child_block, body);
+ _ = try sema.analyzeBody(&loop_block, body);
// Loop repetition is implied so the last instruction may or may not be a noreturn instruction.
- try parent_block.instructions.append(sema.gpa, &loop_inst.base);
- loop_inst.body = .{ .instructions = try sema.arena.dupe(*Inst, child_block.instructions.items) };
- return always_noreturn;
+ try child_block.instructions.append(sema.gpa, &loop_inst.base);
+ loop_inst.body = .{ .instructions = try sema.arena.dupe(*Inst, loop_block.instructions.items) };
+
+ return sema.analyzeBlockBody(parent_block, &child_block, merges);
}
fn zirBlock(
@@ -1160,16 +1207,9 @@ fn analyzeCall(
},
.body = undefined,
};
- // If this is the top of the inline/comptime call stack, we use this data.
- // Otherwise we pass on the shared data from the parent scope.
- var shared_inlining: Scope.Block.Inlining.Shared = .{
- .branch_count = 0,
- .caller = sema.func,
- };
// This one is shared among sub-blocks within the same callee, but not
// shared among the entire inline/comptime call stack.
var inlining: Scope.Block.Inlining = .{
- .shared = if (block.inlining) |inlining| inlining.shared else &shared_inlining,
.merges = .{
.results = .{},
.br_list = .{},
@@ -1183,8 +1223,11 @@ fn analyzeCall(
.code = module_fn.zir,
.inst_map = try sema.gpa.alloc(*ir.Inst, module_fn.zir.instructions.len),
.owner_decl = sema.owner_decl,
+ .owner_func = sema.owner_func,
.func = module_fn,
.param_inst_list = casted_args,
+ .branch_quota = sema.branch_quota,
+ .branch_count = sema.branch_count,
};
defer sema.gpa.free(inline_sema.inst_map);
@@ -1210,7 +1253,12 @@ fn analyzeCall(
// the block_inst above.
_ = try inline_sema.root(&child_block);
- break :res try inline_sema.analyzeBlockBody(block, &child_block, merges);
+ const result = try inline_sema.analyzeBlockBody(block, &child_block, merges);
+
+ sema.branch_quota = inline_sema.branch_quota;
+ sema.branch_count = inline_sema.branch_count;
+
+ break :res result;
} else res: {
try sema.requireRuntimeBlock(block, call_src);
break :res try block.addCall(call_src, ret_type, func, casted_args);
@@ -3169,9 +3217,8 @@ fn safetyPanic(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, panic_id: Pani
}
fn emitBackwardBranch(sema: *Sema, block: *Scope.Block, src: LazySrcLoc) !void {
- const shared = block.inlining.?.shared;
- shared.branch_count += 1;
- if (shared.branch_count > sema.branch_quota) {
+ sema.branch_count += 1;
+ if (sema.branch_count > sema.branch_quota) {
// TODO show the "called from here" stack
return sema.mod.fail(&block.base, src, "evaluation exceeded {d} backwards branches", .{sema.branch_quota});
}
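A minimal sketch (my example, not from the commit) of what the relocated counters govern: with `branch_count` now on `Sema` and copied back and forth across inline calls, every `repeat_inline` analyzed counts against `branch_quota`, which defaults to 1000. This assumes `@setEvalBranchQuota` is wired up to raise `branch_quota` in this backend.

```zig
const assert = @import("std").debug.assert;

comptime {
    // Without raising the quota, the 2000 iterations below would exceed the
    // default of 1000 and report "evaluation exceeded ... backwards branches".
    @setEvalBranchQuota(3000);
    var i: u32 = 0;
    var total: u32 = 0;
    inline while (i < 2000) : (i += 1) {
        total += i;
    }
    assert(total == 1999 * 2000 / 2);
}
```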
src/zir.zig
@@ -649,11 +649,19 @@ pub const Inst = struct {
/// Return a boolean true if dereferenced pointer is an error
/// Uses the `un_tok` field.
is_err_ptr,
- /// A labeled block of code that loops forever. At the end of the body it is implied
- /// to repeat; no explicit "repeat" instruction terminates loop bodies.
+ /// A labeled block of code that loops forever. At the end of the body there will be
+ /// either a `repeat` instruction or a `repeat_inline` instruction.
/// Uses the `pl_node` field. The AST node is either a for loop or while loop.
+ /// This ZIR instruction is needed because TZIR does not (yet?) match ZIR, and Sema
+ /// needs to emit more than 1 TZIR block for this instruction.
/// The payload is `Block`.
loop,
+ /// Sends runtime control flow back to the beginning of the current block.
+ /// Uses the `node` field.
+ repeat,
+ /// Sends comptime control flow back to the beginning of the current block.
+ /// Uses the `node` field.
+ repeat_inline,
/// Merge two error sets into one, `E1 || E2`.
merge_error_sets,
/// Ambiguously remainder division or modulus. If the computation would possibly have
@@ -736,6 +744,7 @@ pub const Inst = struct {
/// Uses the `pl_node` field. AST node is the slice syntax. Payload is `SliceSentinel`.
slice_sentinel,
/// Write a value to a pointer. For loading, see `deref`.
+ /// Uses the `bin` union field.
store,
/// Same as `store` but the type of the value being stored will be used to infer
/// the block type. The LHS is the pointer to store to.
@@ -902,6 +911,7 @@ pub const Inst = struct {
.bit_or,
.block,
.block_comptime,
+ .loop,
.bool_br_and,
.bool_br_or,
.bool_not,
@@ -1012,7 +1022,8 @@ pub const Inst = struct {
.ret_tok,
.ret_coerce,
.@"unreachable",
- .loop,
+ .repeat,
+ .repeat_inline,
=> true,
};
}
@@ -1355,12 +1366,13 @@ const Writer = struct {
.bit_and,
.bit_or,
.as,
- .@"break",
.coerce_result_ptr,
.elem_ptr,
.elem_val,
.intcast,
.merge_error_sets,
+ .store,
+ .store_to_block_ptr,
=> try self.writeBin(stream, inst),
.alloc,
@@ -1425,6 +1437,7 @@ const Writer = struct {
.elided => try stream.writeAll(")"),
.break_void_node => try self.writeBreakVoidNode(stream, inst),
.int_type => try self.writeIntType(stream, inst),
+ .@"break" => try self.writeBreak(stream, inst),
.@"asm",
.asm_volatile,
@@ -1436,7 +1449,6 @@ const Writer = struct {
.field_ptr_named,
.field_val_named,
.floatcast,
- .loop,
.slice_start,
.slice_end,
.slice_sentinel,
@@ -1473,6 +1485,7 @@ const Writer = struct {
.block,
.block_comptime,
+ .loop,
=> try self.writePlNodeBlock(stream, inst),
.condbr => try self.writePlNodeCondBr(stream, inst),
@@ -1483,6 +1496,8 @@ const Writer = struct {
.dbg_stmt_node,
.ret_ptr,
.ret_type,
+ .repeat,
+ .repeat_inline,
=> try self.writeNode(stream, inst),
.decl_ref,
@@ -1506,8 +1521,6 @@ const Writer = struct {
.bitcast_result_ptr,
.error_union_type,
.error_set,
- .store,
- .store_to_block_ptr,
.store_to_inferred_ptr,
=> try stream.writeAll("TODO)"),
}
@@ -1772,6 +1785,15 @@ const Writer = struct {
try self.writeSrc(stream, int_type.src());
}
+ fn writeBreak(self: *Writer, stream: anytype, inst: Inst.Index) !void {
+ const inst_data = self.code.instructions.items(.data)[inst].@"break";
+
+ try self.writeInstIndex(stream, inst_data.block_inst);
+ try stream.writeAll(", ");
+ try self.writeInstRef(stream, inst_data.operand);
+ try stream.writeAll(")");
+ }
+
fn writeUnreachable(self: *Writer, stream: anytype, inst: Inst.Index) !void {
const inst_data = self.code.instructions.items(.data)[inst].@"unreachable";
const safety_str = if (inst_data.safety) "safe" else "unsafe";