Commit 01842a6ead
Changed files (1)
src
src/AstGen.zig
@@ -117,6 +117,7 @@ pub fn generate(gpa: *Allocator, tree: Ast) Allocator.Error!Zir {
var top_scope: Scope.Top = .{};
+ var gz_instructions: std.ArrayListUnmanaged(Zir.Inst.Index) = .{};
var gen_scope: GenZir = .{
.force_comptime = true,
.in_defer = false,
@@ -125,8 +126,10 @@ pub fn generate(gpa: *Allocator, tree: Ast) Allocator.Error!Zir {
.decl_node_index = 0,
.decl_line = 0,
.astgen = &astgen,
+ .instructions = &gz_instructions,
+ .instructions_top = 0,
};
- defer gen_scope.instructions.deinit(gpa);
+ defer gz_instructions.deinit(gpa);
const container_decl: Ast.full.ContainerDecl = .{
.layout_token = null,
@@ -1041,12 +1044,12 @@ fn suspendExpr(
}
assert(body_node != 0);
- const suspend_inst = try gz.addBlock(.suspend_block, node);
+ const suspend_inst = try gz.makeBlockInst(.suspend_block, node);
try gz.instructions.append(gpa, suspend_inst);
var suspend_scope = gz.makeSubBlock(scope);
suspend_scope.suspend_node = node;
- defer suspend_scope.instructions.deinit(gpa);
+ defer suspend_scope.unstack();
const body_result = try expr(&suspend_scope, &suspend_scope.base, .none, body_node);
if (!gz.refIsNoReturn(body_result)) {
@@ -1101,7 +1104,6 @@ fn fnProtoExpr(
fn_proto: Ast.full.FnProto,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
- const gpa = astgen.gpa;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
@@ -1147,14 +1149,14 @@ fn fnProtoExpr(
const param_type_node = param.type_expr;
assert(param_type_node != 0);
var param_gz = gz.makeSubBlock(scope);
- defer param_gz.instructions.deinit(gpa);
+ defer param_gz.unstack();
const param_type = try expr(&param_gz, scope, coerced_type_rl, param_type_node);
const param_inst_expected = @intCast(u32, astgen.instructions.len + 1);
_ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type);
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
- const param_inst = try gz.addParam(tag, name_token, param_name, param_gz.instructions.items);
+ const param_inst = try gz.addParam(&param_gz, tag, name_token, param_name);
assert(param_inst_expected == param_inst);
}
}
@@ -1189,16 +1191,16 @@ fn fnProtoExpr(
return astgen.failTok(maybe_bang, "function prototype may not have inferred error set", .{});
}
var ret_gz = gz.makeSubBlock(scope);
- defer ret_gz.instructions.deinit(gpa);
+ defer ret_gz.unstack();
const ret_ty = try expr(&ret_gz, scope, coerced_type_rl, fn_proto.ast.return_type);
const ret_br = try ret_gz.addBreak(.break_inline, 0, ret_ty);
const result = try gz.addFunc(.{
.src_node = fn_proto.ast.proto_node,
.param_block = 0,
- .ret_ty = ret_gz.instructions.items,
+ .ret_gz = &ret_gz,
.ret_br = ret_br,
- .body = &[0]Zir.Inst.Index{},
+ .body_gz = null,
.cc = cc,
.align_inst = align_inst,
.lib_name = 0,
@@ -1376,7 +1378,7 @@ fn arrayInitExprRlPtr(
}
var as_scope = try gz.makeCoercionScope(scope, array_ty, result_ptr);
- defer as_scope.instructions.deinit(gz.astgen.gpa);
+ defer as_scope.unstack();
const result = try arrayInitExprRlPtrInner(&as_scope, scope, node, as_scope.rl_ptr, elements);
return as_scope.finishCoercion(gz, rl, node, result, array_ty);
@@ -1554,7 +1556,7 @@ fn structInitExprRlPtr(
const ty_inst = try typeExpr(gz, scope, struct_init.ast.type_expr);
var as_scope = try gz.makeCoercionScope(scope, ty_inst, result_ptr);
- defer as_scope.instructions.deinit(gz.astgen.gpa);
+ defer as_scope.unstack();
const result = try structInitExprRlPtrInner(&as_scope, scope, node, struct_init, as_scope.rl_ptr);
return as_scope.finishCoercion(gz, rl, node, result, ty_inst);
@@ -1875,7 +1877,7 @@ fn labeledBlockExpr(
// Reserve the Block ZIR instruction index so that we can put it into the GenZir struct
// so that break statements can reference it.
- const block_inst = try gz.addBlock(zir_tag, block_node);
+ const block_inst = try gz.makeBlockInst(zir_tag, block_node);
try gz.instructions.append(astgen.gpa, block_inst);
var block_scope = gz.makeSubBlock(parent_scope);
@@ -1884,7 +1886,7 @@ fn labeledBlockExpr(
.block_inst = block_inst,
};
block_scope.setBreakResultLoc(rl);
- defer block_scope.instructions.deinit(astgen.gpa);
+ defer block_scope.unstack();
defer block_scope.labeled_breaks.deinit(astgen.gpa);
defer block_scope.labeled_store_to_block_ptr_list.deinit(astgen.gpa);
@@ -2489,7 +2491,6 @@ fn varDecl(
) InnerError!*Scope {
try emitDbgNode(gz, node);
const astgen = gz.astgen;
- const gpa = astgen.gpa;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
const main_tokens = tree.nodes.items(.main_token);
@@ -2550,7 +2551,9 @@ fn varDecl(
// Detect whether the initialization expression actually uses the
// result location pointer.
var init_scope = gz.makeSubBlock(scope);
- defer init_scope.instructions.deinit(gpa);
+ // we may add more instructions to gz before stacking init_scope
+ init_scope.instructions_top = GenZir.unstacked_top;
+ defer init_scope.unstack();
var resolve_inferred_alloc: Zir.Inst.Ref = .none;
var opt_type_inst: Zir.Inst.Ref = .none;
@@ -2558,6 +2561,7 @@ fn varDecl(
const type_inst = try typeExpr(gz, &init_scope.base, var_decl.ast.type_node);
opt_type_inst = type_inst;
if (align_inst == .none) {
+ init_scope.instructions_top = gz.instructions.items.len;
init_scope.rl_ptr = try init_scope.addUnNode(.alloc, type_inst, node);
} else {
init_scope.rl_ptr = try gz.addAllocExtended(.{
@@ -2567,19 +2571,24 @@ fn varDecl(
.is_const = true,
.is_comptime = false,
});
+ init_scope.instructions_top = gz.instructions.items.len;
}
init_scope.rl_ty_inst = type_inst;
} else {
- const alloc = if (align_inst == .none)
- try init_scope.addNode(.alloc_inferred, node)
- else
- try gz.addAllocExtended(.{
+ const alloc = if (align_inst == .none) alloc: {
+ init_scope.instructions_top = gz.instructions.items.len;
+ break :alloc try init_scope.addNode(.alloc_inferred, node);
+ } else alloc: {
+ const ref = try gz.addAllocExtended(.{
.node = node,
.type_inst = .none,
.align_inst = align_inst,
.is_const = true,
.is_comptime = false,
});
+ init_scope.instructions_top = gz.instructions.items.len;
+ break :alloc ref;
+ };
resolve_inferred_alloc = alloc;
init_scope.rl_ptr = alloc;
}
@@ -2589,20 +2598,24 @@ fn varDecl(
const zir_tags = astgen.instructions.items(.tag);
const zir_datas = astgen.instructions.items(.data);
- const parent_zir = &gz.instructions;
if (align_inst == .none and init_scope.rvalue_rl_count == 1) {
// Result location pointer not used. We don't need an alloc for this
// const local, and type inference becomes trivial.
- // Move the init_scope instructions into the parent scope, eliding
- // the alloc instruction and the store_to_block_ptr instruction.
- try parent_zir.ensureUnusedCapacity(gpa, init_scope.instructions.items.len);
- for (init_scope.instructions.items) |src_inst| {
+ // Implicitly move the init_scope instructions into the parent scope,
+ // then elide the alloc instruction and the store_to_block_ptr instruction.
+ var src = init_scope.instructions_top;
+ var dst = src;
+ init_scope.instructions_top = GenZir.unstacked_top;
+ while (src < gz.instructions.items.len) : (src += 1) {
+ const src_inst = gz.instructions.items[src];
if (indexToRef(src_inst) == init_scope.rl_ptr) continue;
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == init_scope.rl_ptr) continue;
}
- parent_zir.appendAssumeCapacity(src_inst);
+ gz.instructions.items[dst] = src_inst;
+ dst += 1;
}
+ gz.instructions.items.len = dst;
const sub_scope = try block_arena.create(Scope.LocalVal);
sub_scope.* = .{
@@ -2617,11 +2630,13 @@ fn varDecl(
}
// The initialization expression took advantage of the result location
// of the const local. In this case we will create an alloc and a LocalPtr for it.
- // Move the init_scope instructions into the parent scope, swapping
+ // Implicitly move the init_scope instructions into the parent scope, then swap
// store_to_block_ptr for store_to_inferred_ptr.
- const expected_len = parent_zir.items.len + init_scope.instructions.items.len;
- try parent_zir.ensureTotalCapacity(gpa, expected_len);
- for (init_scope.instructions.items) |src_inst| {
+
+ var src = init_scope.instructions_top;
+ init_scope.instructions_top = GenZir.unstacked_top;
+ while (src < gz.instructions.items.len) : (src += 1) {
+ const src_inst = gz.instructions.items[src];
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == init_scope.rl_ptr) {
if (var_decl.ast.type_node != 0) {
@@ -2631,9 +2646,7 @@ fn varDecl(
}
}
}
- parent_zir.appendAssumeCapacity(src_inst);
}
- assert(parent_zir.items.len == expected_len);
if (resolve_inferred_alloc != .none) {
_ = try gz.addUnNode(.resolve_inferred_alloc, resolve_inferred_alloc, node);
}
@@ -3120,7 +3133,6 @@ fn fnDecl(
body_node: Ast.Node.Index,
fn_proto: Ast.full.FnProto,
) InnerError!void {
- const gpa = astgen.gpa;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
@@ -3130,7 +3142,7 @@ fn fnDecl(
// We insert this at the beginning so that its instruction index marks the
// start of the top level declaration.
- const block_inst = try gz.addBlock(.block_inline, fn_proto.ast.proto_node);
+ const block_inst = try gz.makeBlockInst(.block_inline, fn_proto.ast.proto_node);
var decl_gz: GenZir = .{
.force_comptime = true,
@@ -3139,8 +3151,10 @@ fn fnDecl(
.decl_line = gz.calcLine(decl_node),
.parent = scope,
.astgen = astgen,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer decl_gz.instructions.deinit(gpa);
+ defer decl_gz.unstack();
var fn_gz: GenZir = .{
.force_comptime = false,
@@ -3149,8 +3163,10 @@ fn fnDecl(
.decl_line = decl_gz.decl_line,
.parent = &decl_gz.base,
.astgen = astgen,
+ .instructions = gz.instructions,
+ .instructions_top = GenZir.unstacked_top,
};
- defer fn_gz.instructions.deinit(gpa);
+ defer fn_gz.unstack();
// TODO: support noinline
const is_pub = fn_proto.visib_token != null;
@@ -3216,7 +3232,7 @@ fn fnDecl(
const param_type_node = param.type_expr;
assert(param_type_node != 0);
var param_gz = decl_gz.makeSubBlock(scope);
- defer param_gz.instructions.deinit(gpa);
+ defer param_gz.unstack();
const param_type = try expr(&param_gz, params_scope, coerced_type_rl, param_type_node);
const param_inst_expected = @intCast(u32, astgen.instructions.len + 1);
_ = try param_gz.addBreak(.break_inline, param_inst_expected, param_type);
@@ -3224,7 +3240,7 @@ fn fnDecl(
const main_tokens = tree.nodes.items(.main_token);
const name_token = param.name_token orelse main_tokens[param_type_node];
const tag: Zir.Inst.Tag = if (is_comptime) .param_comptime else .param;
- const param_inst = try decl_gz.addParam(tag, name_token, param_name, param_gz.instructions.items);
+ const param_inst = try decl_gz.addParam(&param_gz, tag, name_token, param_name);
assert(param_inst_expected == param_inst);
break :param indexToRef(param_inst);
};
@@ -3289,7 +3305,7 @@ fn fnDecl(
};
var ret_gz = decl_gz.makeSubBlock(params_scope);
- defer ret_gz.instructions.deinit(gpa);
+ defer ret_gz.unstack();
const ret_ty = try expr(&ret_gz, params_scope, coerced_type_rl, fn_proto.ast.return_type);
const ret_br = try ret_gz.addBreak(.break_inline, 0, ret_ty);
@@ -3302,10 +3318,10 @@ fn fnDecl(
}
break :func try decl_gz.addFunc(.{
.src_node = decl_node,
- .ret_ty = ret_gz.instructions.items,
+ .ret_gz = &ret_gz,
.ret_br = ret_br,
.param_block = block_inst,
- .body = &[0]Zir.Inst.Index{},
+ .body_gz = null,
.cc = cc,
.align_inst = .none, // passed in the per-decl data
.lib_name = lib_name,
@@ -3319,6 +3335,9 @@ fn fnDecl(
return astgen.failTok(fn_proto.ast.fn_token, "non-extern function is variadic", .{});
}
+ // as a scope, fn_gz encloses ret_gz, but for instruction list, fn_gz stacks on ret_gz
+ fn_gz.instructions_top = ret_gz.instructions.items.len;
+
const prev_fn_block = astgen.fn_block;
astgen.fn_block = &fn_gz;
defer astgen.fn_block = prev_fn_block;
@@ -3332,14 +3351,7 @@ fn fnDecl(
_ = try expr(&fn_gz, params_scope, .none, body_node);
try checkUsed(gz, &fn_gz.base, params_scope);
- const need_implicit_ret = blk: {
- if (fn_gz.instructions.items.len == 0)
- break :blk true;
- const last = fn_gz.instructions.items[fn_gz.instructions.items.len - 1];
- const zir_tags = astgen.instructions.items(.tag);
- break :blk !zir_tags[last].isNoReturn();
- };
- if (need_implicit_ret) {
+ if (!fn_gz.endsWithNoReturn()) {
// Since we are adding the return instruction here, we must handle the coercion.
// We do this by using the `ret_coerce` instruction.
_ = try fn_gz.addUnTok(.ret_coerce, .void_value, tree.lastToken(body_node));
@@ -3350,9 +3362,9 @@ fn fnDecl(
.lbrace_line = lbrace_line,
.lbrace_column = lbrace_column,
.param_block = block_inst,
- .ret_ty = ret_gz.instructions.items,
+ .ret_gz = &ret_gz,
.ret_br = ret_br,
- .body = fn_gz.instructions.items,
+ .body_gz = &fn_gz,
.cc = cc,
.align_inst = .none, // passed in the per-decl data
.lib_name = lib_name,
@@ -3364,7 +3376,7 @@ fn fnDecl(
};
// We add this at the end so that its instruction index marks the end range
- // of the top level declaration.
+ // of the top level declaration. addFunc already unstacked fn_gz and ret_gz.
_ = try decl_gz.addBreak(.break_inline, block_inst, func_inst);
try decl_gz.setBlockBody(block_inst);
@@ -3396,14 +3408,13 @@ fn globalVarDecl(
node: Ast.Node.Index,
var_decl: Ast.full.VarDecl,
) InnerError!void {
- const gpa = astgen.gpa;
const tree = astgen.tree;
const token_tags = tree.tokens.items(.tag);
const is_mutable = token_tags[var_decl.ast.mut_token] == .keyword_var;
// We do this at the beginning so that the instruction index marks the range start
// of the top level declaration.
- const block_inst = try gz.addBlock(.block_inline, node);
+ const block_inst = try gz.makeBlockInst(.block_inline, node);
const name_token = var_decl.ast.mut_token + 1;
const name_str_index = try astgen.identAsString(name_token);
@@ -3416,8 +3427,10 @@ fn globalVarDecl(
.force_comptime = true,
.in_defer = false,
.anon_name_strategy = .parent,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer block_scope.instructions.deinit(gpa);
+ defer block_scope.unstack();
const is_pub = var_decl.visib_token != null;
const is_export = blk: {
@@ -3543,14 +3556,13 @@ fn comptimeDecl(
wip_members: *WipMembers,
node: Ast.Node.Index,
) InnerError!void {
- const gpa = astgen.gpa;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const body_node = node_datas[node].lhs;
// Up top so the ZIR instruction index marks the start range of this
// top-level declaration.
- const block_inst = try gz.addBlock(.block_inline, node);
+ const block_inst = try gz.makeBlockInst(.block_inline, node);
wip_members.nextDecl(false, false, false, false);
var decl_block: GenZir = .{
@@ -3560,11 +3572,13 @@ fn comptimeDecl(
.decl_line = gz.calcLine(node),
.parent = scope,
.astgen = astgen,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer decl_block.instructions.deinit(gpa);
+ defer decl_block.unstack();
const block_result = try expr(&decl_block, &decl_block.base, .none, body_node);
- if (decl_block.instructions.items.len == 0 or !decl_block.refIsNoReturn(block_result)) {
+ if (decl_block.isEmpty() or !decl_block.refIsNoReturn(block_result)) {
_ = try decl_block.addBreak(.break_inline, block_inst, .void_value);
}
try decl_block.setBlockBody(block_inst);
@@ -3589,7 +3603,6 @@ fn usingnamespaceDecl(
wip_members: *WipMembers,
node: Ast.Node.Index,
) InnerError!void {
- const gpa = astgen.gpa;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
@@ -3602,7 +3615,7 @@ fn usingnamespaceDecl(
};
// Up top so the ZIR instruction index marks the start range of this
// top-level declaration.
- const block_inst = try gz.addBlock(.block_inline, node);
+ const block_inst = try gz.makeBlockInst(.block_inline, node);
wip_members.nextDecl(is_pub, true, false, false);
var decl_block: GenZir = .{
@@ -3612,8 +3625,10 @@ fn usingnamespaceDecl(
.decl_line = gz.calcLine(node),
.parent = scope,
.astgen = astgen,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer decl_block.instructions.deinit(gpa);
+ defer decl_block.unstack();
const namespace_inst = try typeExpr(&decl_block, &decl_block.base, type_expr);
_ = try decl_block.addBreak(.break_inline, block_inst, namespace_inst);
@@ -3639,14 +3654,13 @@ fn testDecl(
wip_members: *WipMembers,
node: Ast.Node.Index,
) InnerError!void {
- const gpa = astgen.gpa;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
const body_node = node_datas[node].rhs;
// Up top so the ZIR instruction index marks the start range of this
// top-level declaration.
- const block_inst = try gz.addBlock(.block_inline, node);
+ const block_inst = try gz.makeBlockInst(.block_inline, node);
wip_members.nextDecl(false, false, false, false);
@@ -3657,8 +3671,10 @@ fn testDecl(
.decl_line = gz.calcLine(node),
.parent = scope,
.astgen = astgen,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer decl_block.instructions.deinit(gpa);
+ defer decl_block.unstack();
const test_name: u32 = blk: {
const main_tokens = tree.nodes.items(.main_token);
@@ -3679,8 +3695,10 @@ fn testDecl(
.decl_line = decl_block.decl_line,
.parent = &decl_block.base,
.astgen = astgen,
+ .instructions = decl_block.instructions,
+ .instructions_top = decl_block.instructions.items.len,
};
- defer fn_block.instructions.deinit(gpa);
+ defer fn_block.unstack();
const prev_fn_block = astgen.fn_block;
astgen.fn_block = &fn_block;
@@ -3693,7 +3711,7 @@ fn testDecl(
const lbrace_column = @intCast(u32, astgen.source_column);
const block_result = try expr(&fn_block, &fn_block.base, .none, body_node);
- if (fn_block.instructions.items.len == 0 or !fn_block.refIsNoReturn(block_result)) {
+ if (fn_block.isEmpty() or !fn_block.refIsNoReturn(block_result)) {
// Since we are adding the return instruction here, we must handle the coercion.
// We do this by using the `ret_coerce` instruction.
_ = try fn_block.addUnTok(.ret_coerce, .void_value, tree.lastToken(body_node));
@@ -3704,9 +3722,9 @@ fn testDecl(
.lbrace_line = lbrace_line,
.lbrace_column = lbrace_column,
.param_block = block_inst,
- .ret_ty = &.{},
+ .ret_gz = null,
.ret_br = 0,
- .body = fn_block.instructions.items,
+ .body_gz = &fn_block,
.cc = .none,
.align_inst = .none,
.lib_name = 0,
@@ -3776,8 +3794,10 @@ fn structDeclInner(
.astgen = astgen,
.force_comptime = true,
.in_defer = false,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer block_scope.instructions.deinit(gpa);
+ defer block_scope.unstack();
const decl_count = try astgen.scanDecls(&namespace, container_decl.ast.members);
const field_count = @intCast(u32, container_decl.ast.members.len - decl_count);
@@ -3829,14 +3849,16 @@ fn structDeclInner(
}
}
- if (block_scope.instructions.items.len != 0) {
+ if (!block_scope.isEmpty()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
}
+ const body = block_scope.instructionsSlice();
+
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
- .body_len = @intCast(u32, block_scope.instructions.items.len),
+ .body_len = @intCast(u32, body.len),
.fields_len = field_count,
.decls_len = decl_count,
.known_has_bits = known_has_bits,
@@ -3845,11 +3867,12 @@ fn structDeclInner(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + block_scope.instructions.items.len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body.len + fields_slice.len);
astgen.extra.appendSliceAssumeCapacity(decls_slice);
- astgen.extra.appendSliceAssumeCapacity(block_scope.instructions.items);
+ astgen.extra.appendSliceAssumeCapacity(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
+ block_scope.unstack();
try gz.addNamespaceCaptures(&namespace);
return indexToRef(decl_inst);
}
@@ -3888,8 +3911,10 @@ fn unionDeclInner(
.astgen = astgen,
.force_comptime = true,
.in_defer = false,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer block_scope.instructions.deinit(gpa);
+ defer block_scope.unstack();
const decl_count = try astgen.scanDecls(&namespace, members);
const field_count = @intCast(u32, members.len - decl_count);
@@ -3972,15 +3997,17 @@ fn unionDeclInner(
return astgen.failNode(node, "union declarations must have at least one tag", .{});
}
- if (block_scope.instructions.items.len != 0) {
+ if (!block_scope.isEmpty()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
}
+ const body = block_scope.instructionsSlice();
+
try gz.setUnion(decl_inst, .{
.src_node = node,
.layout = layout,
.tag_type = arg_inst,
- .body_len = @intCast(u32, block_scope.instructions.items.len),
+ .body_len = @intCast(u32, body.len),
.fields_len = field_count,
.decls_len = decl_count,
.auto_enum_tag = have_auto_enum,
@@ -3989,11 +4016,12 @@ fn unionDeclInner(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + block_scope.instructions.items.len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body.len + fields_slice.len);
astgen.extra.appendSliceAssumeCapacity(decls_slice);
- astgen.extra.appendSliceAssumeCapacity(block_scope.instructions.items);
+ astgen.extra.appendSliceAssumeCapacity(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
+ block_scope.unstack();
try gz.addNamespaceCaptures(&namespace);
return indexToRef(decl_inst);
}
@@ -4163,8 +4191,10 @@ fn containerDecl(
.astgen = astgen,
.force_comptime = true,
.in_defer = false,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
- defer block_scope.instructions.deinit(gpa);
+ defer block_scope.unstack();
_ = try astgen.scanDecls(&namespace, container_decl.ast.members);
@@ -4215,15 +4245,17 @@ fn containerDecl(
}
}
- if (block_scope.instructions.items.len != 0) {
+ if (!block_scope.isEmpty()) {
_ = try block_scope.addBreak(.break_inline, decl_inst, .void_value);
}
+ const body = block_scope.instructionsSlice();
+
try gz.setEnum(decl_inst, .{
.src_node = node,
.nonexhaustive = nonexhaustive,
.tag_type = arg_inst,
- .body_len = @intCast(u32, block_scope.instructions.items.len),
+ .body_len = @intCast(u32, body.len),
.fields_len = @intCast(u32, counts.total_fields),
.decls_len = @intCast(u32, counts.decls),
});
@@ -4231,11 +4263,12 @@ fn containerDecl(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + block_scope.instructions.items.len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body.len + fields_slice.len);
astgen.extra.appendSliceAssumeCapacity(decls_slice);
- astgen.extra.appendSliceAssumeCapacity(block_scope.instructions.items);
+ astgen.extra.appendSliceAssumeCapacity(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
+ block_scope.unstack();
try gz.addNamespaceCaptures(&namespace);
return rvalue(gz, rl, indexToRef(decl_inst), node);
},
@@ -4453,7 +4486,7 @@ fn tryExpr(
var block_scope = parent_gz.makeSubBlock(scope);
block_scope.setBreakResultLoc(rl);
- defer block_scope.instructions.deinit(astgen.gpa);
+ defer block_scope.unstack();
const operand_rl: ResultLoc = switch (block_scope.break_result_loc) {
.ref => .ref,
@@ -4473,12 +4506,13 @@ fn tryExpr(
const cond = try block_scope.addUnNode(err_ops[0], operand, node);
const condbr = try block_scope.addCondBr(.condbr, node);
- const block = try parent_gz.addBlock(.block, node);
- try parent_gz.instructions.append(astgen.gpa, block);
+ const block = try parent_gz.makeBlockInst(.block, node);
try block_scope.setBlockBody(block);
+ // block_scope unstacked now, can add new instructions to parent_gz
+ try parent_gz.instructions.append(astgen.gpa, block);
var then_scope = parent_gz.makeSubBlock(scope);
- defer then_scope.instructions.deinit(astgen.gpa);
+ defer then_scope.unstack();
block_scope.break_count += 1;
// This could be a pointer or value depending on `err_ops[2]`.
@@ -4488,8 +4522,9 @@ fn tryExpr(
else => try rvalue(&then_scope, block_scope.break_result_loc, unwrapped_payload, node),
};
+ // else_scope will be stacked on then_scope as both are stacked on parent_gz
var else_scope = parent_gz.makeSubBlock(scope);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
const err_code = try else_scope.addUnNode(err_ops[1], operand, node);
try genDefers(&else_scope, &fn_block.base, scope, .{ .both = err_code });
@@ -4529,7 +4564,7 @@ fn orelseCatchExpr(
var block_scope = parent_gz.makeSubBlock(scope);
block_scope.setBreakResultLoc(rl);
- defer block_scope.instructions.deinit(astgen.gpa);
+ defer block_scope.unstack();
const operand_rl: ResultLoc = switch (block_scope.break_result_loc) {
.ref => .ref,
@@ -4544,12 +4579,13 @@ fn orelseCatchExpr(
const cond = try block_scope.addUnNode(cond_op, operand, node);
const condbr = try block_scope.addCondBr(.condbr, node);
- const block = try parent_gz.addBlock(.block, node);
- try parent_gz.instructions.append(astgen.gpa, block);
+ const block = try parent_gz.makeBlockInst(.block, node);
try block_scope.setBlockBody(block);
+ // block_scope unstacked now, can add new instructions to parent_gz
+ try parent_gz.instructions.append(astgen.gpa, block);
var then_scope = parent_gz.makeSubBlock(scope);
- defer then_scope.instructions.deinit(astgen.gpa);
+ defer then_scope.unstack();
// This could be a pointer or value depending on `unwrap_op`.
const unwrapped_payload = try then_scope.addUnNode(unwrap_op, operand, node);
@@ -4559,7 +4595,7 @@ fn orelseCatchExpr(
};
var else_scope = parent_gz.makeSubBlock(scope);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
var err_val_scope: Scope.LocalVal = undefined;
const else_sub_scope = blk: {
@@ -4606,6 +4642,7 @@ fn orelseCatchExpr(
);
}
+/// Supports `else_scope` stacked on `then_scope` stacked on `block_scope`. Unstacks `else_scope` then `then_scope`.
fn finishThenElseBlock(
parent_gz: *GenZir,
rl: ResultLoc,
@@ -4624,33 +4661,33 @@ fn finishThenElseBlock(
// We now have enough information to decide whether the result instruction should
// be communicated via result location pointer or break instructions.
const strat = rl.strategy(block_scope);
+ // else_scope may be stacked on then_scope, so check for no-return on then_scope manually
+ const tags = parent_gz.astgen.instructions.items(.tag);
+ const then_slice = then_scope.instructionsSliceUpto(else_scope);
+ const then_no_return = then_slice.len > 0 and tags[then_slice[then_slice.len - 1]].isNoReturn();
+ const else_no_return = else_scope.endsWithNoReturn();
+
switch (strat.tag) {
.break_void => {
- if (!then_scope.endsWithNoReturn()) {
- _ = try then_scope.addBreak(break_tag, then_break_block, .void_value);
- }
- if (!else_scope.endsWithNoReturn()) {
- _ = try else_scope.addBreak(break_tag, main_block, .void_value);
- }
+ const then_break = if (!then_no_return) try then_scope.makeBreak(break_tag, then_break_block, .void_value) else 0;
+ const else_break = if (!else_no_return) try else_scope.makeBreak(break_tag, main_block, .void_value) else 0;
assert(!strat.elide_store_to_block_ptr_instructions);
- try setCondBrPayload(condbr, cond, then_scope, else_scope);
+ try setCondBrPayload(condbr, cond, then_scope, then_break, else_scope, else_break);
return indexToRef(main_block);
},
.break_operand => {
- if (!then_scope.endsWithNoReturn()) {
- _ = try then_scope.addBreak(break_tag, then_break_block, then_result);
- }
- if (else_result != .none) {
- if (!else_scope.endsWithNoReturn()) {
- _ = try else_scope.addBreak(break_tag, main_block, else_result);
- }
- } else {
- _ = try else_scope.addBreak(break_tag, main_block, .void_value);
- }
+ const then_break = if (!then_no_return) try then_scope.makeBreak(break_tag, then_break_block, then_result) else 0;
+ const else_break = if (else_result == .none)
+ try else_scope.makeBreak(break_tag, main_block, .void_value)
+ else if (!else_no_return)
+ try else_scope.makeBreak(break_tag, main_block, else_result)
+ else
+ 0;
+
if (strat.elide_store_to_block_ptr_instructions) {
- try setCondBrPayloadElideBlockStorePtr(condbr, cond, then_scope, else_scope, block_scope.rl_ptr);
+ try setCondBrPayloadElideBlockStorePtr(condbr, cond, then_scope, then_break, else_scope, else_break, block_scope.rl_ptr);
} else {
- try setCondBrPayload(condbr, cond, then_scope, else_scope);
+ try setCondBrPayload(condbr, cond, then_scope, then_break, else_scope, else_break);
}
const block_ref = indexToRef(main_block);
switch (rl) {
@@ -4778,7 +4815,7 @@ fn boolBinOp(
const bool_br = try gz.addBoolBr(zir_tag, lhs);
var rhs_scope = gz.makeSubBlock(scope);
- defer rhs_scope.instructions.deinit(gz.astgen.gpa);
+ defer rhs_scope.unstack();
const rhs = try expr(&rhs_scope, &rhs_scope.base, bool_rl, node_datas[node].rhs);
if (!gz.refIsNoReturn(rhs)) {
_ = try rhs_scope.addBreak(.break_inline, bool_br, rhs);
@@ -4802,7 +4839,7 @@ fn ifExpr(
var block_scope = parent_gz.makeSubBlock(scope);
block_scope.setBreakResultLoc(rl);
- defer block_scope.instructions.deinit(astgen.gpa);
+ defer block_scope.unstack();
const payload_is_ref = if (if_full.payload_token) |payload_token|
token_tags[payload_token] == .asterisk
@@ -4840,12 +4877,13 @@ fn ifExpr(
const condbr = try block_scope.addCondBr(.condbr, node);
- const block = try parent_gz.addBlock(.block, node);
- try parent_gz.instructions.append(astgen.gpa, block);
+ const block = try parent_gz.makeBlockInst(.block, node);
try block_scope.setBlockBody(block);
+ // block_scope unstacked now, can add new instructions to parent_gz
+ try parent_gz.instructions.append(astgen.gpa, block);
var then_scope = parent_gz.makeSubBlock(scope);
- defer then_scope.instructions.deinit(astgen.gpa);
+ defer then_scope.unstack();
var payload_val_scope: Scope.LocalVal = undefined;
@@ -4911,7 +4949,7 @@ fn ifExpr(
// instructions or not.
var else_scope = parent_gz.makeSubBlock(scope);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
const else_node = if_full.ast.else_expr;
const else_info: struct {
@@ -4974,52 +5012,70 @@ fn ifExpr(
);
}
+/// Supports `else_scope` stacked on `then_scope`. Unstacks `else_scope` then `then_scope`.
fn setCondBrPayload(
condbr: Zir.Inst.Index,
cond: Zir.Inst.Ref,
then_scope: *GenZir,
+ then_break: Zir.Inst.Index,
else_scope: *GenZir,
+ else_break: Zir.Inst.Index,
) !void {
+ defer then_scope.unstack();
+ defer else_scope.unstack();
const astgen = then_scope.astgen;
-
+ const then_body = then_scope.instructionsSliceUpto(else_scope);
+ const else_body = else_scope.instructionsSlice();
+ const then_body_len = @intCast(u32, then_body.len + @boolToInt(then_break != 0));
+ const else_body_len = @intCast(u32, else_body.len + @boolToInt(else_break != 0));
try astgen.extra.ensureUnusedCapacity(astgen.gpa, @typeInfo(Zir.Inst.CondBr).Struct.fields.len +
- then_scope.instructions.items.len + else_scope.instructions.items.len);
+ then_body_len + else_body_len);
const zir_datas = astgen.instructions.items(.data);
zir_datas[condbr].pl_node.payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.CondBr{
.condition = cond,
- .then_body_len = @intCast(u32, then_scope.instructions.items.len),
- .else_body_len = @intCast(u32, else_scope.instructions.items.len),
+ .then_body_len = then_body_len,
+ .else_body_len = else_body_len,
});
- astgen.extra.appendSliceAssumeCapacity(then_scope.instructions.items);
- astgen.extra.appendSliceAssumeCapacity(else_scope.instructions.items);
+ astgen.extra.appendSliceAssumeCapacity(then_body);
+ if (then_break != 0) astgen.extra.appendAssumeCapacity(then_break);
+ astgen.extra.appendSliceAssumeCapacity(else_body);
+ if (else_break != 0) astgen.extra.appendAssumeCapacity(else_break);
}
+/// Supports `else_scope` stacked on `then_scope`. Unstacks `else_scope` then `then_scope`.
fn setCondBrPayloadElideBlockStorePtr(
condbr: Zir.Inst.Index,
cond: Zir.Inst.Ref,
then_scope: *GenZir,
+ then_break: Zir.Inst.Index,
else_scope: *GenZir,
+ else_break: Zir.Inst.Index,
block_ptr: Zir.Inst.Ref,
) !void {
+ defer then_scope.unstack();
+ defer else_scope.unstack();
const astgen = then_scope.astgen;
-
+ const then_body = then_scope.instructionsSliceUpto(else_scope);
+ const else_body = else_scope.instructionsSlice();
+ const then_body_len = @intCast(u32, then_body.len + @boolToInt(then_break != 0));
+ const else_body_len = @intCast(u32, else_body.len + @boolToInt(else_break != 0));
try astgen.extra.ensureUnusedCapacity(astgen.gpa, @typeInfo(Zir.Inst.CondBr).Struct.fields.len +
- then_scope.instructions.items.len + else_scope.instructions.items.len);
+ then_body_len + else_body_len);
const zir_tags = astgen.instructions.items(.tag);
const zir_datas = astgen.instructions.items(.data);
const condbr_pl = astgen.addExtraAssumeCapacity(Zir.Inst.CondBr{
.condition = cond,
- .then_body_len = @intCast(u32, then_scope.instructions.items.len),
- .else_body_len = @intCast(u32, else_scope.instructions.items.len),
+ .then_body_len = then_body_len,
+ .else_body_len = else_body_len,
});
zir_datas[condbr].pl_node.payload_index = condbr_pl;
const then_body_len_index = condbr_pl + 1;
const else_body_len_index = condbr_pl + 2;
- for (then_scope.instructions.items) |src_inst| {
+ for (then_body) |src_inst| {
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == block_ptr) {
astgen.extra.items[then_body_len_index] -= 1;
@@ -5028,7 +5084,8 @@ fn setCondBrPayloadElideBlockStorePtr(
}
astgen.extra.appendAssumeCapacity(src_inst);
}
- for (else_scope.instructions.items) |src_inst| {
+ if (then_break != 0) astgen.extra.appendAssumeCapacity(then_break);
+ for (else_body) |src_inst| {
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == block_ptr) {
astgen.extra.items[else_body_len_index] -= 1;
@@ -5037,6 +5094,7 @@ fn setCondBrPayloadElideBlockStorePtr(
}
astgen.extra.appendAssumeCapacity(src_inst);
}
+ if (else_break != 0) astgen.extra.appendAssumeCapacity(else_break);
}
fn whileExpr(
@@ -5056,17 +5114,17 @@ fn whileExpr(
const is_inline = parent_gz.force_comptime or while_full.inline_token != null;
const loop_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .loop;
- const loop_block = try parent_gz.addBlock(loop_tag, node);
+ const loop_block = try parent_gz.makeBlockInst(loop_tag, node);
try parent_gz.instructions.append(astgen.gpa, loop_block);
var loop_scope = parent_gz.makeSubBlock(scope);
loop_scope.setBreakResultLoc(rl);
- defer loop_scope.instructions.deinit(astgen.gpa);
+ defer loop_scope.unstack();
defer loop_scope.labeled_breaks.deinit(astgen.gpa);
defer loop_scope.labeled_store_to_block_ptr_list.deinit(astgen.gpa);
var continue_scope = parent_gz.makeSubBlock(&loop_scope.base);
- defer continue_scope.instructions.deinit(astgen.gpa);
+ defer continue_scope.unstack();
const payload_is_ref = if (while_full.payload_token) |payload_token|
token_tags[payload_token] == .asterisk
@@ -5105,15 +5163,19 @@ fn whileExpr(
const condbr_tag: Zir.Inst.Tag = if (is_inline) .condbr_inline else .condbr;
const condbr = try continue_scope.addCondBr(condbr_tag, node);
const block_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .block;
- const cond_block = try loop_scope.addBlock(block_tag, node);
- try loop_scope.instructions.append(astgen.gpa, cond_block);
+ const cond_block = try loop_scope.makeBlockInst(block_tag, node);
try continue_scope.setBlockBody(cond_block);
+ // continue_scope unstacked now, can add new instructions to loop_scope
+ try loop_scope.instructions.append(astgen.gpa, cond_block);
+ // make scope now but don't stack on parent_gz until loop_scope
+ // gets unstacked after cont_expr is emitted and added below
var then_scope = parent_gz.makeSubBlock(&continue_scope.base);
- defer then_scope.instructions.deinit(astgen.gpa);
+ then_scope.instructions_top = GenZir.unstacked_top;
+ defer then_scope.unstack();
+ var payload_inst: Zir.Inst.Index = 0;
var payload_val_scope: Scope.LocalVal = undefined;
-
const then_sub_scope = s: {
if (while_full.error_token != null) {
if (while_full.payload_token) |payload_token| {
@@ -5121,7 +5183,8 @@ fn whileExpr(
.err_union_payload_unsafe_ptr
else
.err_union_payload_unsafe;
- const payload_inst = try then_scope.addUnNode(tag, cond.inst, node);
+ // will add this instruction to then_scope.instructions below
+ payload_inst = try then_scope.makeUnNode(tag, cond.inst, node);
const ident_token = if (payload_is_ref) payload_token + 1 else payload_token;
const ident_bytes = tree.tokenSlice(ident_token);
if (mem.eql(u8, "_", ident_bytes))
@@ -5133,7 +5196,7 @@ fn whileExpr(
.parent = &then_scope.base,
.gen_zir = &then_scope,
.name = ident_name,
- .inst = payload_inst,
+ .inst = indexToRef(payload_inst),
.token_src = payload_token,
.id_cat = .@"capture",
};
@@ -5147,7 +5210,8 @@ fn whileExpr(
.optional_payload_unsafe_ptr
else
.optional_payload_unsafe;
- const payload_inst = try then_scope.addUnNode(tag, cond.inst, node);
+ // will add this instruction to then_scope.instructions below
+ payload_inst = try then_scope.makeUnNode(tag, cond.inst, node);
const ident_name = try astgen.identAsString(ident_token);
const ident_bytes = tree.tokenSlice(ident_token);
if (mem.eql(u8, "_", ident_bytes))
@@ -5157,7 +5221,7 @@ fn whileExpr(
.parent = &then_scope.base,
.gen_zir = &then_scope,
.name = ident_name,
- .inst = payload_inst,
+ .inst = indexToRef(payload_inst),
.token_src = ident_token,
.id_cat = .@"capture",
};
@@ -5187,6 +5251,9 @@ fn whileExpr(
});
}
+ // done adding instructions to loop_scope, can now stack then_scope
+ then_scope.instructions_top = then_scope.instructions.items.len;
+ if (payload_inst != 0) try then_scope.instructions.append(astgen.gpa, payload_inst);
const then_result = try expr(&then_scope, then_sub_scope, loop_scope.break_result_loc, while_full.ast.then_expr);
if (!then_scope.endsWithNoReturn()) {
loop_scope.break_count += 1;
@@ -5194,7 +5261,7 @@ fn whileExpr(
try checkUsed(parent_gz, &then_scope.base, then_sub_scope);
var else_scope = parent_gz.makeSubBlock(&continue_scope.base);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
const else_node = while_full.ast.else_expr;
const else_info: struct {
@@ -5207,7 +5274,7 @@ fn whileExpr(
.err_union_code_ptr
else
.err_union_code;
- const payload_inst = try else_scope.addUnNode(tag, cond.inst, node);
+ const else_payload_inst = try else_scope.addUnNode(tag, cond.inst, node);
const ident_name = try astgen.identAsString(error_token);
const ident_bytes = tree.tokenSlice(error_token);
if (mem.eql(u8, ident_bytes, "_"))
@@ -5217,7 +5284,7 @@ fn whileExpr(
.parent = &else_scope.base,
.gen_zir = &else_scope,
.name = ident_name,
- .inst = payload_inst,
+ .inst = else_payload_inst,
.token_src = error_token,
.id_cat = .@"capture",
};
@@ -5299,17 +5366,17 @@ fn forExpr(
};
const loop_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .loop;
- const loop_block = try parent_gz.addBlock(loop_tag, node);
+ const loop_block = try parent_gz.makeBlockInst(loop_tag, node);
try parent_gz.instructions.append(astgen.gpa, loop_block);
var loop_scope = parent_gz.makeSubBlock(scope);
loop_scope.setBreakResultLoc(rl);
- defer loop_scope.instructions.deinit(astgen.gpa);
+ defer loop_scope.unstack();
defer loop_scope.labeled_breaks.deinit(astgen.gpa);
defer loop_scope.labeled_store_to_block_ptr_list.deinit(astgen.gpa);
var cond_scope = parent_gz.makeSubBlock(&loop_scope.base);
- defer cond_scope.instructions.deinit(astgen.gpa);
+ defer cond_scope.unstack();
// check condition i < array_expr.len
const index = try cond_scope.addUnNode(.load, index_ptr, for_full.ast.cond_expr);
@@ -5321,9 +5388,10 @@ fn forExpr(
const condbr_tag: Zir.Inst.Tag = if (is_inline) .condbr_inline else .condbr;
const condbr = try cond_scope.addCondBr(condbr_tag, node);
const block_tag: Zir.Inst.Tag = if (is_inline) .block_inline else .block;
- const cond_block = try loop_scope.addBlock(block_tag, node);
- try loop_scope.instructions.append(astgen.gpa, cond_block);
+ const cond_block = try loop_scope.makeBlockInst(block_tag, node);
try cond_scope.setBlockBody(cond_block);
+ // cond_block unstacked now, can add new instructions to loop_scope
+ try loop_scope.instructions.append(astgen.gpa, cond_block);
// Increment the index variable.
const index_2 = try loop_scope.addUnNode(.load, index_ptr, for_full.ast.cond_expr);
@@ -5346,7 +5414,7 @@ fn forExpr(
}
var then_scope = parent_gz.makeSubBlock(&cond_scope.base);
- defer then_scope.instructions.deinit(astgen.gpa);
+ defer then_scope.unstack();
var payload_val_scope: Scope.LocalVal = undefined;
var index_scope: Scope.LocalPtr = undefined;
@@ -5408,7 +5476,7 @@ fn forExpr(
try checkUsed(parent_gz, &then_scope.base, then_sub_scope);
var else_scope = parent_gz.makeSubBlock(&cond_scope.base);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
const else_node = for_full.ast.else_expr;
const else_info: struct {
@@ -5600,15 +5668,16 @@ fn switchExpr(
defer astgen.scratch.items.len = scratch_top;
var block_scope = parent_gz.makeSubBlock(scope);
+ // block_scope not used for collecting instructions
+ block_scope.instructions_top = GenZir.unstacked_top;
block_scope.setBreakResultLoc(rl);
- defer block_scope.instructions.deinit(gpa);
// This gets added to the parent block later, after the item expressions.
- const switch_block = try parent_gz.addBlock(.switch_block, switch_node);
+ const switch_block = try parent_gz.makeBlockInst(.switch_block, switch_node);
// We re-use this same scope for all cases, including the special prong, if any.
var case_scope = parent_gz.makeSubBlock(&block_scope.base);
- defer case_scope.instructions.deinit(gpa);
+ case_scope.instructions_top = GenZir.unstacked_top;
// In this pass we generate all the item and prong expressions.
var multi_case_index: u32 = 0;
@@ -5620,12 +5689,10 @@ fn switchExpr(
else => unreachable,
};
- // Reset the scope.
- case_scope.instructions.shrinkRetainingCapacity(0);
-
const is_multi_case = case.ast.values.len > 1 or
(case.ast.values.len == 1 and node_tags[case.ast.values[0]] == .switch_range);
+ var capture_inst: Zir.Inst.Index = 0;
var capture_val_scope: Scope.LocalVal = undefined;
const sub_scope = blk: {
const payload_token = case.payload_token orelse break :blk &case_scope.base;
@@ -5640,19 +5707,20 @@ fn switchExpr(
}
break :blk &case_scope.base;
}
- const capture = if (case_node == special_node) capture: {
+ if (case_node == special_node) {
const capture_tag: Zir.Inst.Tag = if (is_ptr)
.switch_capture_else_ref
else
.switch_capture_else;
- break :capture try case_scope.add(.{
+ capture_inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
+ try astgen.instructions.append(gpa, .{
.tag = capture_tag,
.data = .{ .switch_capture = .{
.switch_inst = switch_block,
.prong_index = undefined,
} },
});
- } else capture: {
+ } else {
const is_multi_case_bits: u2 = @boolToInt(is_multi_case);
const is_ptr_bits: u2 = @boolToInt(is_ptr);
const capture_tag: Zir.Inst.Tag = switch ((is_multi_case_bits << 1) | is_ptr_bits) {
@@ -5662,20 +5730,21 @@ fn switchExpr(
0b11 => .switch_capture_multi_ref,
};
const capture_index = if (is_multi_case) multi_case_index else scalar_case_index;
- break :capture try case_scope.add(.{
+ capture_inst = @intCast(Zir.Inst.Index, astgen.instructions.len);
+ try astgen.instructions.append(gpa, .{
.tag = capture_tag,
.data = .{ .switch_capture = .{
.switch_inst = switch_block,
.prong_index = capture_index,
} },
});
- };
+ }
const capture_name = try astgen.identAsString(ident);
capture_val_scope = .{
.parent = &case_scope.base,
.gen_zir = &case_scope,
.name = capture_name,
- .inst = capture,
+ .inst = indexToRef(capture_inst),
.token_src = payload_token,
.id_cat = .@"capture",
};
@@ -5728,14 +5797,23 @@ fn switchExpr(
break :blk header_index + 1;
};
- const case_result = try expr(&case_scope, sub_scope, block_scope.break_result_loc, case.ast.target_expr);
- try checkUsed(parent_gz, &case_scope.base, sub_scope);
- if (!parent_gz.refIsNoReturn(case_result)) {
- block_scope.break_count += 1;
- _ = try case_scope.addBreak(.@"break", switch_block, case_result);
+ {
+ // temporarily stack case_scope on parent_gz
+ case_scope.instructions_top = parent_gz.instructions.items.len;
+ defer case_scope.unstack();
+
+ if (capture_inst != 0) try case_scope.instructions.append(gpa, capture_inst);
+ const case_result = try expr(&case_scope, sub_scope, block_scope.break_result_loc, case.ast.target_expr);
+ try checkUsed(parent_gz, &case_scope.base, sub_scope);
+ if (!parent_gz.refIsNoReturn(case_result)) {
+ block_scope.break_count += 1;
+ _ = try case_scope.addBreak(.@"break", switch_block, case_result);
+ }
+
+ const case_slice = case_scope.instructionsSlice();
+ payloads.items[body_len_index] = @intCast(u32, case_slice.len);
+ try payloads.appendSlice(gpa, case_slice);
}
- payloads.items[body_len_index] = @intCast(u32, case_scope.instructions.items.len);
- try payloads.appendSlice(gpa, case_scope.instructions.items);
}
// Now that the item expressions are generated we can add this.
try parent_gz.instructions.append(gpa, switch_block);
@@ -5917,13 +5995,13 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
const condbr = try gz.addCondBr(.condbr, node);
var then_scope = gz.makeSubBlock(scope);
- defer then_scope.instructions.deinit(astgen.gpa);
+ defer then_scope.unstack();
try genDefers(&then_scope, defer_outer, scope, .normal_only);
try then_scope.addRet(rl, operand, node);
var else_scope = gz.makeSubBlock(scope);
- defer else_scope.instructions.deinit(astgen.gpa);
+ defer else_scope.unstack();
const which_ones: DefersToEmit = if (!defer_counts.need_err_code) .both_sans_err else .{
.both = try else_scope.addUnNode(.err_union_code, result, node),
@@ -5931,7 +6009,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
try genDefers(&else_scope, defer_outer, scope, which_ones);
try else_scope.addRet(rl, operand, node);
- try setCondBrPayload(condbr, is_non_err, &then_scope, &else_scope);
+ try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0);
return Zir.Inst.Ref.unreachable_value;
},
@@ -6561,10 +6639,8 @@ fn asRlPtr(
operand_node: Ast.Node.Index,
dest_type: Zir.Inst.Ref,
) InnerError!Zir.Inst.Ref {
- const astgen = parent_gz.astgen;
-
var as_scope = try parent_gz.makeCoercionScope(scope, dest_type, result_ptr);
- defer as_scope.instructions.deinit(astgen.gpa);
+ defer as_scope.unstack();
const result = try reachableExpr(&as_scope, &as_scope.base, .{ .block_ptr = &as_scope }, operand_node, src_node);
return as_scope.finishCoercion(parent_gz, rl, operand_node, result, dest_type);
@@ -7336,14 +7412,15 @@ fn cImport(
var block_scope = gz.makeSubBlock(scope);
block_scope.force_comptime = true;
block_scope.c_import = true;
- defer block_scope.instructions.deinit(gpa);
+ defer block_scope.unstack();
- const block_inst = try gz.addBlock(.c_import, node);
+ const block_inst = try gz.makeBlockInst(.c_import, node);
const block_result = try expr(&block_scope, &block_scope.base, .none, body_node);
if (!gz.refIsNoReturn(block_result)) {
_ = try block_scope.addBreak(.break_inline, block_inst, .void_value);
}
try block_scope.setBlockBody(block_inst);
+ // block_scope unstacked now, can add new instructions to gz
try gz.instructions.append(gpa, block_inst);
return indexToRef(block_inst);
@@ -8167,6 +8244,7 @@ fn nodeImpliesRuntimeBits(tree: *const Ast, start_node: Ast.Node.Index) bool {
/// result locations must call this function on their result.
/// As an example, if the `ResultLoc` is `ptr`, it will write the result to the pointer.
/// If the `ResultLoc` is `ty`, it will coerce the result to the type.
+/// Assumes nothing stacked on `gz`.
fn rvalue(
gz: *GenZir,
rl: ResultLoc,
@@ -8779,9 +8857,12 @@ const GenZir = struct {
parent: *Scope,
/// All `GenZir` scopes for the same ZIR share this.
astgen: *AstGen,
- /// Keeps track of the list of instructions in this scope only. Indexes
- /// to instructions in `astgen`.
- instructions: ArrayListUnmanaged(Zir.Inst.Index) = .{},
+ /// Keeps track of the list of instructions in this scope. Possibly shared.
+ /// Indexes to instructions in `astgen`.
+ instructions: *ArrayListUnmanaged(Zir.Inst.Index),
+ /// A sub-block may share its instructions ArrayList with containing GenZir,
+ /// if use is strictly nested. This saves prior size of list for unstacking.
+ instructions_top: usize,
label: ?Label = null,
break_block: Zir.Inst.Index = 0,
continue_block: Zir.Inst.Index = 0,
@@ -8817,6 +8898,36 @@ const GenZir = struct {
/// Keys are the raw instruction index, values are the closure_capture instruction.
captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
+ const unstacked_top = std.math.maxInt(usize);
+ /// Call unstack before adding any new instructions to containing GenZir.
+ fn unstack(self: *GenZir) void {
+ if (self.instructions_top != unstacked_top) {
+ self.instructions.items.len = self.instructions_top;
+ self.instructions_top = unstacked_top;
+ }
+ }
+
+ fn isEmpty(self: *const GenZir) bool {
+ return (self.instructions_top == unstacked_top) or
+ (self.instructions.items.len == self.instructions_top);
+ }
+
+ fn instructionsSlice(self: *const GenZir) []Zir.Inst.Index {
+ return if (self.instructions_top == unstacked_top)
+ &[0]Zir.Inst.Index{}
+ else
+ self.instructions.items[self.instructions_top..];
+ }
+
+ fn instructionsSliceUpto(self: *const GenZir, stacked_gz: *GenZir) []Zir.Inst.Index {
+ return if (self.instructions_top == unstacked_top)
+ &[0]Zir.Inst.Index{}
+ else if (self.instructions == stacked_gz.instructions and stacked_gz.instructions_top != unstacked_top)
+ self.instructions.items[self.instructions_top..stacked_gz.instructions_top]
+ else
+ self.instructions.items[self.instructions_top..];
+ }
+
fn makeSubBlock(gz: *GenZir, scope: *Scope) GenZir {
return .{
.force_comptime = gz.force_comptime,
@@ -8828,6 +8939,8 @@ const GenZir = struct {
.astgen = gz.astgen,
.suspend_node = gz.suspend_node,
.nosuspend_node = gz.nosuspend_node,
+ .instructions = gz.instructions,
+ .instructions_top = gz.instructions.items.len,
};
}
@@ -8841,12 +8954,13 @@ const GenZir = struct {
// result location. If it does, elide the coerce_result_ptr instruction
// as well as the store instruction, instead passing the result as an rvalue.
var as_scope = parent_gz.makeSubBlock(scope);
- errdefer as_scope.instructions.deinit(parent_gz.astgen.gpa);
+ errdefer as_scope.unstack();
as_scope.rl_ptr = try as_scope.addBin(.coerce_result_ptr, dest_type, result_ptr);
return as_scope;
}
+ /// Assumes `as_scope` is stacked immediately on top of `parent_gz`. Unstacks `as_scope`.
fn finishCoercion(
as_scope: *GenZir,
parent_gz: *GenZir,
@@ -8854,25 +8968,32 @@ const GenZir = struct {
src_node: Ast.Node.Index,
result: Zir.Inst.Ref,
dest_type: Zir.Inst.Ref,
- ) !Zir.Inst.Ref {
+ ) InnerError!Zir.Inst.Ref {
+ assert(as_scope.instructions == parent_gz.instructions);
const astgen = as_scope.astgen;
- const parent_zir = &parent_gz.instructions;
if (as_scope.rvalue_rl_count == 1) {
// Busted! This expression didn't actually need a pointer.
const zir_tags = astgen.instructions.items(.tag);
const zir_datas = astgen.instructions.items(.data);
- try parent_zir.ensureUnusedCapacity(astgen.gpa, as_scope.instructions.items.len);
- for (as_scope.instructions.items) |src_inst| {
+ var src: usize = as_scope.instructions_top;
+ var dst: usize = src;
+ while (src < as_scope.instructions.items.len) : (src += 1) {
+ const src_inst = as_scope.instructions.items[src];
if (indexToRef(src_inst) == as_scope.rl_ptr) continue;
if (zir_tags[src_inst] == .store_to_block_ptr) {
if (zir_datas[src_inst].bin.lhs == as_scope.rl_ptr) continue;
}
- parent_zir.appendAssumeCapacity(src_inst);
+ as_scope.instructions.items[dst] = src_inst;
+ dst += 1;
}
+ parent_gz.instructions.items.len -= src - dst;
+ as_scope.instructions_top = GenZir.unstacked_top;
+ // as_scope now unstacked, can add new instructions to parent_gz
const casted_result = try parent_gz.addBin(.as, dest_type, result);
return rvalue(parent_gz, rl, casted_result, src_node);
} else {
- try parent_zir.appendSlice(astgen.gpa, as_scope.instructions.items);
+ // implicitly move all as_scope instructions to parent_gz
+ as_scope.instructions_top = GenZir.unstacked_top;
return result;
}
}
@@ -8883,9 +9004,10 @@ const GenZir = struct {
used: bool = false,
};
+ /// Assumes nothing stacked on `gz`.
fn endsWithNoReturn(gz: GenZir) bool {
+ if (gz.isEmpty()) return false;
const tags = gz.astgen.instructions.items(.tag);
- if (gz.instructions.items.len == 0) return false;
const last_inst = gz.instructions.items[gz.instructions.items.len - 1];
return tags[last_inst].isNoReturn();
}
@@ -8955,41 +9077,46 @@ const GenZir = struct {
}
}
- fn setBoolBrBody(gz: GenZir, inst: Zir.Inst.Index) !void {
+ /// Assumes nothing stacked on `gz`. Unstacks `gz`.
+ fn setBoolBrBody(gz: *GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
- try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
- gz.instructions.items.len);
+ const body = gz.instructionsSlice();
+ try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len + body.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].bool_br.payload_index = gz.astgen.addExtraAssumeCapacity(
- Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
+ Zir.Inst.Block{ .body_len = @intCast(u32, body.len) },
);
- gz.astgen.extra.appendSliceAssumeCapacity(gz.instructions.items);
+ gz.astgen.extra.appendSliceAssumeCapacity(body);
+ gz.unstack();
}
- fn setBlockBody(gz: GenZir, inst: Zir.Inst.Index) !void {
+ /// Assumes nothing stacked on `gz`. Unstacks `gz`.
+ fn setBlockBody(gz: *GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
- try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
- gz.instructions.items.len);
+ const body = gz.instructionsSlice();
+ try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len + body.len);
const zir_datas = gz.astgen.instructions.items(.data);
zir_datas[inst].pl_node.payload_index = gz.astgen.addExtraAssumeCapacity(
- Zir.Inst.Block{ .body_len = @intCast(u32, gz.instructions.items.len) },
+ Zir.Inst.Block{ .body_len = @intCast(u32, body.len) },
);
- gz.astgen.extra.appendSliceAssumeCapacity(gz.instructions.items);
+ gz.astgen.extra.appendSliceAssumeCapacity(body);
+ gz.unstack();
}
/// Same as `setBlockBody` except we don't copy instructions which are
/// `store_to_block_ptr` instructions with lhs set to .none.
- fn setBlockBodyEliding(gz: GenZir, inst: Zir.Inst.Index) !void {
+ /// Assumes nothing stacked on `gz`. Unstacks `gz`.
+ fn setBlockBodyEliding(gz: *GenZir, inst: Zir.Inst.Index) !void {
const gpa = gz.astgen.gpa;
- try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len +
- gz.instructions.items.len);
+ const body = gz.instructionsSlice();
+ try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Block).Struct.fields.len + body.len);
const zir_datas = gz.astgen.instructions.items(.data);
const zir_tags = gz.astgen.instructions.items(.tag);
const block_pl_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Block{
- .body_len = @intCast(u32, gz.instructions.items.len),
+ .body_len = @intCast(u32, body.len),
});
zir_datas[inst].pl_node.payload_index = block_pl_index;
- for (gz.instructions.items) |sub_inst| {
+ for (body) |sub_inst| {
if (zir_tags[sub_inst] == .store_to_block_ptr and
zir_datas[sub_inst].bin.lhs == .none)
{
@@ -8999,15 +9126,17 @@ const GenZir = struct {
}
gz.astgen.extra.appendAssumeCapacity(sub_inst);
}
+ gz.unstack();
}
+ /// Supports `body_gz` stacked on `ret_gz` stacked on `gz`. Unstacks `body_gz` and `ret_gz`.
fn addFunc(gz: *GenZir, args: struct {
src_node: Ast.Node.Index,
lbrace_line: u32 = 0,
lbrace_column: u32 = 0,
- body: []const Zir.Inst.Index,
+ body_gz: ?*GenZir,
param_block: Zir.Inst.Index,
- ret_ty: []const Zir.Inst.Index,
+ ret_gz: ?*GenZir,
ret_br: Zir.Inst.Index,
cc: Zir.Inst.Ref,
align_inst: Zir.Inst.Ref,
@@ -9021,12 +9150,13 @@ const GenZir = struct {
const astgen = gz.astgen;
const gpa = astgen.gpa;
- try gz.instructions.ensureUnusedCapacity(gpa, 1);
try astgen.instructions.ensureUnusedCapacity(gpa, 1);
+ var body: []Zir.Inst.Index = &[0]Zir.Inst.Index{};
+ var ret_ty: []Zir.Inst.Index = &[0]Zir.Inst.Index{};
var src_locs_buffer: [3]u32 = undefined;
var src_locs: []u32 = src_locs_buffer[0..0];
- if (args.body.len != 0) {
+ if (args.body_gz) |body_gz| {
const tree = astgen.tree;
const node_tags = tree.nodes.items(.tag);
const node_datas = tree.nodes.items(.data);
@@ -9044,6 +9174,13 @@ const GenZir = struct {
src_locs_buffer[1] = rbrace_line;
src_locs_buffer[2] = columns;
src_locs = &src_locs_buffer;
+
+ body = body_gz.instructionsSlice();
+ if (args.ret_gz) |ret_gz|
+ ret_ty = ret_gz.instructionsSliceUpto(body_gz);
+ } else {
+ if (args.ret_gz) |ret_gz|
+ ret_ty = ret_gz.instructionsSlice();
}
if (args.cc != .none or args.lib_name != 0 or
@@ -9053,7 +9190,7 @@ const GenZir = struct {
try astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.ExtendedFunc).Struct.fields.len +
- args.ret_ty.len + args.body.len + src_locs.len +
+ ret_ty.len + body.len + src_locs.len +
@boolToInt(args.lib_name != 0) +
@boolToInt(args.align_inst != .none) +
@boolToInt(args.cc != .none),
@@ -9061,8 +9198,8 @@ const GenZir = struct {
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.ExtendedFunc{
.src_node = gz.nodeIndexToRelative(args.src_node),
.param_block = args.param_block,
- .ret_body_len = @intCast(u32, args.ret_ty.len),
- .body_len = @intCast(u32, args.body.len),
+ .ret_body_len = @intCast(u32, ret_ty.len),
+ .body_len = @intCast(u32, body.len),
});
if (args.lib_name != 0) {
astgen.extra.appendAssumeCapacity(args.lib_name);
@@ -9073,9 +9210,13 @@ const GenZir = struct {
if (args.align_inst != .none) {
astgen.extra.appendAssumeCapacity(@enumToInt(args.align_inst));
}
- astgen.extra.appendSliceAssumeCapacity(args.ret_ty);
- astgen.extra.appendSliceAssumeCapacity(args.body);
+ astgen.extra.appendSliceAssumeCapacity(ret_ty);
+ astgen.extra.appendSliceAssumeCapacity(body);
astgen.extra.appendSliceAssumeCapacity(src_locs);
+ // order is important when unstacking
+ if (args.body_gz) |body_gz| body_gz.unstack();
+ if (args.ret_gz) |ret_gz| ret_gz.unstack();
+ try gz.instructions.ensureUnusedCapacity(gpa, 1);
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
if (args.ret_br != 0) {
@@ -9103,17 +9244,21 @@ const GenZir = struct {
try astgen.extra.ensureUnusedCapacity(
gpa,
@typeInfo(Zir.Inst.Func).Struct.fields.len +
- args.ret_ty.len + args.body.len + src_locs.len,
+ ret_ty.len + body.len + src_locs.len,
);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.Func{
.param_block = args.param_block,
- .ret_body_len = @intCast(u32, args.ret_ty.len),
- .body_len = @intCast(u32, args.body.len),
+ .ret_body_len = @intCast(u32, ret_ty.len),
+ .body_len = @intCast(u32, body.len),
});
- astgen.extra.appendSliceAssumeCapacity(args.ret_ty);
- astgen.extra.appendSliceAssumeCapacity(args.body);
+ astgen.extra.appendSliceAssumeCapacity(ret_ty);
+ astgen.extra.appendSliceAssumeCapacity(body);
astgen.extra.appendSliceAssumeCapacity(src_locs);
+ // order is important when unstacking
+ if (args.body_gz) |body_gz| body_gz.unstack();
+ if (args.ret_gz) |ret_gz| ret_gz.unstack();
+ try gz.instructions.ensureUnusedCapacity(gpa, 1);
const tag: Zir.Inst.Tag = if (args.is_inferred_error) .func_inferred else .func;
const new_index = @intCast(Zir.Inst.Index, astgen.instructions.len);
@@ -9260,6 +9405,25 @@ const GenZir = struct {
});
}
+ fn makeUnNode(
+ gz: *GenZir,
+ tag: Zir.Inst.Tag,
+ operand: Zir.Inst.Ref,
+ /// Absolute node index. This function does the conversion to offset from Decl.
+ src_node: Ast.Node.Index,
+ ) !Zir.Inst.Index {
+ assert(operand != .none);
+ const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
+ try gz.astgen.instructions.append(gz.astgen.gpa, .{
+ .tag = tag,
+ .data = .{ .un_node = .{
+ .operand = operand,
+ .src_node = gz.nodeIndexToRelative(src_node),
+ } },
+ });
+ return new_index;
+ }
+
fn addPlNode(
gz: *GenZir,
tag: Zir.Inst.Tag,
@@ -9300,25 +9464,27 @@ const GenZir = struct {
});
}
+ /// Supports `param_gz` stacked on `gz`. Assumes nothing stacked on `param_gz`. Unstacks `param_gz`.
fn addParam(
gz: *GenZir,
+ param_gz: *GenZir,
tag: Zir.Inst.Tag,
/// Absolute token index. This function does the conversion to Decl offset.
abs_tok_index: Ast.TokenIndex,
name: u32,
- body: []const u32,
) !Zir.Inst.Index {
const gpa = gz.astgen.gpa;
- try gz.instructions.ensureUnusedCapacity(gpa, 1);
+ const param_body = param_gz.instructionsSlice();
try gz.astgen.instructions.ensureUnusedCapacity(gpa, 1);
try gz.astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.Param).Struct.fields.len +
- body.len);
+ param_body.len);
const payload_index = gz.astgen.addExtraAssumeCapacity(Zir.Inst.Param{
.name = name,
- .body_len = @intCast(u32, body.len),
+ .body_len = @intCast(u32, param_body.len),
});
- gz.astgen.extra.appendSliceAssumeCapacity(body);
+ gz.astgen.extra.appendSliceAssumeCapacity(param_body);
+ param_gz.unstack();
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
gz.astgen.instructions.appendAssumeCapacity(.{
@@ -9461,6 +9627,23 @@ const GenZir = struct {
});
}
+ fn makeBreak(
+ gz: *GenZir,
+ tag: Zir.Inst.Tag,
+ break_block: Zir.Inst.Index,
+ operand: Zir.Inst.Ref,
+ ) !Zir.Inst.Index {
+ const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
+ try gz.astgen.instructions.append(gz.astgen.gpa, .{
+ .tag = tag,
+ .data = .{ .@"break" = .{
+ .block_inst = break_block,
+ .operand = operand,
+ } },
+ });
+ return new_index;
+ }
+
fn addBin(
gz: *GenZir,
tag: Zir.Inst.Tag,
@@ -9649,7 +9832,7 @@ const GenZir = struct {
/// Note that this returns a `Zir.Inst.Index` not a ref.
/// Does *not* append the block instruction to the scope.
/// Leaves the `payload_index` field undefined.
- fn addBlock(gz: *GenZir, tag: Zir.Inst.Tag, node: Ast.Node.Index) !Zir.Inst.Index {
+ fn makeBlockInst(gz: *GenZir, tag: Zir.Inst.Tag, node: Ast.Node.Index) !Zir.Inst.Index {
const new_index = @intCast(Zir.Inst.Index, gz.astgen.instructions.len);
const gpa = gz.astgen.gpa;
try gz.astgen.instructions.append(gpa, .{