Commit a6ca20b9a1
Changed files (7)
lib/std/zig/AstGen.zig
@@ -2205,7 +2205,7 @@ fn breakExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index) Inn
},
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
.top => unreachable,
}
@@ -2279,7 +2279,7 @@ fn continueExpr(parent_gz: *GenZir, parent_scope: *Scope, node: Ast.Node.Index)
try parent_gz.addDefer(defer_scope.index, defer_scope.len);
},
.defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.top => unreachable,
}
}
@@ -2412,7 +2412,7 @@ fn checkLabelRedefinition(astgen: *AstGen, parent_scope: *Scope, label: Ast.Toke
.local_val => scope = scope.cast(Scope.LocalVal).?.parent,
.local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => break,
+ .namespace => break,
.top => unreachable,
}
}
@@ -2790,7 +2790,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.@"resume",
.@"await",
.ret_err_value_code,
- .closure_get,
.ret_ptr,
.ret_type,
.for_len,
@@ -2860,7 +2859,6 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
.store_to_inferred_ptr,
.resolve_inferred_alloc,
.set_runtime_safety,
- .closure_capture,
.memcpy,
.memset,
.validate_deref,
@@ -2928,7 +2926,7 @@ fn countDefers(outer_scope: *Scope, inner_scope: *Scope) struct {
const have_err_payload = defer_scope.remapped_err_code != .none;
need_err_code = need_err_code or have_err_payload;
},
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -2998,7 +2996,7 @@ fn genDefers(
.normal_only => continue,
}
},
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -3042,7 +3040,7 @@ fn checkUsed(gz: *GenZir, outer_scope: *Scope, inner_scope: *Scope) InnerError!v
scope = s.parent;
},
.defer_normal, .defer_error => scope = scope.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => unreachable,
+ .namespace => unreachable,
.top => unreachable,
}
}
@@ -4732,7 +4730,7 @@ fn testDecl(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(name_str_index)) |i| {
if (found_already) |f| {
@@ -4849,10 +4847,10 @@ fn structDeclInner(
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
+ .captures_len = 0,
.fields_len = 0,
.decls_len = 0,
- .backing_int_ref = .none,
- .backing_int_body_len = 0,
+ .has_backing_int = false,
.known_non_opv = false,
.known_comptime_only = false,
.is_tuple = false,
@@ -5142,10 +5140,10 @@ fn structDeclInner(
try gz.setStruct(decl_inst, .{
.src_node = node,
.layout = layout,
+ .captures_len = @intCast(namespace.captures.count()),
.fields_len = field_count,
.decls_len = decl_count,
- .backing_int_ref = backing_int_ref,
- .backing_int_body_len = @intCast(backing_int_body_len),
+ .has_backing_int = backing_int_ref != .none,
.known_non_opv = known_non_opv,
.known_comptime_only = known_comptime_only,
.is_tuple = is_tuple,
@@ -5159,15 +5157,22 @@ fn structDeclInner(
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
const bodies_slice = astgen.scratch.items[bodies_start..];
- try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_len +
- decls_slice.len + fields_slice.len + bodies_slice.len);
- astgen.extra.appendSliceAssumeCapacity(astgen.scratch.items[scratch_top..][0..backing_int_body_len]);
+ try astgen.extra.ensureUnusedCapacity(gpa, backing_int_body_len + 2 +
+ decls_slice.len + namespace.captures.count() + fields_slice.len + bodies_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
+ if (backing_int_ref != .none) {
+ astgen.extra.appendAssumeCapacity(@intCast(backing_int_body_len));
+ if (backing_int_body_len == 0) {
+ astgen.extra.appendAssumeCapacity(@intFromEnum(backing_int_ref));
+ } else {
+ astgen.extra.appendSliceAssumeCapacity(astgen.scratch.items[scratch_top..][0..backing_int_body_len]);
+ }
+ }
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
astgen.extra.appendSliceAssumeCapacity(bodies_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return decl_inst.toRef();
}
@@ -5368,6 +5373,7 @@ fn unionDeclInner(
.src_node = node,
.layout = layout,
.tag_type = arg_inst,
+ .captures_len = @intCast(namespace.captures.count()),
.body_len = body_len,
.fields_len = field_count,
.decls_len = decl_count,
@@ -5379,13 +5385,13 @@ fn unionDeclInner(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body_len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len + body_len + fields_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.appendBodyWithFixups(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return decl_inst.toRef();
}
@@ -5555,7 +5561,7 @@ fn containerDecl(
defer block_scope.unstack();
_ = try astgen.scanDecls(&namespace, container_decl.ast.members);
- namespace.base.tag = .enum_namespace;
+ namespace.base.tag = .namespace;
const arg_inst: Zir.Inst.Ref = if (container_decl.ast.arg != 0)
try comptimeExpr(&block_scope, &namespace.base, coerced_type_ri, container_decl.ast.arg)
@@ -5586,7 +5592,6 @@ fn containerDecl(
if (member_node == counts.nonexhaustive_node)
continue;
fields_hasher.update(tree.getNodeSource(member_node));
- namespace.base.tag = .namespace;
var member = switch (try containerMember(&block_scope, &namespace.base, &wip_members, member_node)) {
.decl => continue,
.field => |field| field,
@@ -5630,7 +5635,6 @@ fn containerDecl(
},
);
}
- namespace.base.tag = .enum_namespace;
const tag_value_inst = try expr(&block_scope, &namespace.base, .{ .rl = .{ .ty = arg_inst } }, member.ast.value_expr);
wip_members.appendToField(@intFromEnum(tag_value_inst));
}
@@ -5676,6 +5680,7 @@ fn containerDecl(
.src_node = node,
.nonexhaustive = nonexhaustive,
.tag_type = arg_inst,
+ .captures_len = @intCast(namespace.captures.count()),
.body_len = body_len,
.fields_len = @intCast(counts.total_fields),
.decls_len = @intCast(counts.decls),
@@ -5685,13 +5690,13 @@ fn containerDecl(
wip_members.finishBits(bits_per_field);
const decls_slice = wip_members.declsSlice();
const fields_slice = wip_members.fieldsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len + body_len + fields_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len + body_len + fields_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
astgen.appendBodyWithFixups(body);
astgen.extra.appendSliceAssumeCapacity(fields_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return rvalue(gz, ri, decl_inst.toRef(), node);
},
.keyword_opaque => {
@@ -5733,16 +5738,17 @@ fn containerDecl(
try gz.setOpaque(decl_inst, .{
.src_node = node,
+ .captures_len = @intCast(namespace.captures.count()),
.decls_len = decl_count,
});
wip_members.finishBits(0);
const decls_slice = wip_members.declsSlice();
- try astgen.extra.ensureUnusedCapacity(gpa, decls_slice.len);
+ try astgen.extra.ensureUnusedCapacity(gpa, namespace.captures.count() + decls_slice.len);
+ astgen.extra.appendSliceAssumeCapacity(@ptrCast(namespace.captures.keys()));
astgen.extra.appendSliceAssumeCapacity(decls_slice);
block_scope.unstack();
- try gz.addNamespaceCaptures(&namespace);
return rvalue(gz, ri, decl_inst.toRef(), node);
},
else => unreachable,
@@ -8238,12 +8244,12 @@ fn localVarRef(
ident_token: Ast.TokenIndex,
) InnerError!Zir.Inst.Ref {
const astgen = gz.astgen;
- const gpa = astgen.gpa;
const name_str_index = try astgen.identAsString(ident_token);
var s = scope;
var found_already: ?Ast.Node.Index = null; // we have found a decl with the same name already
var num_namespaces_out: u32 = 0;
- var capturing_namespace: ?*Scope.Namespace = null;
+ // defined when `num_namespaces_out != 0`
+ var capturing_namespace: *Scope.Namespace = undefined;
while (true) switch (s.tag) {
.local_val => {
const local_val = s.cast(Scope.LocalVal).?;
@@ -8257,15 +8263,14 @@ fn localVarRef(
local_val.used = ident_token;
}
- const value_inst = try tunnelThroughClosure(
+ const value_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
capturing_namespace,
local_val.inst,
local_val.token_src,
- gpa,
- );
+ ) else local_val.inst;
return rvalueNoCoercePreRef(gz, ri, value_inst, ident);
}
@@ -8285,19 +8290,18 @@ fn localVarRef(
const ident_name = try astgen.identifierTokenString(ident_token);
return astgen.failNodeNotes(ident, "mutable '{s}' not accessible from here", .{ident_name}, &.{
try astgen.errNoteTok(local_ptr.token_src, "declared mutable here", .{}),
- try astgen.errNoteNode(capturing_namespace.?.node, "crosses namespace boundary here", .{}),
+ try astgen.errNoteNode(capturing_namespace.node, "crosses namespace boundary here", .{}),
});
}
- const ptr_inst = try tunnelThroughClosure(
+ const ptr_inst = if (num_namespaces_out != 0) try tunnelThroughClosure(
gz,
ident,
num_namespaces_out,
capturing_namespace,
local_ptr.ptr,
local_ptr.token_src,
- gpa,
- );
+ ) else local_ptr.ptr;
switch (ri.rl) {
.ref, .ref_coerced_ty => {
@@ -8314,7 +8318,7 @@ fn localVarRef(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(name_str_index)) |i| {
if (found_already) |f| {
@@ -8326,7 +8330,7 @@ fn localVarRef(
// We found a match but must continue looking for ambiguous references to decls.
found_already = i;
}
- if (s.tag == .namespace) num_namespaces_out += 1;
+ num_namespaces_out += 1;
capturing_namespace = ns;
s = ns.parent;
},
@@ -8348,41 +8352,70 @@ fn localVarRef(
}
}
-/// Adds a capture to a namespace, if needed.
-/// Returns the index of the closure_capture instruction.
+/// Access a ZIR instruction through closure. May tunnel through arbitrarily
+/// many namespaces, adding closure captures as required.
+/// Returns the index of the `closure_get` instruction added to `gz`.
fn tunnelThroughClosure(
gz: *GenZir,
+ /// The node which references the value to be captured.
inner_ref_node: Ast.Node.Index,
+ /// The number of namespaces being tunnelled through. At least 1.
num_tunnels: u32,
- ns: ?*Scope.Namespace,
+ /// The namespace being captured from.
+ ns: *Scope.Namespace,
+ /// The value being captured.
value: Zir.Inst.Ref,
+ /// The token of the value's declaration.
token: Ast.TokenIndex,
- gpa: Allocator,
) !Zir.Inst.Ref {
- // For trivial values, we don't need a tunnel.
- // Just return the ref.
- if (num_tunnels == 0 or value.toIndex() == null) {
+ const value_inst = value.toIndex() orelse {
+ // For trivial values, we don't need a tunnel; just return the ref.
return value;
+ };
+
+ const astgen = gz.astgen;
+ const gpa = astgen.gpa;
+
+ // Otherwise we need a tunnel. First, figure out the path of namespaces we
+ // are tunneling through. This is usually only going to be one or two, so
+ // use an SFBA to optimize for the common case.
+ var sfba = std.heap.stackFallback(@sizeOf(usize) * 2, astgen.arena);
+ var intermediate_tunnels = try sfba.get().alloc(*Scope.Namespace, num_tunnels - 1);
+
+ {
+ var i: usize = num_tunnels - 1;
+ var scope: *Scope = gz.parent;
+ while (i > 0) {
+ if (scope.cast(Scope.Namespace)) |mid_ns| {
+ i -= 1;
+ intermediate_tunnels[i] = mid_ns;
+ }
+ scope = scope.parent().?;
+ }
}
- // Otherwise we need a tunnel. Check if this namespace
- // already has one for this value.
- const gop = try ns.?.captures.getOrPut(gpa, value.toIndex().?);
- if (!gop.found_existing) {
- // Make a new capture for this value but don't add it to the declaring_gz yet
- try gz.astgen.instructions.append(gz.astgen.gpa, .{
- .tag = .closure_capture,
- .data = .{ .un_tok = .{
- .operand = value,
- .src_tok = ns.?.declaring_gz.?.tokenIndexToRelative(token),
- } },
+ // Now that we know the scopes we're tunneling through, begin adding
+ // captures as required, starting with the outermost namespace.
+ var cur_capture_index = std.math.cast(
+ u16,
+ (try ns.captures.getOrPut(gpa, Zir.Inst.Capture.wrap(.{ .inst = value_inst }))).index,
+ ) orelse return astgen.failNodeNotes(ns.node, "this compiler implementation only supports up to 65536 captures per namespace", .{}, &.{
+ try astgen.errNoteTok(token, "captured value here", .{}),
+ try astgen.errNoteNode(inner_ref_node, "value used here", .{}),
+ });
+
+ for (intermediate_tunnels) |tunnel_ns| {
+ cur_capture_index = std.math.cast(
+ u16,
+ (try tunnel_ns.captures.getOrPut(gpa, Zir.Inst.Capture.wrap(.{ .nested = cur_capture_index }))).index,
+ ) orelse return astgen.failNodeNotes(tunnel_ns.node, "this compiler implementation only supports up to 65536 captures per namespace", .{}, &.{
+ try astgen.errNoteTok(token, "captured value here", .{}),
+ try astgen.errNoteNode(inner_ref_node, "value used here", .{}),
});
- gop.value_ptr.* = @enumFromInt(gz.astgen.instructions.len - 1);
}
- // Add an instruction to get the value from the closure into
- // our current context
- return try gz.addInstNode(.closure_get, gop.value_ptr.*, inner_ref_node);
+ // Add an instruction to get the value from the closure.
+ return gz.addExtendedNodeSmall(.closure_get, inner_ref_node, cur_capture_index);
}
fn stringLiteral(
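
Note (not part of the diff): a rough sketch of the source pattern the rewritten tunnel handles. A reference that crosses two container namespaces yields one direct capture in the outermost namespace crossed and one nested capture in the inner one; the concrete index and the `%x` notation below are illustrative only.

    fn example() void {
        const x: u32 = 123; // the value referenced across namespace boundaries
        const Outer = struct {
            // `Outer` is the capturing namespace: its capture list gains
            // `Capture.wrap(.{ .inst = %x })`, i.e. the raw ZIR index of `x`.
            const Inner = struct {
                // `Inner` is an intermediate tunnel: it gains
                // `Capture.wrap(.{ .nested = 0 })`, meaning "entry 0 of the
                // parent namespace's captures". The reference itself lowers
                // to an extended `closure_get` whose `small` field is the
                // index of that entry in `Inner`'s capture list.
                const y = x;
            };
        };
        _ = Outer;
    }
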
@@ -9095,7 +9128,7 @@ fn builtinCall(
},
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
- .namespace, .enum_namespace => {
+ .namespace => {
const ns = s.cast(Scope.Namespace).?;
if (ns.decls.get(decl_name)) |i| {
if (found_already) |f| {
@@ -11605,7 +11638,7 @@ const Scope = struct {
}
if (T == Namespace) {
switch (base.tag) {
- .namespace, .enum_namespace => return @fieldParentPtr(T, "base", base),
+ .namespace => return @fieldParentPtr(T, "base", base),
else => return null,
}
}
@@ -11621,7 +11654,7 @@ const Scope = struct {
.local_val => base.cast(LocalVal).?.parent,
.local_ptr => base.cast(LocalPtr).?.parent,
.defer_normal, .defer_error => base.cast(Defer).?.parent,
- .namespace, .enum_namespace => base.cast(Namespace).?.parent,
+ .namespace => base.cast(Namespace).?.parent,
.top => null,
};
}
@@ -11633,7 +11666,6 @@ const Scope = struct {
defer_normal,
defer_error,
namespace,
- enum_namespace,
top,
};
@@ -11725,9 +11757,8 @@ const Scope = struct {
/// Only valid during astgen.
declaring_gz: ?*GenZir,
- /// Map from the raw captured value to the instruction
- /// ref of the capture for decls in this namespace
- captures: std.AutoArrayHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
+ /// Set of captures used by this namespace.
+ captures: std.AutoArrayHashMapUnmanaged(Zir.Inst.Capture, void) = .{},
fn deinit(self: *Namespace, gpa: Allocator) void {
self.decls.deinit(gpa);
@@ -11787,12 +11818,6 @@ const GenZir = struct {
// Set if this GenZir is a defer or it is inside a defer.
any_defer_node: Ast.Node.Index = 0,
- /// Namespace members are lazy. When executing a decl within a namespace,
- /// any references to external instructions need to be treated specially.
- /// This list tracks those references. See also .closure_capture and .closure_get.
- /// Keys are the raw instruction index, values are the closure_capture instruction.
- captures: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .{},
-
const unstacked_top = std.math.maxInt(usize);
/// Call unstack before adding any new instructions to containing GenZir.
fn unstack(self: *GenZir) void {
@@ -12534,6 +12559,30 @@ const GenZir = struct {
return new_index.toRef();
}
+ fn addExtendedNodeSmall(
+ gz: *GenZir,
+ opcode: Zir.Inst.Extended,
+ src_node: Ast.Node.Index,
+ small: u16,
+ ) !Zir.Inst.Ref {
+ const astgen = gz.astgen;
+ const gpa = astgen.gpa;
+
+ try gz.instructions.ensureUnusedCapacity(gpa, 1);
+ try astgen.instructions.ensureUnusedCapacity(gpa, 1);
+ const new_index: Zir.Inst.Index = @enumFromInt(astgen.instructions.len);
+ astgen.instructions.appendAssumeCapacity(.{
+ .tag = .extended,
+ .data = .{ .extended = .{
+ .opcode = opcode,
+ .small = small,
+ .operand = @bitCast(gz.nodeIndexToRelative(src_node)),
+ } },
+ });
+ gz.instructions.appendAssumeCapacity(new_index);
+ return new_index.toRef();
+ }
+
fn addUnTok(
gz: *GenZir,
tag: Zir.Inst.Tag,
@@ -12957,10 +13006,10 @@ const GenZir = struct {
fn setStruct(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
+ captures_len: u32,
fields_len: u32,
decls_len: u32,
- backing_int_ref: Zir.Inst.Ref,
- backing_int_body_len: u32,
+ has_backing_int: bool,
layout: std.builtin.Type.ContainerLayout,
known_non_opv: bool,
known_comptime_only: bool,
@@ -12978,7 +13027,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.StructDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.StructDecl).Struct.fields.len + 3);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.StructDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -12987,26 +13036,24 @@ const GenZir = struct {
.src_node = gz.nodeIndexToRelative(args.src_node),
});
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.fields_len != 0) {
astgen.extra.appendAssumeCapacity(args.fields_len);
}
if (args.decls_len != 0) {
astgen.extra.appendAssumeCapacity(args.decls_len);
}
- if (args.backing_int_ref != .none) {
- astgen.extra.appendAssumeCapacity(args.backing_int_body_len);
- if (args.backing_int_body_len == 0) {
- astgen.extra.appendAssumeCapacity(@intFromEnum(args.backing_int_ref));
- }
- }
astgen.instructions.set(@intFromEnum(inst), .{
.tag = .extended,
.data = .{ .extended = .{
.opcode = .struct_decl,
.small = @bitCast(Zir.Inst.StructDecl.Small{
+ .has_captures_len = args.captures_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
- .has_backing_int = args.backing_int_ref != .none,
+ .has_backing_int = args.has_backing_int,
.known_non_opv = args.known_non_opv,
.known_comptime_only = args.known_comptime_only,
.is_tuple = args.is_tuple,
@@ -13024,6 +13071,7 @@ const GenZir = struct {
fn setUnion(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
tag_type: Zir.Inst.Ref,
+ captures_len: u32,
body_len: u32,
fields_len: u32,
decls_len: u32,
@@ -13039,7 +13087,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.UnionDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.UnionDecl).Struct.fields.len + 5);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.UnionDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -13051,6 +13099,9 @@ const GenZir = struct {
if (args.tag_type != .none) {
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
}
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.body_len != 0) {
astgen.extra.appendAssumeCapacity(args.body_len);
}
@@ -13066,6 +13117,7 @@ const GenZir = struct {
.opcode = .union_decl,
.small = @bitCast(Zir.Inst.UnionDecl.Small{
.has_tag_type = args.tag_type != .none,
+ .has_captures_len = args.captures_len != 0,
.has_body_len = args.body_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
@@ -13082,6 +13134,7 @@ const GenZir = struct {
fn setEnum(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
tag_type: Zir.Inst.Ref,
+ captures_len: u32,
body_len: u32,
fields_len: u32,
decls_len: u32,
@@ -13095,7 +13148,7 @@ const GenZir = struct {
const fields_hash_arr: [4]u32 = @bitCast(args.fields_hash);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.EnumDecl).Struct.fields.len + 4);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.EnumDecl).Struct.fields.len + 5);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.EnumDecl{
.fields_hash_0 = fields_hash_arr[0],
.fields_hash_1 = fields_hash_arr[1],
@@ -13107,6 +13160,9 @@ const GenZir = struct {
if (args.tag_type != .none) {
astgen.extra.appendAssumeCapacity(@intFromEnum(args.tag_type));
}
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.body_len != 0) {
astgen.extra.appendAssumeCapacity(args.body_len);
}
@@ -13122,6 +13178,7 @@ const GenZir = struct {
.opcode = .enum_decl,
.small = @bitCast(Zir.Inst.EnumDecl.Small{
.has_tag_type = args.tag_type != .none,
+ .has_captures_len = args.captures_len != 0,
.has_body_len = args.body_len != 0,
.has_fields_len = args.fields_len != 0,
.has_decls_len = args.decls_len != 0,
@@ -13135,6 +13192,7 @@ const GenZir = struct {
fn setOpaque(gz: *GenZir, inst: Zir.Inst.Index, args: struct {
src_node: Ast.Node.Index,
+ captures_len: u32,
decls_len: u32,
}) !void {
const astgen = gz.astgen;
@@ -13142,11 +13200,14 @@ const GenZir = struct {
assert(args.src_node != 0);
- try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).Struct.fields.len + 1);
+ try astgen.extra.ensureUnusedCapacity(gpa, @typeInfo(Zir.Inst.OpaqueDecl).Struct.fields.len + 2);
const payload_index = astgen.addExtraAssumeCapacity(Zir.Inst.OpaqueDecl{
.src_node = gz.nodeIndexToRelative(args.src_node),
});
+ if (args.captures_len != 0) {
+ astgen.extra.appendAssumeCapacity(args.captures_len);
+ }
if (args.decls_len != 0) {
astgen.extra.appendAssumeCapacity(args.decls_len);
}
@@ -13155,6 +13216,7 @@ const GenZir = struct {
.data = .{ .extended = .{
.opcode = .opaque_decl,
.small = @bitCast(Zir.Inst.OpaqueDecl.Small{
+ .has_captures_len = args.captures_len != 0,
.has_decls_len = args.decls_len != 0,
.name_strategy = gz.anon_name_strategy,
}),
@@ -13197,15 +13259,6 @@ const GenZir = struct {
}
}
- fn addNamespaceCaptures(gz: *GenZir, namespace: *Scope.Namespace) !void {
- if (namespace.captures.count() > 0) {
- try gz.instructions.ensureUnusedCapacity(gz.astgen.gpa, namespace.captures.count());
- for (namespace.captures.values()) |capture| {
- gz.instructions.appendAssumeCapacity(capture);
- }
- }
- }
-
fn addDbgVar(gz: *GenZir, tag: Zir.Inst.Tag, name: Zir.NullTerminatedString, inst: Zir.Inst.Ref) !void {
if (gz.is_comptime) return;
@@ -13305,7 +13358,7 @@ fn detectLocalShadowing(
}
s = local_ptr.parent;
},
- .namespace, .enum_namespace => {
+ .namespace => {
outer_scope = true;
const ns = s.cast(Scope.Namespace).?;
const decl_node = ns.decls.get(ident_name) orelse {
@@ -13478,7 +13531,7 @@ fn scanDecls(astgen: *AstGen, namespace: *Scope.Namespace, members: []const Ast.
}
s = local_ptr.parent;
},
- .namespace, .enum_namespace => s = s.cast(Scope.Namespace).?.parent,
+ .namespace => s = s.cast(Scope.Namespace).?.parent,
.gen_zir => s = s.cast(GenZir).?.parent,
.defer_normal, .defer_error => s = s.cast(Scope.Defer).?.parent,
.top => break,
lib/std/zig/Zir.zig
@@ -1004,17 +1004,6 @@ pub const Inst = struct {
@"resume",
@"await",
- /// When a type or function refers to a comptime value from an outer
- /// scope, that forms a closure over comptime value. The outer scope
- /// will record a capture of that value, which encodes its current state
- /// and marks it to persist. Uses `un_tok` field. Operand is the
- /// instruction value to capture.
- closure_capture,
- /// The inner scope of a closure uses closure_get to retrieve the value
- /// stored by the outer scope. Uses `inst_node` field. Operand is the
- /// closure_capture instruction ref.
- closure_get,
-
/// A defer statement.
/// Uses the `defer` union field.
@"defer",
@@ -1251,8 +1240,6 @@ pub const Inst = struct {
.@"await",
.ret_err_value_code,
.extended,
- .closure_get,
- .closure_capture,
.ret_ptr,
.ret_type,
.@"try",
@@ -1542,8 +1529,6 @@ pub const Inst = struct {
.@"resume",
.@"await",
.ret_err_value_code,
- .closure_get,
- .closure_capture,
.@"break",
.break_inline,
.condbr,
@@ -1829,9 +1814,6 @@ pub const Inst = struct {
.@"resume" = .un_node,
.@"await" = .un_node,
- .closure_capture = .un_tok,
- .closure_get = .inst_node,
-
.@"defer" = .@"defer",
.defer_err_code = .defer_err_code,
@@ -2074,6 +2056,10 @@ pub const Inst = struct {
/// `operand` is payload index to `RestoreErrRetIndex`.
/// `small` is undefined.
restore_err_ret_index,
+ /// Retrieves a value from the current type declaration scope's closure.
+ /// `operand` is `src_node: i32`.
+    /// `small` is the index into that closure.
+ closure_get,
/// Used as a placeholder instruction which is just a dummy index for Sema to replace
/// with a specific value. For instance, this is used for the capture of an `errdefer`.
/// This should never appear in a body.
@@ -2949,7 +2935,7 @@ pub const Inst = struct {
/// These are stored in trailing data in `extra` for each prong.
pub const ProngInfo = packed struct(u32) {
body_len: u28,
- capture: Capture,
+ capture: ProngInfo.Capture,
is_inline: bool,
has_tag_capture: bool,
@@ -3013,19 +2999,21 @@ pub const Inst = struct {
};
/// Trailing:
- /// 0. fields_len: u32, // if has_fields_len
- /// 1. decls_len: u32, // if has_decls_len
- /// 2. backing_int_body_len: u32, // if has_backing_int
- /// 3. backing_int_ref: Ref, // if has_backing_int and backing_int_body_len is 0
- /// 4. backing_int_body_inst: Inst, // if has_backing_int and backing_int_body_len is > 0
- /// 5. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 6. flags: u32 // for every 8 fields
+ /// 0. captures_len: u32 // if has_captures_len
+ /// 1. fields_len: u32, // if has_fields_len
+ /// 2. decls_len: u32, // if has_decls_len
+ /// 3. capture: Capture // for every captures_len
+ /// 4. backing_int_body_len: u32, // if has_backing_int
+ /// 5. backing_int_ref: Ref, // if has_backing_int and backing_int_body_len is 0
+ /// 6. backing_int_body_inst: Inst, // if has_backing_int and backing_int_body_len is > 0
+ /// 7. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 8. flags: u32 // for every 8 fields
/// - sets of 4 bits:
/// 0b000X: whether corresponding field has an align expression
/// 0b00X0: whether corresponding field has a default expression
/// 0b0X00: whether corresponding field is comptime
/// 0bX000: whether corresponding field has a type expression
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: u32, // if !is_tuple
/// doc_comment: NullTerminatedString, // .empty if no doc comment
/// field_type: Ref, // if corresponding bit is not set. none means anytype.
@@ -3033,7 +3021,7 @@ pub const Inst = struct {
/// align_body_len: u32, // if corresponding bit is set
/// init_body_len: u32, // if corresponding bit is set
/// }
- /// 8. bodies: { // for every fields_len
+ /// 10. bodies: { // for every fields_len
/// field_type_body_inst: Inst, // for each field_type_body_len
/// align_body_inst: Inst, // for each align_body_len
/// init_body_inst: Inst, // for each init_body_len
@@ -3052,6 +3040,7 @@ pub const Inst = struct {
}
pub const Small = packed struct {
+ has_captures_len: bool,
has_fields_len: bool,
has_decls_len: bool,
has_backing_int: bool,
@@ -3063,10 +3052,35 @@ pub const Inst = struct {
any_default_inits: bool,
any_comptime_fields: bool,
any_aligned_fields: bool,
- _: u3 = undefined,
+ _: u2 = undefined,
};
};
+ /// Represents a single value being captured in a type declaration's closure.
+    /// If the high bit is 0, this represents a `Zir.Inst.Index`.
+    /// If the high bit is 1, this represents an index into the parent namespace's capture list.
+ pub const Capture = enum(u32) {
+ _,
+ pub const Unwrapped = union(enum) {
+ inst: Zir.Inst.Index,
+ nested: u16,
+ };
+ pub fn wrap(cap: Unwrapped) Capture {
+ return switch (cap) {
+ .inst => |inst| @enumFromInt(@intFromEnum(inst)),
+ .nested => |idx| @enumFromInt((1 << 31) | @as(u32, idx)),
+ };
+ }
+ pub fn unwrap(cap: Capture) Unwrapped {
+ const raw = @intFromEnum(cap);
+ const tag: u1 = @intCast(raw >> 31);
+ return switch (tag) {
+ 0 => .{ .inst = @enumFromInt(raw) },
+ 1 => .{ .nested = @truncate(raw) },
+ };
+ }
+ };
+
pub const NameStrategy = enum(u2) {
/// Use the same name as the parent declaration name.
/// e.g. `const Foo = struct {...};`.
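
Note (illustrative, not part of the diff): a self-contained mirror of this packing scheme, with plain `u32` standing in for `Zir.Inst.Index`; the encoding assumes instruction indices never occupy the high bit.

    const std = @import("std");

    /// Stand-alone mirror of `Zir.Inst.Capture`: bit 31 is the tag, the low
    /// bits hold either an instruction index or a parent-capture index.
    const Capture = enum(u32) {
        _,

        const Unwrapped = union(enum) {
            inst: u32, // stands in for `Zir.Inst.Index`
            nested: u16,
        };

        fn wrap(cap: Unwrapped) Capture {
            return switch (cap) {
                .inst => |inst| @enumFromInt(inst),
                .nested => |idx| @enumFromInt((1 << 31) | @as(u32, idx)),
            };
        }

        fn unwrap(cap: Capture) Unwrapped {
            const raw = @intFromEnum(cap);
            return if (raw >> 31 == 0)
                .{ .inst = raw }
            else
                .{ .nested = @truncate(raw) };
        }
    };

    test "capture packing round-trips" {
        try std.testing.expectEqual(@as(u32, 42), Capture.wrap(.{ .inst = 42 }).unwrap().inst);
        try std.testing.expectEqual(@as(u16, 3), Capture.wrap(.{ .nested = 3 }).unwrap().nested);
        // The tag bit keeps instruction references and nested references distinct.
        try std.testing.expect(Capture.wrap(.{ .inst = 0 }) != Capture.wrap(.{ .nested = 0 }));
    }
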
@@ -3098,14 +3112,16 @@ pub const Inst = struct {
/// Trailing:
/// 0. tag_type: Ref, // if has_tag_type
- /// 1. body_len: u32, // if has_body_len
- /// 2. fields_len: u32, // if has_fields_len
- /// 3. decls_len: u32, // if has_decls_len
- /// 4. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 5. inst: Index // for every body_len
- /// 6. has_bits: u32 // for every 32 fields
+ /// 1. captures_len: u32, // if has_captures_len
+ /// 2. body_len: u32, // if has_body_len
+ /// 3. fields_len: u32, // if has_fields_len
+ /// 4. decls_len: u32, // if has_decls_len
+ /// 5. capture: Capture // for every captures_len
+ /// 6. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 7. inst: Index // for every body_len
+ /// 8. has_bits: u32 // for every 32 fields
/// - the bit is whether corresponding field has an value expression
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: u32,
/// doc_comment: u32, // .empty if no doc_comment
/// value: Ref, // if corresponding bit is set
@@ -3125,29 +3141,32 @@ pub const Inst = struct {
pub const Small = packed struct {
has_tag_type: bool,
+ has_captures_len: bool,
has_body_len: bool,
has_fields_len: bool,
has_decls_len: bool,
name_strategy: NameStrategy,
nonexhaustive: bool,
- _: u9 = undefined,
+ _: u8 = undefined,
};
};
/// Trailing:
/// 0. tag_type: Ref, // if has_tag_type
- /// 1. body_len: u32, // if has_body_len
- /// 2. fields_len: u32, // if has_fields_len
- /// 3. decls_len: u32, // if has_decls_len
- /// 4. decl: Index, // for every decls_len; points to a `declaration` instruction
- /// 5. inst: Index // for every body_len
- /// 6. has_bits: u32 // for every 8 fields
+ /// 1. captures_len: u32 // if has_captures_len
+ /// 2. body_len: u32, // if has_body_len
+ /// 3. fields_len: u32, // if has_fields_len
+    /// 4. decls_len: u32, // if has_decls_len
+ /// 5. capture: Capture // for every captures_len
+ /// 6. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 7. inst: Index // for every body_len
+ /// 8. has_bits: u32 // for every 8 fields
/// - sets of 4 bits:
/// 0b000X: whether corresponding field has a type expression
/// 0b00X0: whether corresponding field has a align expression
/// 0b0X00: whether corresponding field has a tag value expression
/// 0bX000: unused
- /// 7. fields: { // for every fields_len
+ /// 9. fields: { // for every fields_len
/// field_name: NullTerminatedString, // null terminated string index
/// doc_comment: NullTerminatedString, // .empty if no doc comment
/// field_type: Ref, // if corresponding bit is set
@@ -3170,6 +3189,7 @@ pub const Inst = struct {
pub const Small = packed struct {
has_tag_type: bool,
+ has_captures_len: bool,
has_body_len: bool,
has_fields_len: bool,
has_decls_len: bool,
@@ -3183,13 +3203,15 @@ pub const Inst = struct {
/// true | false | union(T) { }
auto_enum_tag: bool,
any_aligned_fields: bool,
- _: u6 = undefined,
+ _: u5 = undefined,
};
};
/// Trailing:
- /// 0. decls_len: u32, // if has_decls_len
- /// 1. decl: Index, // for every decls_len; points to a `declaration` instruction
+ /// 0. captures_len: u32, // if has_captures_len
+ /// 1. decls_len: u32, // if has_decls_len
+ /// 2. capture: Capture, // for every captures_len
+ /// 3. decl: Index, // for every decls_len; points to a `declaration` instruction
pub const OpaqueDecl = struct {
src_node: i32,
@@ -3198,9 +3220,10 @@ pub const Inst = struct {
}
pub const Small = packed struct {
+ has_captures_len: bool,
has_decls_len: bool,
name_strategy: NameStrategy,
- _: u13 = undefined,
+ _: u12 = undefined,
};
};
@@ -3502,6 +3525,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
.struct_decl => {
const small: Inst.StructDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.StructDecl).Struct.fields.len);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
const decls_len = zir.extra[extra_index];
@@ -3509,6 +3537,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
if (small.has_backing_int) {
const backing_int_body_len = zir.extra[extra_index];
extra_index += 1; // backing_int_body_len
@@ -3529,6 +3559,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
const small: Inst.EnumDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.EnumDecl).Struct.fields.len);
extra_index += @intFromBool(small.has_tag_type);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_body_len);
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
@@ -3537,6 +3572,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
return .{
.extra_index = extra_index,
.decls_remaining = decls_len,
@@ -3547,6 +3584,11 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
const small: Inst.UnionDecl.Small = @bitCast(extended.small);
var extra_index: u32 = @intCast(extended.operand + @typeInfo(Inst.UnionDecl).Struct.fields.len);
extra_index += @intFromBool(small.has_tag_type);
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_body_len);
extra_index += @intFromBool(small.has_fields_len);
const decls_len = if (small.has_decls_len) decls_len: {
@@ -3555,6 +3597,8 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
return .{
.extra_index = extra_index,
.decls_remaining = decls_len,
@@ -3569,6 +3613,13 @@ pub fn declIterator(zir: Zir, decl_inst: Zir.Inst.Index) DeclIterator {
extra_index += 1;
break :decls_len decls_len;
} else 0;
+ const captures_len = if (small.has_captures_len) captures_len: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :captures_len captures_len;
+ } else 0;
+
+ extra_index += captures_len;
return .{
.extra_index = extra_index,
src/Autodoc.zig
@@ -450,7 +450,7 @@ const Scope = struct {
Zir.NullTerminatedString, // index into the current file's string table (decl name)
*DeclStatus,
) = .{},
-
+ captures: []const Zir.Inst.Capture = &.{},
enclosing_type: ?usize, // index into `types`, null = file top-level struct
pub const DeclStatus = union(enum) {
@@ -459,6 +459,14 @@ const Scope = struct {
NotRequested: u32, // instr_index
};
+ fn getCapture(scope: Scope, idx: u16) struct { Zir.Inst.Index, *Scope } {
+ const parent = scope.parent.?;
+ return switch (scope.captures[idx].unwrap()) {
+ .inst => |inst| .{ inst, parent },
+ .nested => |parent_idx| parent.getCapture(parent_idx),
+ };
+ }
+
/// Returns a pointer so that the caller has a chance to modify the value
/// in case they decide to start analyzing a previously not requested decl.
/// Another reason is that in some places we use the pointer to uniquely
@@ -1151,29 +1159,6 @@ fn walkInstruction(
.expr = .{ .comptimeExpr = 0 },
};
},
- .closure_get => {
- const inst_node = data[@intFromEnum(inst)].inst_node;
-
- const code = try self.getBlockSource(file, parent_src, inst_node.src_node);
- const idx = self.comptime_exprs.items.len;
- try self.exprs.append(self.arena, .{ .comptimeExpr = idx });
- try self.comptime_exprs.append(self.arena, .{ .code = code });
-
- return DocData.WalkResult{
- .expr = .{ .comptimeExpr = idx },
- };
- },
- .closure_capture => {
- const un_tok = data[@intFromEnum(inst)].un_tok;
- return try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_tok.operand,
- need_type,
- call_ctx,
- );
- },
.str => {
const str = data[@intFromEnum(inst)].str.get(file.zir);
@@ -3395,11 +3380,23 @@ fn walkInstruction(
.enclosing_type = type_slot_index,
};
+ const small: Zir.Inst.OpaqueDecl.Small = @bitCast(extended.small);
const extra = file.zir.extraData(Zir.Inst.OpaqueDecl, extended.operand);
var extra_index: usize = extra.end;
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = file.zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
+ if (small.has_decls_len) extra_index += 1;
+
+ scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
+ extra_index += captures_len;
+
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
@@ -3503,6 +3500,12 @@ fn walkInstruction(
break :blk tag_ref;
} else null;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = file.zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = file.zir.extra[extra_index];
extra_index += 1;
@@ -3520,6 +3523,11 @@ fn walkInstruction(
else => .{ .enumLiteral = @tagName(small.layout) },
};
+ if (small.has_decls_len) extra_index += 1;
+
+ scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
+ extra_index += captures_len;
+
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
@@ -3631,6 +3639,12 @@ fn walkInstruction(
break :blk wr.expr;
} else null;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = file.zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = file.zir.extra[extra_index];
extra_index += 1;
@@ -3643,6 +3657,11 @@ fn walkInstruction(
break :blk fields_len;
} else 0;
+ if (small.has_decls_len) extra_index += 1;
+
+ scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
+ extra_index += captures_len;
+
var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
@@ -3759,6 +3778,12 @@ fn walkInstruction(
const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = file.zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const fields_len = if (small.has_fields_len) blk: {
const fields_len = file.zir.extra[extra_index];
extra_index += 1;
@@ -3768,6 +3793,9 @@ fn walkInstruction(
// We don't care about decls yet
if (small.has_decls_len) extra_index += 1;
+ scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
+ extra_index += captures_len;
+
var backing_int: ?DocData.Expr = null;
if (small.has_backing_int) {
const backing_int_body_len = file.zir.extra[extra_index];
@@ -4018,6 +4046,10 @@ fn walkInstruction(
.expr = .{ .cmpxchgIndex = cmpxchg_index },
};
},
+ .closure_get => {
+ const captured, const scope = parent_scope.getCapture(extended.small);
+ return self.walkInstruction(file, scope, parent_src, captured, need_type, call_ctx);
+ },
}
},
}
src/InternPool.zig
@@ -1,7 +1,6 @@
//! All interned objects have both a value and a type.
//! This data structure is self-contained, with the following exceptions:
//! * Module.Namespace has a pointer to Module.File
-//! * Module.Decl has a pointer to Module.CaptureScope
/// Maps `Key` to `Index`. `Key` objects are not stored anywhere; they are
/// constructed lazily.
@@ -6395,7 +6394,6 @@ fn finishFuncInstance(
.@"addrspace" = fn_owner_decl.@"addrspace",
.analysis = .complete,
.zir_decl_index = fn_owner_decl.zir_decl_index,
- .src_scope = fn_owner_decl.src_scope,
.is_pub = fn_owner_decl.is_pub,
.is_exported = fn_owner_decl.is_exported,
.alive = true,
@@ -7891,6 +7889,7 @@ pub fn destroyNamespace(ip: *InternPool, gpa: Allocator, index: NamespaceIndex)
.parent = undefined,
.file_scope = undefined,
.decl_index = undefined,
+ .captures = undefined,
};
ip.namespaces_free_list.append(gpa, index) catch {
// In order to keep `destroyNamespace` a non-fallible function, we ignore memory
src/Module.zig
@@ -101,17 +101,6 @@ embed_table: std.StringArrayHashMapUnmanaged(*EmbedFile) = .{},
/// is not yet implemented.
intern_pool: InternPool = .{},
-/// The index type for this array is `CaptureScope.Index` and the elements here are
-/// the indexes of the parent capture scopes.
-/// Memory is owned by gpa; garbage collected.
-capture_scope_parents: std.ArrayListUnmanaged(CaptureScope.Index) = .{},
-/// Value is index of type
-/// Memory is owned by gpa; garbage collected.
-runtime_capture_scopes: std.AutoArrayHashMapUnmanaged(CaptureScope.Key, InternPool.Index) = .{},
-/// Value is index of value
-/// Memory is owned by gpa; garbage collected.
-comptime_capture_scopes: std.AutoArrayHashMapUnmanaged(CaptureScope.Key, InternPool.Index) = .{},
-
/// To be eliminated in a future commit by moving more data into InternPool.
/// Current uses that must be eliminated:
/// * comptime pointer mutation
@@ -305,28 +294,6 @@ pub const Export = struct {
}
};
-pub const CaptureScope = struct {
- pub const Key = extern struct {
- zir_index: Zir.Inst.Index,
- index: Index,
- };
-
- /// Index into `capture_scope_parents` which uniquely identifies a capture scope.
- pub const Index = enum(u32) {
- none = std.math.maxInt(u32),
- _,
-
- pub fn parent(i: Index, mod: *Module) Index {
- return mod.capture_scope_parents.items[@intFromEnum(i)];
- }
- };
-};
-
-pub fn createCaptureScope(mod: *Module, parent: CaptureScope.Index) error{OutOfMemory}!CaptureScope.Index {
- try mod.capture_scope_parents.append(mod.gpa, parent);
- return @enumFromInt(mod.capture_scope_parents.items.len - 1);
-}
-
const ValueArena = struct {
state: std.heap.ArenaAllocator.State,
state_acquired: ?*std.heap.ArenaAllocator.State = null,
@@ -386,9 +353,6 @@ pub const Decl = struct {
/// there is no parent.
src_namespace: Namespace.Index,
- /// The scope which lexically contains this decl.
- src_scope: CaptureScope.Index,
-
/// The AST node index of this declaration.
/// Must be recomputed when the corresponding source file is modified.
src_node: Ast.Node.Index,
@@ -792,11 +756,41 @@ pub const Namespace = struct {
/// These are only declarations named directly by the AST; anonymous
/// declarations are not stored here.
decls: std.ArrayHashMapUnmanaged(Decl.Index, void, DeclContext, true) = .{},
-
/// Key is usingnamespace Decl itself. To find the namespace being included,
/// the Decl Value has to be resolved as a Type which has a Namespace.
/// Value is whether the usingnamespace decl is marked `pub`.
usingnamespace_set: std.AutoHashMapUnmanaged(Decl.Index, bool) = .{},
+ /// Allocated into `gpa`.
+ /// The ordered set of values captured in this type's closure.
+ /// `closure_get` instructions look up values in this list.
+ captures: []CaptureValue,
+
+ /// A single value captured in a container's closure. This is not an
+ /// `InternPool.Index` so we can differentiate between runtime-known values
+ /// (where only the type is comptime-known) and comptime-known values.
+ pub const CaptureValue = enum(u32) {
+ _,
+ pub const Unwrapped = union(enum) {
+ /// Index refers to the value.
+ @"comptime": InternPool.Index,
+ /// Index refers to the type.
+ runtime: InternPool.Index,
+ };
+ pub fn wrap(val: Unwrapped) CaptureValue {
+ return switch (val) {
+ .@"comptime" => |i| @enumFromInt(@intFromEnum(i)),
+ .runtime => |i| @enumFromInt((1 << 31) | @intFromEnum(i)),
+ };
+ }
+ pub fn unwrap(val: CaptureValue) Unwrapped {
+ const tag: u1 = @intCast(@intFromEnum(val) >> 31);
+ const raw = @intFromEnum(val);
+ return switch (tag) {
+ 0 => .{ .@"comptime" = @enumFromInt(raw) },
+ 1 => .{ .runtime = @enumFromInt(@as(u31, @truncate(raw))) },
+ };
+ }
+ };
const Index = InternPool.NamespaceIndex;
const OptionalIndex = InternPool.OptionalNamespaceIndex;
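
Note (hedged illustration, not part of the diff): the kind of source that motivates the two variants. A comptime-known capture can store the value itself, while for a runtime-known value only its type is comptime-known, which is all a nested container may observe; the names below are invented.

    const std = @import("std");

    fn example(runtime_byte: u8) void {
        const comptime_len: usize = 4;
        const S = struct {
            // `comptime_len` is comptime-known, so its capture can resolve to
            // roughly `CaptureValue.wrap(.{ .@"comptime" = <interned usize 4> })`.
            const buf_len = comptime_len;
            // `runtime_byte` is runtime-known; only its type is observable here,
            // roughly `CaptureValue.wrap(.{ .runtime = <InternPool index of u8> })`.
            const Byte = @TypeOf(runtime_byte);
        };
        comptime std.debug.assert(S.buf_len == 4 and S.Byte == u8);
    }

    test {
        example(0xff);
    }
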
@@ -2135,15 +2129,12 @@ pub fn deinit(zcu: *Zcu) void {
while (it.next()) |namespace| {
namespace.decls.deinit(gpa);
namespace.usingnamespace_set.deinit(gpa);
+ gpa.free(namespace.captures);
}
}
zcu.intern_pool.deinit(gpa);
zcu.tmp_hack_arena.deinit();
-
- zcu.capture_scope_parents.deinit(gpa);
- zcu.runtime_capture_scopes.deinit(gpa);
- zcu.comptime_capture_scopes.deinit(gpa);
}
pub fn destroyDecl(mod: *Module, decl_index: Decl.Index) void {
@@ -3362,11 +3353,12 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
.parent = .none,
.decl_index = undefined,
.file_scope = file,
+ .captures = &.{},
});
const new_namespace = mod.namespacePtr(new_namespace_index);
errdefer mod.destroyNamespace(new_namespace_index);
- const new_decl_index = try mod.allocateNewDecl(new_namespace_index, 0, .none);
+ const new_decl_index = try mod.allocateNewDecl(new_namespace_index, 0);
const new_decl = mod.declPtr(new_decl_index);
errdefer @panic("TODO error handling");
@@ -3420,9 +3412,18 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
const struct_ty = sema.getStructType(
new_decl_index,
new_namespace_index,
+ null,
try mod.intern_pool.trackZir(gpa, file, .main_struct_inst),
) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
+ // The following errors are from resolving capture values, but the root
+ // struct of a file has no captures.
+ error.AnalysisFail,
+ error.NeededSourceLocation,
+ error.GenericPoison,
+ error.ComptimeReturn,
+ error.ComptimeBreak,
+ => unreachable,
};
// TODO: figure out InternPool removals for incremental compilation
//errdefer ip.remove(struct_ty);
@@ -3573,7 +3574,6 @@ fn semaDecl(mod: *Module, decl_index: Decl.Index) !SemaDeclResult {
.sema = &sema,
.src_decl = decl_index,
.namespace = decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -4205,7 +4205,7 @@ fn scanDecl(iter: *ScanDeclIter, decl_inst: Zir.Inst.Index) Allocator.Error!void
);
const comp = zcu.comp;
if (!gop.found_existing) {
- const new_decl_index = try zcu.allocateNewDecl(namespace_index, decl_node, iter.parent_decl.src_scope);
+ const new_decl_index = try zcu.allocateNewDecl(namespace_index, decl_node);
const new_decl = zcu.declPtr(new_decl_index);
new_decl.kind = kind;
new_decl.name = decl_name;
@@ -4438,7 +4438,6 @@ pub fn analyzeFnBody(mod: *Module, func_index: InternPool.Index, arena: Allocato
.sema = &sema,
.src_decl = decl_index,
.namespace = decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = false,
@@ -4639,7 +4638,6 @@ pub fn allocateNewDecl(
mod: *Module,
namespace: Namespace.Index,
src_node: Ast.Node.Index,
- src_scope: CaptureScope.Index,
) !Decl.Index {
const ip = &mod.intern_pool;
const gpa = mod.gpa;
@@ -4657,7 +4655,6 @@ pub fn allocateNewDecl(
.@"addrspace" = .generic,
.analysis = .unreferenced,
.zir_decl_index = .none,
- .src_scope = src_scope,
.is_pub = false,
.is_exported = false,
.alive = false,
@@ -4697,17 +4694,16 @@ pub fn errorSetBits(mod: *Module) u16 {
pub fn createAnonymousDecl(mod: *Module, block: *Sema.Block, typed_value: TypedValue) !Decl.Index {
const src_decl = mod.declPtr(block.src_decl);
- return mod.createAnonymousDeclFromDecl(src_decl, block.namespace, block.wip_capture_scope, typed_value);
+ return mod.createAnonymousDeclFromDecl(src_decl, block.namespace, typed_value);
}
pub fn createAnonymousDeclFromDecl(
mod: *Module,
src_decl: *Decl,
namespace: Namespace.Index,
- src_scope: CaptureScope.Index,
tv: TypedValue,
) !Decl.Index {
- const new_decl_index = try mod.allocateNewDecl(namespace, src_decl.src_node, src_scope);
+ const new_decl_index = try mod.allocateNewDecl(namespace, src_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
const name = try mod.intern_pool.getOrPutStringFmt(mod.gpa, "{}__anon_{d}", .{
src_decl.name.fmt(&mod.intern_pool), @intFromEnum(new_decl_index),
@@ -5276,7 +5272,7 @@ pub fn populateTestFunctions(
.len = test_decl_name.len,
.child = .u8_type,
});
- const test_name_decl_index = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, .none, .{
+ const test_name_decl_index = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, .{
.ty = test_name_decl_ty,
.val = Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = test_name_decl_ty.toIntern(),
@@ -5322,7 +5318,7 @@ pub fn populateTestFunctions(
.child = test_fn_ty.toIntern(),
.sentinel = .none,
});
- const array_decl_index = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, .none, .{
+ const array_decl_index = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, .{
.ty = array_decl_ty,
.val = Value.fromInterned((try mod.intern(.{ .aggregate = .{
.ty = array_decl_ty.toIntern(),
src/print_zir.zig
@@ -282,7 +282,6 @@ const Writer = struct {
.ref,
.ret_implicit,
- .closure_capture,
.validate_ref_ty,
=> try self.writeUnTok(stream, inst),
@@ -510,8 +509,6 @@ const Writer = struct {
.dbg_stmt => try self.writeDbgStmt(stream, inst),
- .closure_get => try self.writeInstNode(stream, inst),
-
.@"defer" => try self.writeDefer(stream, inst),
.defer_err_code => try self.writeDeferErrCode(stream, inst),
@@ -611,6 +608,7 @@ const Writer = struct {
.ptr_cast_no_dest => try self.writePtrCastNoDest(stream, extended),
.restore_err_ret_index => try self.writeRestoreErrRetIndex(stream, extended),
+ .closure_get => try self.writeClosureGet(stream, extended),
}
}
@@ -1401,6 +1399,12 @@ const Writer = struct {
var extra_index: usize = extra.end;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = self.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const fields_len = if (small.has_fields_len) blk: {
const fields_len = self.code.extra[extra_index];
extra_index += 1;
@@ -1419,12 +1423,26 @@ const Writer = struct {
try stream.print("{s}, ", .{@tagName(small.name_strategy)});
- if (small.layout == .Packed and small.has_backing_int) {
+ if (captures_len == 0) {
+ try stream.writeAll("{}, ");
+ } else {
+ try stream.writeAll("{ ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ for (1..captures_len) |_| {
+ try stream.writeAll(", ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ }
+ try stream.writeAll(" }, ");
+ }
+
+ if (small.has_backing_int) {
const backing_int_body_len = self.code.extra[extra_index];
extra_index += 1;
try stream.writeAll("Packed(");
if (backing_int_body_len == 0) {
- const backing_int_ref = @as(Zir.Inst.Ref, @enumFromInt(self.code.extra[extra_index]));
+ const backing_int_ref: Zir.Inst.Ref = @enumFromInt(self.code.extra[extra_index]);
extra_index += 1;
try self.writeInstRef(stream, backing_int_ref);
} else {
@@ -1601,6 +1619,12 @@ const Writer = struct {
break :blk tag_type_ref;
} else .none;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = self.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = self.code.extra[extra_index];
extra_index += 1;
@@ -1624,6 +1648,20 @@ const Writer = struct {
});
try self.writeFlag(stream, "autoenum, ", small.auto_enum_tag);
+ if (captures_len == 0) {
+ try stream.writeAll("{}, ");
+ } else {
+ try stream.writeAll("{ ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ for (1..captures_len) |_| {
+ try stream.writeAll(", ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ }
+ try stream.writeAll(" }, ");
+ }
+
if (decls_len == 0) {
try stream.writeAll("{}");
} else {
@@ -1748,6 +1786,12 @@ const Writer = struct {
break :blk tag_type_ref;
} else .none;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = self.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = self.code.extra[extra_index];
extra_index += 1;
@@ -1769,6 +1813,20 @@ const Writer = struct {
try stream.print("{s}, ", .{@tagName(small.name_strategy)});
try self.writeFlag(stream, "nonexhaustive, ", small.nonexhaustive);
+ if (captures_len == 0) {
+ try stream.writeAll("{}, ");
+ } else {
+ try stream.writeAll("{ ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ for (1..captures_len) |_| {
+ try stream.writeAll(", ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ }
+ try stream.writeAll(" }, ");
+ }
+
if (decls_len == 0) {
try stream.writeAll("{}, ");
} else {
@@ -1854,6 +1912,12 @@ const Writer = struct {
const extra = self.code.extraData(Zir.Inst.OpaqueDecl, extended.operand);
var extra_index: usize = extra.end;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = self.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const decls_len = if (small.has_decls_len) blk: {
const decls_len = self.code.extra[extra_index];
extra_index += 1;
@@ -1862,6 +1926,20 @@ const Writer = struct {
try stream.print("{s}, ", .{@tagName(small.name_strategy)});
+ if (captures_len == 0) {
+ try stream.writeAll("{}, ");
+ } else {
+ try stream.writeAll("{ ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ for (1..captures_len) |_| {
+ try stream.writeAll(", ");
+ try self.writeCapture(stream, @enumFromInt(self.code.extra[extra_index]));
+ extra_index += 1;
+ }
+ try stream.writeAll(" }, ");
+ }
+
if (decls_len == 0) {
try stream.writeAll("{})");
} else {
@@ -2706,6 +2784,12 @@ const Writer = struct {
try self.writeSrc(stream, inst_data.src());
}
+ fn writeClosureGet(self: *Writer, stream: anytype, extended: Zir.Inst.Extended.InstData) !void {
+ const src = LazySrcLoc.nodeOffset(@bitCast(extended.operand));
+ try stream.print("{d})) ", .{extended.small});
+ try self.writeSrc(stream, src);
+ }
+
fn writeInstRef(self: *Writer, stream: anytype, ref: Zir.Inst.Ref) !void {
if (ref == .none) {
return stream.writeAll(".none");
@@ -2722,6 +2806,13 @@ const Writer = struct {
return stream.print("%{d}", .{@intFromEnum(inst)});
}
+ fn writeCapture(self: *Writer, stream: anytype, capture: Zir.Inst.Capture) !void {
+ switch (capture.unwrap()) {
+ .inst => |inst| return self.writeInstIndex(stream, inst),
+ .nested => |i| return stream.print("[{d}]", .{i}),
+ }
+ }
+
fn writeOptionalInstRef(
self: *Writer,
stream: anytype,
src/Sema.zig
@@ -155,7 +155,6 @@ const Namespace = Module.Namespace;
const CompileError = Module.CompileError;
const SemaError = Module.SemaError;
const Decl = Module.Decl;
-const CaptureScope = Module.CaptureScope;
const LazySrcLoc = std.zig.LazySrcLoc;
const RangeSet = @import("RangeSet.zig");
const target_util = @import("target.zig");
@@ -331,8 +330,6 @@ pub const Block = struct {
/// used to add a `func_instance` into the `InternPool`.
params: std.MultiArrayList(Param) = .{},
- wip_capture_scope: CaptureScope.Index,
-
label: ?*Label = null,
inlining: ?*Inlining,
/// If runtime_index is not 0 then one of these is guaranteed to be non null.
@@ -475,7 +472,6 @@ pub const Block = struct {
.src_decl = parent.src_decl,
.namespace = parent.namespace,
.instructions = .{},
- .wip_capture_scope = parent.wip_capture_scope,
.label = null,
.inlining = parent.inlining,
.is_comptime = parent.is_comptime,
@@ -974,12 +970,6 @@ fn analyzeBodyInner(
try sema.inst_map.ensureSpaceForInstructions(sema.gpa, body);
- // Most of the time, we don't need to construct a new capture scope for a
- // block. However, successive iterations of comptime loops can capture
- // different values for the same Zir.Inst.Index, so in those cases, we will
- // have to create nested capture scopes; see the `.repeat` case below.
- const parent_capture_scope = block.wip_capture_scope;
-
const mod = sema.mod;
const map = &sema.inst_map;
const tags = sema.code.instructions.items(.tag);
@@ -1028,7 +1018,6 @@ fn analyzeBodyInner(
.c_import => try sema.zirCImport(block, inst),
.call => try sema.zirCall(block, inst, .direct),
.field_call => try sema.zirCall(block, inst, .field),
- .closure_get => try sema.zirClosureGet(block, inst),
.cmp_lt => try sema.zirCmp(block, inst, .lt),
.cmp_lte => try sema.zirCmp(block, inst, .lte),
.cmp_eq => try sema.zirCmpEq(block, inst, .eq, Air.Inst.Tag.fromCmpOp(.eq, block.float_mode == .Optimized)),
@@ -1275,6 +1264,7 @@ fn analyzeBodyInner(
.work_group_size => try sema.zirWorkItem( block, extended, extended.opcode),
.work_group_id => try sema.zirWorkItem( block, extended, extended.opcode),
.in_comptime => try sema.zirInComptime( block),
+ .closure_get => try sema.zirClosureGet( block, extended),
// zig fmt: on
.fence => {
@@ -1453,11 +1443,6 @@ fn analyzeBodyInner(
i += 1;
continue;
},
- .closure_capture => {
- try sema.zirClosureCapture(block, inst);
- i += 1;
- continue;
- },
.memcpy => {
try sema.zirMemcpy(block, inst);
i += 1;
@@ -1534,11 +1519,6 @@ fn analyzeBodyInner(
// Send comptime control flow back to the beginning of this block.
const src = LazySrcLoc.nodeOffset(datas[@intFromEnum(inst)].node);
try sema.emitBackwardBranch(block, src);
-
- // We need to construct new capture scopes for the next loop iteration so it
- // can capture values without clobbering the earlier iteration's captures.
- block.wip_capture_scope = try mod.createCaptureScope(parent_capture_scope);
-
i = 0;
continue;
} else {
@@ -1552,11 +1532,6 @@ fn analyzeBodyInner(
// Send comptime control flow back to the beginning of this block.
const src = LazySrcLoc.nodeOffset(datas[@intFromEnum(inst)].node);
try sema.emitBackwardBranch(block, src);
-
- // We need to construct new capture scopes for the next loop iteration so it
- // can capture values without clobbering the earlier iteration's captures.
- block.wip_capture_scope = try mod.createCaptureScope(parent_capture_scope);
-
i = 0;
continue;
},
@@ -1855,10 +1830,6 @@ fn analyzeBodyInner(
map.putAssumeCapacity(inst, air_inst);
i += 1;
}
-
- // We may have overwritten the capture scope due to a `repeat` instruction where
- // the body had a capture; restore it now.
- block.wip_capture_scope = parent_capture_scope;
}
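The comments deleted from `analyzeBodyInner` explain why per-iteration capture scopes existed at all: a comptime loop can re-run the same struct declaration with a different captured value each time, so the old scheme had to allocate a nested scope per `repeat`. That situation is ordinary user code, not compiler internals; an illustrative example of the scenario those comments describe:

```zig
const std = @import("std");

test "each unrolled iteration captures its own value" {
    var sum: usize = 0;
    inline for (0..3) |i| {
        // A fresh struct type per iteration; `captured` closes over the
        // comptime-known loop value, which differs for the same source decl.
        const T = struct {
            const captured = i;
        };
        sum += T.captured;
    }
    try std.testing.expectEqual(@as(usize, 3), sum);
}
```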
pub fn resolveInstAllowNone(sema: *Sema, zir_ref: Zir.Inst.Ref) !Air.Inst.Ref {
@@ -2698,10 +2669,41 @@ fn analyzeAsInt(
return (try val.getUnsignedIntAdvanced(mod, sema)).?;
}
+/// Given a ZIR extra index which points to a list of `Zir.Inst.Capture`,
+/// resolves this into a list of `Namespace.CaptureValue` allocated by `gpa`.
+/// Caller owns returned memory.
+fn getCaptures(sema: *Sema, parent_namespace: ?InternPool.NamespaceIndex, extra_index: usize, captures_len: u32) ![]Namespace.CaptureValue {
+ const gpa = sema.gpa;
+ const parent_captures: []const Namespace.CaptureValue = if (parent_namespace) |p| parent: {
+ break :parent sema.mod.namespacePtr(p).captures;
+ } else &.{};
+
+ const captures = try gpa.alloc(Namespace.CaptureValue, captures_len);
+ errdefer gpa.free(captures);
+
+ for (sema.code.extra[extra_index..][0..captures_len], captures) |raw, *capture| {
+ const zir_capture: Zir.Inst.Capture = @enumFromInt(raw);
+ capture.* = switch (zir_capture.unwrap()) {
+ .inst => |inst| Namespace.CaptureValue.wrap(capture: {
+ const air_ref = try sema.resolveInst(inst.toRef());
+ if (try sema.resolveValue(air_ref)) |val| {
+ break :capture .{ .@"comptime" = val.toIntern() };
+ }
+ break :capture .{ .runtime = sema.typeOf(air_ref).toIntern() };
+ }),
+ .nested => |parent_idx| parent_captures[parent_idx],
+ };
+ }
+
+ return captures;
+}
+
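`Namespace.CaptureValue` itself is defined in Module.zig and does not appear in this hunk; its shape is implied by the two `break :capture` branches above. A standalone sketch of that shape, with plain `u32` standing in for `InternPool.Index` and the compiler's compact `wrap`/`unwrap` handle left out:

```zig
const std = @import("std");

/// Stand-in for InternPool.Index in this sketch.
const Index = u32;

/// A resolved capture either pins down a comptime value, or records only the
/// type when the captured operand was runtime-known.
const CaptureValue = union(enum) {
    @"comptime": Index, // interned value
    runtime: Index, // interned type
};

test "a capture is a value or just a type" {
    const resolved: CaptureValue = .{ .@"comptime" = 1 };
    const runtime_only: CaptureValue = .{ .runtime = 2 };
    try std.testing.expect(std.meta.activeTag(resolved) == .@"comptime");
    try std.testing.expect(std.meta.activeTag(runtime_only) == .runtime);
}
```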
pub fn getStructType(
sema: *Sema,
decl: InternPool.DeclIndex,
namespace: InternPool.NamespaceIndex,
+ /// The direct parent Namespace for resolving nested capture values.
+ parent_namespace: ?InternPool.NamespaceIndex,
tracked_inst: InternPool.TrackedInst.Index,
) !InternPool.Index {
const mod = sema.mod;
@@ -2713,6 +2715,11 @@ pub fn getStructType(
const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small);
var extra_index: usize = extended.operand + @typeInfo(Zir.Inst.StructDecl).Struct.fields.len;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = sema.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
const fields_len = if (small.has_fields_len) blk: {
const fields_len = sema.code.extra[extra_index];
extra_index += 1;
@@ -2724,6 +2731,9 @@ pub fn getStructType(
break :blk decls_len;
} else 0;
+ mod.namespacePtr(namespace).captures = try sema.getCaptures(parent_namespace, extra_index, captures_len);
+ extra_index += captures_len;
+
if (small.has_backing_int) {
const backing_int_body_len = sema.code.extra[extra_index];
extra_index += 1; // backing_int_body_len
@@ -2791,12 +2801,13 @@ fn zirStructDecl(
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = &.{}, // Will be set by `getStructType`
});
errdefer mod.destroyNamespace(new_namespace_index);
const struct_ty = ty: {
const tracked_inst = try ip.trackZir(mod.gpa, block.getFileScope(mod), inst);
- const ty = try sema.getStructType(new_decl_index, new_namespace_index, tracked_inst);
+ const ty = try sema.getStructType(new_decl_index, new_namespace_index, block.namespace, tracked_inst);
if (sema.builtin_type_target_index != .none) {
ip.resolveBuiltinType(sema.builtin_type_target_index, ty);
break :ty sema.builtin_type_target_index;
@@ -2827,10 +2838,9 @@ fn createAnonymousDeclTypeNamed(
const ip = &mod.intern_pool;
const gpa = sema.gpa;
const namespace = block.namespace;
- const src_scope = block.wip_capture_scope;
const src_decl = mod.declPtr(block.src_decl);
const src_node = src_decl.relativeToNodeIndex(src.node_offset.x);
- const new_decl_index = try mod.allocateNewDecl(namespace, src_node, src_scope);
+ const new_decl_index = try mod.allocateNewDecl(namespace, src_node);
errdefer mod.destroyDecl(new_decl_index);
switch (name_strategy) {
@@ -2935,6 +2945,12 @@ fn zirEnumDecl(
break :blk tag_type_ref;
} else .none;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = sema.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = sema.code.extra[extra_index];
extra_index += 1;
@@ -2974,10 +2990,14 @@ fn zirEnumDecl(
);
}
+ const captures = try sema.getCaptures(block.namespace, extra_index, captures_len);
+ extra_index += captures_len;
+
const new_namespace_index = try mod.createNamespace(.{
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = captures,
});
errdefer if (!done) mod.destroyNamespace(new_namespace_index);
@@ -3054,7 +3074,6 @@ fn zirEnumDecl(
.sema = sema,
.src_decl = new_decl_index,
.namespace = new_namespace_index,
- .wip_capture_scope = try mod.createCaptureScope(new_decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -3197,6 +3216,11 @@ fn zirUnionDecl(
const src = extra.data.src();
extra_index += @intFromBool(small.has_tag_type);
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = sema.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_body_len);
const fields_len = if (small.has_fields_len) blk: {
const fields_len = sema.code.extra[extra_index];
@@ -3230,10 +3254,14 @@ fn zirUnionDecl(
);
}
+ const captures = try sema.getCaptures(block.namespace, extra_index, captures_len);
+ extra_index += captures_len;
+
const new_namespace_index = try mod.createNamespace(.{
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = captures,
});
errdefer mod.destroyNamespace(new_namespace_index);
@@ -3300,6 +3328,12 @@ fn zirOpaqueDecl(
const src = extra.data.src();
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = sema.code.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const decls_len = if (small.has_decls_len) blk: {
const decls_len = sema.code.extra[extra_index];
extra_index += 1;
@@ -3326,10 +3360,14 @@ fn zirOpaqueDecl(
);
}
+ const captures = try sema.getCaptures(block.namespace, extra_index, captures_len);
+ extra_index += captures_len;
+
const new_namespace_index = try mod.createNamespace(.{
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = captures,
});
errdefer mod.destroyNamespace(new_namespace_index);
@@ -5780,7 +5818,6 @@ fn zirCImport(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileEr
.sema = sema,
.src_decl = parent_block.src_decl,
.namespace = parent_block.namespace,
- .wip_capture_scope = parent_block.wip_capture_scope,
.instructions = .{},
.inlining = parent_block.inlining,
.is_comptime = true,
@@ -5900,7 +5937,6 @@ fn zirBlock(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index, force_compt
.sema = sema,
.src_decl = parent_block.src_decl,
.namespace = parent_block.namespace,
- .wip_capture_scope = parent_block.wip_capture_scope,
.instructions = .{},
.label = &label,
.inlining = parent_block.inlining,
@@ -7515,7 +7551,6 @@ fn analyzeCall(
.sema = sema,
.src_decl = module_fn.owner_decl,
.namespace = fn_owner_decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(fn_owner_decl.src_scope),
.instructions = .{},
.label = null,
.inlining = &inlining,
@@ -8036,7 +8071,6 @@ fn instantiateGenericCall(
.sema = &child_sema,
.src_decl = generic_owner_func.owner_decl,
.namespace = namespace_index,
- .wip_capture_scope = try mod.createCaptureScope(fn_owner_decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -11409,7 +11443,6 @@ fn zirSwitchBlockErrUnion(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp
.sema = sema,
.src_decl = block.src_decl,
.namespace = block.namespace,
- .wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.label = &label,
.inlining = block.inlining,
@@ -12117,7 +12150,6 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
.sema = sema,
.src_decl = block.src_decl,
.namespace = block.namespace,
- .wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.label = &label,
.inlining = block.inlining,
@@ -12281,7 +12313,6 @@ fn analyzeSwitchRuntimeBlock(
extra_index += info.body_len;
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = try mod.createCaptureScope(child_block.wip_capture_scope);
const item = case_vals.items[scalar_i];
// `item` is already guaranteed to be constant known.
@@ -12339,7 +12370,6 @@ fn analyzeSwitchRuntimeBlock(
case_val_idx += items_len;
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
// Generate all possible cases as scalar prongs.
if (info.is_inline) {
@@ -12371,7 +12401,6 @@ fn analyzeSwitchRuntimeBlock(
const item_ref = Air.internedToRef(item.toIntern());
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
if (emit_bb) sema.emitBackwardBranch(block, .unneeded) catch |err| switch (err) {
error.NeededSourceLocation => {
@@ -12411,7 +12440,6 @@ fn analyzeSwitchRuntimeBlock(
cases_len += 1;
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
const analyze_body = if (union_originally) blk: {
const item_val = sema.resolveConstDefinedValue(block, .unneeded, item, undefined) catch unreachable;
@@ -12557,7 +12585,6 @@ fn analyzeSwitchRuntimeBlock(
defer gpa.free(cond_body);
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = try mod.createCaptureScope(child_block.wip_capture_scope);
const body = sema.code.bodySlice(extra_index, info.body_len);
extra_index += info.body_len;
@@ -12618,7 +12645,6 @@ fn analyzeSwitchRuntimeBlock(
const item_ref = Air.internedToRef(item_val.toIntern());
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
const analyze_body = if (union_originally) blk: {
const field_ty = maybe_union_ty.unionFieldType(item_val, mod).?;
@@ -12669,7 +12695,6 @@ fn analyzeSwitchRuntimeBlock(
const item_ref = Air.internedToRef(item_val);
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
if (emit_bb) try sema.emitBackwardBranch(block, special_prong_src);
emit_bb = true;
@@ -12700,7 +12725,6 @@ fn analyzeSwitchRuntimeBlock(
const item_ref = Air.internedToRef(cur);
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
if (emit_bb) try sema.emitBackwardBranch(block, special_prong_src);
emit_bb = true;
@@ -12728,7 +12752,6 @@ fn analyzeSwitchRuntimeBlock(
cases_len += 1;
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
if (emit_bb) try sema.emitBackwardBranch(block, special_prong_src);
emit_bb = true;
@@ -12754,7 +12777,6 @@ fn analyzeSwitchRuntimeBlock(
cases_len += 1;
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = child_block.wip_capture_scope;
if (emit_bb) try sema.emitBackwardBranch(block, special_prong_src);
emit_bb = true;
@@ -12783,7 +12805,6 @@ fn analyzeSwitchRuntimeBlock(
};
case_block.instructions.shrinkRetainingCapacity(0);
- case_block.wip_capture_scope = try mod.createCaptureScope(child_block.wip_capture_scope);
if (mod.backendSupportsFeature(.is_named_enum_value) and
special.body.len != 0 and block.wantSafety() and
@@ -17264,49 +17285,16 @@ fn zirThis(
return sema.analyzeDeclVal(block, src, this_decl_index);
}
-fn zirClosureCapture(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void {
+fn zirClosureGet(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) CompileError!Air.Inst.Ref {
const mod = sema.mod;
- const gpa = sema.gpa;
- const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].un_tok;
- // Closures are not necessarily constant values. For example, the
- // code might do something like this:
- // fn foo(x: anytype) void { const S = struct {field: @TypeOf(x)}; }
- // ...in which case the closure_capture instruction has access to a runtime
- // value only. In such case only the type is saved into the scope.
- const operand = try sema.resolveInst(inst_data.operand);
- const ty = sema.typeOf(operand);
- const key: CaptureScope.Key = .{
- .zir_index = inst,
- .index = block.wip_capture_scope,
- };
- if (try sema.resolveValue(operand)) |val| {
- try mod.comptime_capture_scopes.put(gpa, key, try val.intern(ty, mod));
- } else {
- try mod.runtime_capture_scopes.put(gpa, key, ty.toIntern());
- }
-}
+ const captures = mod.namespacePtr(block.namespace).captures;
-fn zirClosureGet(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Inst.Ref {
- const mod = sema.mod;
- //const ip = &mod.intern_pool;
- const inst_data = sema.code.instructions.items(.data)[@intFromEnum(inst)].inst_node;
- var scope: CaptureScope.Index = mod.declPtr(block.src_decl).src_scope;
- assert(scope != .none);
- // Note: The target closure must be in this scope list.
- // If it's not here, the zir is invalid, or the list is broken.
- const capture_ty = while (true) {
- // Note: We don't need to add a dependency here, because
- // decls always depend on their lexical parents.
- const key: CaptureScope.Key = .{
- .zir_index = inst_data.inst,
- .index = scope,
- };
- if (mod.comptime_capture_scopes.get(key)) |val|
- return Air.internedToRef(val);
- if (mod.runtime_capture_scopes.get(key)) |ty|
- break ty;
- scope = scope.parent(mod);
- assert(scope != .none);
+ const src_node: i32 = @bitCast(extended.operand);
+ const src = LazySrcLoc.nodeOffset(src_node);
+
+ const capture_ty = switch (captures[extended.small].unwrap()) {
+ .@"comptime" => |index| return Air.internedToRef(index),
+ .runtime => |index| index,
};
// The comptime case is handled already above. Runtime case below.
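The comment removed above, with its example `fn foo(x: anytype) void { const S = struct {field: @TypeOf(x)}; }`, still describes the runtime case that follows; only the storage moved from capture scopes to the namespace, which now records just the captured operand's type when no comptime value is available. A user-level illustration of that shape, where the capture is only ever consumed through `@TypeOf` (function and test names here are made up for the example):

```zig
const std = @import("std");

// Mirrors the removed comment's example: `x` is runtime-known inside the
// function, so the inner struct's capture carries only its type, which is
// all `@TypeOf(x)` needs.
fn sizeOfArg(x: anytype) usize {
    const S = struct { field: @TypeOf(x) };
    return @sizeOf(S);
}

test "a runtime operand still yields a usable captured type" {
    var x: u64 = 0;
    x += 1; // keep x runtime-known
    try std.testing.expectEqual(@as(usize, @sizeOf(u64)), sizeOfArg(x));
}
```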
@@ -17322,15 +17310,15 @@ fn zirClosureGet(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
});
break :name null;
};
- const node = sema.owner_decl.relativeToNodeIndex(inst_data.src_node);
+ const node = sema.owner_decl.relativeToNodeIndex(src_node);
const token = tree.nodes.items(.main_token)[node];
break :name tree.tokenSlice(token);
};
const msg = if (name) |some|
- try sema.errMsg(block, inst_data.src(), "'{s}' not accessible outside function scope", .{some})
+ try sema.errMsg(block, src, "'{s}' not accessible outside function scope", .{some})
else
- try sema.errMsg(block, inst_data.src(), "variable not accessible outside function scope", .{});
+ try sema.errMsg(block, src, "variable not accessible outside function scope", .{});
errdefer msg.destroy(sema.gpa);
// TODO add "declared here" note
@@ -17350,15 +17338,15 @@ fn zirClosureGet(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
});
break :name null;
};
- const node = sema.owner_decl.relativeToNodeIndex(inst_data.src_node);
+ const node = sema.owner_decl.relativeToNodeIndex(src_node);
const token = tree.nodes.items(.main_token)[node];
break :name tree.tokenSlice(token);
};
const msg = if (name) |some|
- try sema.errMsg(block, inst_data.src(), "'{s}' not accessible from inner function", .{some})
+ try sema.errMsg(block, src, "'{s}' not accessible from inner function", .{some})
else
- try sema.errMsg(block, inst_data.src(), "variable not accessible from inner function", .{});
+ try sema.errMsg(block, src, "variable not accessible from inner function", .{});
errdefer msg.destroy(sema.gpa);
try sema.errNote(block, LazySrcLoc.nodeOffset(0), msg, "crossed function definition here", .{});
@@ -18631,7 +18619,6 @@ fn zirTypeofBuiltin(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
.sema = sema,
.src_decl = block.src_decl,
.namespace = block.namespace,
- .wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.inlining = block.inlining,
.is_comptime = false,
@@ -18710,7 +18697,6 @@ fn zirTypeofPeer(
.sema = sema,
.src_decl = block.src_decl,
.namespace = block.namespace,
- .wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.inlining = block.inlining,
.is_comptime = false,
@@ -19186,7 +19172,6 @@ fn ensurePostHoc(sema: *Sema, block: *Block, dest_block: Zir.Inst.Index) !*Label
.sema = sema,
.src_decl = block.src_decl,
.namespace = block.namespace,
- .wip_capture_scope = block.wip_capture_scope,
.instructions = .{},
.label = &labeled_block.label,
.inlining = block.inlining,
@@ -21462,6 +21447,7 @@ fn zirReify(
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = &.{},
});
errdefer mod.destroyNamespace(new_namespace_index);
@@ -21670,6 +21656,7 @@ fn zirReify(
.parent = block.namespace.toOptional(),
.decl_index = new_decl_index,
.file_scope = block.getFileScope(mod),
+ .captures = &.{},
});
errdefer mod.destroyNamespace(new_namespace_index);
@@ -25919,7 +25906,7 @@ fn zirBuiltinExtern(
// TODO check duplicate extern
- const new_decl_index = try mod.allocateNewDecl(sema.owner_decl.src_namespace, sema.owner_decl.src_node, .none);
+ const new_decl_index = try mod.allocateNewDecl(sema.owner_decl.src_namespace, sema.owner_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
const new_decl = mod.declPtr(new_decl_index);
new_decl.name = options.name;
@@ -26515,7 +26502,6 @@ fn addSafetyCheck(
.sema = sema,
.src_decl = parent_block.src_decl,
.namespace = parent_block.namespace,
- .wip_capture_scope = parent_block.wip_capture_scope,
.instructions = .{},
.inlining = parent_block.inlining,
.is_comptime = false,
@@ -26624,7 +26610,6 @@ fn panicUnwrapError(
.sema = sema,
.src_decl = parent_block.src_decl,
.namespace = parent_block.namespace,
- .wip_capture_scope = parent_block.wip_capture_scope,
.instructions = .{},
.inlining = parent_block.inlining,
.is_comptime = false,
@@ -26741,7 +26726,6 @@ fn safetyCheckFormatted(
.sema = sema,
.src_decl = parent_block.src_decl,
.namespace = parent_block.namespace,
- .wip_capture_scope = parent_block.wip_capture_scope,
.instructions = .{},
.inlining = parent_block.inlining,
.is_comptime = false,
@@ -35766,7 +35750,6 @@ fn semaBackingIntType(mod: *Module, struct_type: InternPool.Key.StructType) Comp
.sema = &sema,
.src_decl = decl_index,
.namespace = struct_type.namespace.unwrap() orelse decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -35789,9 +35772,16 @@ fn semaBackingIntType(mod: *Module, struct_type: InternPool.Key.StructType) Comp
if (small.has_backing_int) {
var extra_index: usize = extended.operand + @typeInfo(Zir.Inst.StructDecl).Struct.fields.len;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
extra_index += @intFromBool(small.has_fields_len);
extra_index += @intFromBool(small.has_decls_len);
+ extra_index += captures_len;
+
const backing_int_body_len = zir.extra[extra_index];
extra_index += 1;
@@ -36500,6 +36490,12 @@ fn structZirInfo(zir: Zir, zir_index: Zir.Inst.Index) struct {
const small: Zir.Inst.StructDecl.Small = @bitCast(extended.small);
var extra_index: usize = extended.operand + @typeInfo(Zir.Inst.StructDecl).Struct.fields.len;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const fields_len = if (small.has_fields_len) blk: {
const fields_len = zir.extra[extra_index];
extra_index += 1;
@@ -36512,6 +36508,8 @@ fn structZirInfo(zir: Zir, zir_index: Zir.Inst.Index) struct {
break :decls_len decls_len;
} else 0;
+ extra_index += captures_len;
+
// The backing integer cannot be handled until `resolveStructLayout()`.
if (small.has_backing_int) {
const backing_int_body_len = zir.extra[extra_index];
@@ -36584,7 +36582,6 @@ fn semaStructFields(
.sema = &sema,
.src_decl = decl_index,
.namespace = namespace_index,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -36842,7 +36839,6 @@ fn semaStructFieldInits(
.sema = &sema,
.src_decl = decl_index,
.namespace = namespace_index,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -36974,6 +36970,12 @@ fn semaUnionFields(mod: *Module, arena: Allocator, union_type: InternPool.Key.Un
break :blk ty_ref;
} else .none;
+ const captures_len = if (small.has_captures_len) blk: {
+ const captures_len = zir.extra[extra_index];
+ extra_index += 1;
+ break :blk captures_len;
+ } else 0;
+
const body_len = if (small.has_body_len) blk: {
const body_len = zir.extra[extra_index];
extra_index += 1;
@@ -36992,8 +36994,8 @@ fn semaUnionFields(mod: *Module, arena: Allocator, union_type: InternPool.Key.Un
break :decls_len decls_len;
} else 0;
- // Skip over decls.
- extra_index += decls_len;
+ // Skip over captures and decls.
+ extra_index += captures_len + decls_len;
const body = zir.bodySlice(extra_index, body_len);
extra_index += body.len;
@@ -37028,7 +37030,6 @@ fn semaUnionFields(mod: *Module, arena: Allocator, union_type: InternPool.Key.Un
.sema = &sema,
.src_decl = decl_index,
.namespace = union_type.namespace,
- .wip_capture_scope = try mod.createCaptureScope(decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -37372,7 +37373,7 @@ fn generateUnionTagTypeNumbered(
const ip = &mod.intern_pool;
const src_decl = mod.declPtr(block.src_decl);
- const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node, block.wip_capture_scope);
+ const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
const fqn = try decl.fullyQualifiedName(mod);
const name = try ip.getOrPutStringFmt(gpa, "@typeInfo({}).Union.tag_type.?", .{fqn.fmt(ip)});
@@ -37425,7 +37426,7 @@ fn generateUnionTagTypeSimple(
};
const fqn = try mod.declPtr(decl_index).fullyQualifiedName(mod);
const src_decl = mod.declPtr(block.src_decl);
- const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node, block.wip_capture_scope);
+ const new_decl_index = try mod.allocateNewDecl(block.namespace, src_decl.src_node);
errdefer mod.destroyDecl(new_decl_index);
const name = try ip.getOrPutStringFmt(gpa, "@typeInfo({}).Union.tag_type.?", .{fqn.fmt(ip)});
try mod.initNewAnonDecl(new_decl_index, src_decl.src_line, .{
@@ -37460,7 +37461,6 @@ fn generateUnionTagTypeSimple(
}
fn getBuiltin(sema: *Sema, name: []const u8) CompileError!Air.Inst.Ref {
- const mod = sema.mod;
const gpa = sema.gpa;
const src = LazySrcLoc.nodeOffset(0);
@@ -37469,7 +37469,6 @@ fn getBuiltin(sema: *Sema, name: []const u8) CompileError!Air.Inst.Ref {
.sema = sema,
.src_decl = sema.owner_decl_index,
.namespace = sema.owner_decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(sema.owner_decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,
@@ -37510,7 +37509,6 @@ fn getBuiltinDecl(sema: *Sema, block: *Block, name: []const u8) CompileError!Int
}
fn getBuiltinType(sema: *Sema, name: []const u8) CompileError!Type {
- const mod = sema.mod;
const ty_inst = try sema.getBuiltin(name);
var block: Block = .{
@@ -37518,7 +37516,6 @@ fn getBuiltinType(sema: *Sema, name: []const u8) CompileError!Type {
.sema = sema,
.src_decl = sema.owner_decl_index,
.namespace = sema.owner_decl.src_namespace,
- .wip_capture_scope = try mod.createCaptureScope(sema.owner_decl.src_scope),
.instructions = .{},
.inlining = null,
.is_comptime = true,