Commit 064b355912
Changed files (4)
src/codegen/c/type.zig
@@ -110,10 +110,16 @@ pub const CType = extern union {
pointer_const_volatile,
array,
vector,
+ fwd_anon_struct,
+ fwd_anon_union,
fwd_struct,
fwd_union,
+ unnamed_struct,
+ unnamed_union,
+ packed_unnamed_struct,
+ packed_unnamed_union,
anon_struct,
- packed_anon_struct,
+ anon_union,
@"struct",
@"union",
packed_struct,
@@ -183,14 +189,22 @@ pub const CType = extern union {
.vector,
=> Payload.Sequence,
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => Payload.Fields,
+
.fwd_struct,
.fwd_union,
=> Payload.FwdDecl,
- .anon_struct,
- .packed_anon_struct,
- => Payload.Fields,
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => Payload.Unnamed,
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
@@ -229,14 +243,55 @@ pub const CType = extern union {
base: Payload,
data: Data,
- const Data = []const Field;
- const Field = struct {
+ pub const Data = []const Field;
+ pub const Field = struct {
name: [*:0]const u8,
type: Index,
- alignas: u32,
+ alignas: AlignAs,
+ };
+ pub const AlignAs = struct {
+ @"align": std.math.Log2Int(u32),
+ abi: std.math.Log2Int(u32),
+
+ pub fn init(alignment: u32, abi_alignment: u32) AlignAs {
+ assert(std.math.isPowerOfTwo(alignment));
+ assert(std.math.isPowerOfTwo(abi_alignment));
+ return .{
+ .@"align" = std.math.log2_int(u32, alignment),
+ .abi = std.math.log2_int(u32, abi_alignment),
+ };
+ }
+ pub fn abiAlign(ty: Type, target: Target) AlignAs {
+ const abi_align = ty.abiAlignment(target);
+ return init(abi_align, abi_align);
+ }
+ pub fn fieldAlign(struct_ty: Type, field_i: usize, target: Target) AlignAs {
+ return init(
+ struct_ty.structFieldAlign(field_i, target),
+ struct_ty.structFieldType(field_i).abiAlignment(target),
+ );
+ }
+ pub fn unionPayloadAlign(union_ty: Type, target: Target) AlignAs {
+ const union_obj = union_ty.cast(Type.Payload.Union).?.data;
+ const union_payload_align = union_obj.abiAlignment(target, false);
+ return init(union_payload_align, union_payload_align);
+ }
+
+ pub fn getAlign(self: AlignAs) u32 {
+ return @as(u32, 1) << self.@"align";
+ }
};
};
+ pub const Unnamed = struct {
+ base: Payload,
+ data: struct {
+ fields: Fields.Data,
+ owner_decl: Module.Decl.Index,
+ id: u32,
+ },
+ };
+
pub const Aggregate = struct {
base: Payload,
data: struct {
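Note: the new `AlignAs` payload stores the requested and the ABI alignment as log2 exponents, so "is this field under-aligned?" becomes a cheap integer comparison. A minimal standalone sketch of that idea (editor's illustration, not part of the commit; the helper name `isUnderAligned` is hypothetical):

    const std = @import("std");

    const AlignAs = struct {
        @"align": std.math.Log2Int(u32),
        abi: std.math.Log2Int(u32),

        fn init(alignment: u32, abi_alignment: u32) AlignAs {
            // Both inputs must be powers of two, mirroring the asserts above.
            std.debug.assert(std.math.isPowerOfTwo(alignment));
            std.debug.assert(std.math.isPowerOfTwo(abi_alignment));
            return .{
                .@"align" = std.math.log2_int(u32, alignment),
                .abi = std.math.log2_int(u32, abi_alignment),
            };
        }

        // Hypothetical helper: the packed-layout check later in this commit
        // (`field_align.@"align" < field_align.abi`) is exactly this comparison.
        fn isUnderAligned(self: AlignAs) bool {
            return self.@"align" < self.abi;
        }
    };

    test "log2 alignment comparison" {
        try std.testing.expect(AlignAs.init(1, 4).isUnderAligned());
        try std.testing.expect(!AlignAs.init(8, 4).isUnderAligned());
    }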
@@ -259,22 +314,23 @@ pub const CType = extern union {
arena: std.heap.ArenaAllocator.State = .{},
set: Set = .{},
- const Set = struct {
- const Map = std.ArrayHashMapUnmanaged(CType, void, HashContext32, true);
+ pub const Set = struct {
+ pub const Map = std.ArrayHashMapUnmanaged(CType, void, HashContext32, true);
map: Map = .{},
- fn indexToCType(self: Set, index: Index) CType {
+ pub fn indexToCType(self: Set, index: Index) CType {
if (index < Tag.no_payload_count) return initTag(@intToEnum(Tag, index));
return self.map.keys()[index - Tag.no_payload_count];
}
- fn indexToHash(self: Set, index: Index) Map.Hash {
- if (index < Tag.no_payload_count) return self.indexToCType(index).hash(self);
+ pub fn indexToHash(self: Set, index: Index) Map.Hash {
+ if (index < Tag.no_payload_count)
+ return (HashContext32{ .store = &self }).hash(self.indexToCType(index));
return self.map.entries.items(.hash)[index - Tag.no_payload_count];
}
- fn typeToIndex(self: Set, ty: Type, target: Target, kind: Kind) ?Index {
+ pub fn typeToIndex(self: Set, ty: Type, target: Target, kind: Kind) ?Index {
const lookup = Convert.Lookup{ .imm = .{ .set = &self, .target = target } };
var convert: Convert = undefined;
@@ -298,21 +354,27 @@ pub const CType = extern union {
return self.arena.child_allocator;
}
- fn cTypeToIndex(self: *Promoted, cty: CType) Allocator.Error!Index {
+ pub fn cTypeToIndex(self: *Promoted, cty: CType) Allocator.Error!Index {
const t = cty.tag();
if (@enumToInt(t) < Tag.no_payload_count) return @intCast(Index, @enumToInt(t));
const gop = try self.set.map.getOrPutContext(self.gpa(), cty, .{ .store = &self.set });
if (!gop.found_existing) gop.key_ptr.* = cty;
if (std.debug.runtime_safety) {
- const key = self.set.map.entries.items(.key)[gop.index];
- assert(key.eql(cty));
+ const key = &self.set.map.entries.items(.key)[gop.index];
+ assert(key == gop.key_ptr);
+ assert(cty.eql(key.*));
assert(cty.hash(self.set) == key.hash(self.set));
}
return @intCast(Index, Tag.no_payload_count + gop.index);
}
- fn typeToIndex(self: *Promoted, ty: Type, mod: *Module, kind: Kind) Allocator.Error!Index {
+ pub fn typeToIndex(
+ self: *Promoted,
+ ty: Type,
+ mod: *Module,
+ kind: Kind,
+ ) Allocator.Error!Index {
const lookup = Convert.Lookup{ .mut = .{ .promoted = self, .mod = mod } };
var convert: Convert = undefined;
@@ -337,9 +399,10 @@ pub const CType = extern union {
.lookup = lookup.freeze(),
.convert = &convert,
};
- const key = self.set.map.entries.items(.key)[gop.index];
- assert(adapter.eql(ty, key));
- assert(adapter.hash(ty) == key.hash(self.set));
+ const cty = &self.set.map.entries.items(.key)[gop.index];
+ assert(cty == gop.key_ptr);
+ assert(adapter.eql(ty, cty.*));
+ assert(adapter.hash(ty) == cty.hash(self.set));
}
return @intCast(Index, Tag.no_payload_count + gop.index);
}
@@ -358,21 +421,25 @@ pub const CType = extern union {
return self.set.indexToCType(index);
}
+ pub fn indexToHash(self: Store, index: Index) Set.Map.Hash {
+ return self.set.indexToHash(index);
+ }
+
pub fn cTypeToIndex(self: *Store, gpa: Allocator, cty: CType) !Index {
var promoted = self.promote(gpa);
defer self.demote(promoted);
return promoted.cTypeToIndex(cty);
}
- pub fn typeToCType(self: *Store, gpa: Allocator, ty: Type, mod: *Module) !CType {
- const idx = try self.typeToIndex(gpa, ty, mod);
+ pub fn typeToCType(self: *Store, gpa: Allocator, ty: Type, mod: *Module, kind: Kind) !CType {
+ const idx = try self.typeToIndex(gpa, ty, mod, kind);
return self.indexToCType(idx);
}
- pub fn typeToIndex(self: *Store, gpa: Allocator, ty: Type, mod: *Module) !Index {
+ pub fn typeToIndex(self: *Store, gpa: Allocator, ty: Type, mod: *Module, kind: Kind) !Index {
var promoted = self.promote(gpa);
defer self.demote(promoted);
- return promoted.typeToIndex(ty, mod, .complete);
+ return promoted.typeToIndex(ty, mod, kind);
}
pub fn clearRetainingCapacity(self: *Store, gpa: Allocator) void {
@@ -389,8 +456,16 @@ pub const CType = extern union {
_ = promoted.arena.reset(.free_all);
}
- pub fn shrinkToFit(self: *Store, gpa: Allocator) void {
- self.set.map.shrinkAndFree(gpa, self.set.map.count());
+ pub fn shrinkRetainingCapacity(self: *Store, gpa: Allocator, new_len: usize) void {
+ self.set.map.shrinkRetainingCapacity(gpa, new_len);
+ }
+
+ pub fn shrinkAndFree(self: *Store, gpa: Allocator, new_len: usize) void {
+ self.set.map.shrinkAndFree(gpa, new_len);
+ }
+
+ pub fn count(self: Store) usize {
+ return self.set.map.count();
}
pub fn move(self: *Store) Store {
@@ -407,7 +482,37 @@ pub const CType = extern union {
}
};
+ pub fn isPacked(self: CType) bool {
+ return switch (self.tag()) {
+ else => false,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ .packed_struct,
+ .packed_union,
+ => true,
+ };
+ }
+
+ pub fn fields(self: CType) Payload.Fields.Data {
+ return if (self.cast(Payload.Aggregate)) |pl|
+ pl.data.fields
+ else if (self.cast(Payload.Unnamed)) |pl|
+ pl.data.fields
+ else if (self.cast(Payload.Fields)) |pl|
+ pl.data
+ else
+ unreachable;
+ }
+
pub fn eql(lhs: CType, rhs: CType) bool {
+ return lhs.eqlContext(rhs, struct {
+ pub fn eqlIndex(_: @This(), lhs_idx: Index, rhs_idx: Index) bool {
+ return lhs_idx == rhs_idx;
+ }
+ }{});
+ }
+
+ pub fn eqlContext(lhs: CType, rhs: CType, ctx: anytype) bool {
// As a shortcut, if the small tags / addresses match, we're done.
if (lhs.tag_if_small_enough == rhs.tag_if_small_enough) return true;
@@ -458,35 +563,52 @@ pub const CType = extern union {
.pointer_const,
.pointer_volatile,
.pointer_const_volatile,
- => lhs.cast(Payload.Child).?.data == rhs.cast(Payload.Child).?.data,
+ => ctx.eqlIndex(lhs.cast(Payload.Child).?.data, rhs.cast(Payload.Child).?.data),
.array,
.vector,
- => std.meta.eql(lhs.cast(Payload.Sequence).?.data, rhs.cast(Payload.Sequence).?.data),
-
- .fwd_struct,
- .fwd_union,
- => lhs.cast(Payload.FwdDecl).?.data == rhs.cast(Payload.FwdDecl).?.data,
+ => {
+ const lhs_data = lhs.cast(Payload.Sequence).?.data;
+ const rhs_data = rhs.cast(Payload.Sequence).?.data;
+ return lhs_data.len == rhs_data.len and
+ ctx.eqlIndex(lhs_data.elem_type, rhs_data.elem_type);
+ },
- .anon_struct,
- .packed_anon_struct,
+ .fwd_anon_struct,
+ .fwd_anon_union,
=> {
const lhs_data = lhs.cast(Payload.Fields).?.data;
const rhs_data = rhs.cast(Payload.Fields).?.data;
if (lhs_data.len != rhs_data.len) return false;
for (lhs_data, rhs_data) |lhs_field, rhs_field| {
- if (lhs_field.type != rhs_field.type) return false;
- if (lhs_field.alignas != rhs_field.alignas) return false;
+ if (!ctx.eqlIndex(lhs_field.type, rhs_field.type)) return false;
+ if (lhs_field.alignas.@"align" != rhs_field.alignas.@"align") return false;
if (cstr.cmp(lhs_field.name, rhs_field.name) != 0) return false;
}
return true;
},
+ .fwd_struct,
+ .fwd_union,
+ => lhs.cast(Payload.FwdDecl).?.data == rhs.cast(Payload.FwdDecl).?.data,
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const lhs_data = lhs.cast(Payload.Unnamed).?.data;
+ const rhs_data = rhs.cast(Payload.Unnamed).?.data;
+ return lhs_data.owner_decl == rhs_data.owner_decl and lhs_data.id == rhs_data.id;
+ },
+
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
.packed_union,
- => std.meta.eql(
+ => ctx.eqlIndex(
lhs.cast(Payload.Aggregate).?.data.fwd_decl,
rhs.cast(Payload.Aggregate).?.data.fwd_decl,
),
@@ -496,10 +618,10 @@ pub const CType = extern union {
=> {
const lhs_data = lhs.cast(Payload.Function).?.data;
const rhs_data = rhs.cast(Payload.Function).?.data;
- if (lhs_data.return_type != rhs_data.return_type) return false;
if (lhs_data.param_types.len != rhs_data.param_types.len) return false;
- for (lhs_data.param_types, rhs_data.param_types) |lhs_param_cty, rhs_param_cty| {
- if (lhs_param_cty != rhs_param_cty) return false;
+ if (!ctx.eqlIndex(lhs_data.return_type, rhs_data.return_type)) return false;
+ for (lhs_data.param_types, rhs_data.param_types) |lhs_param_idx, rhs_param_idx| {
+ if (!ctx.eqlIndex(lhs_param_idx, rhs_param_idx)) return false;
}
return true;
},
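Note on the `eqlContext`/`copyContext` refactor: the comparison and copy walks are now parameterized over an `anytype` context supplying `eqlIndex`/`copyIndex`, so one traversal serves both plain index comparison and index translation. A hedged sketch of that duck-typed pattern (the context names here are illustrative, not from the commit):

    const std = @import("std");
    const Index = u32;

    // Plain comparison: the shape of the context `eql` passes above.
    const IdentityCtx = struct {
        pub fn eqlIndex(_: @This(), lhs: Index, rhs: Index) bool {
            return lhs == rhs;
        }
    };

    // A remapping context, e.g. for comparing indices across two stores.
    const RemapCtx = struct {
        old_to_new: []const Index,
        pub fn eqlIndex(self: @This(), old_idx: Index, new_idx: Index) bool {
            return self.old_to_new[old_idx] == new_idx;
        }
    };

    fn indexEql(ctx: anytype, lhs: Index, rhs: Index) bool {
        return ctx.eqlIndex(lhs, rhs);
    }

    test "duck-typed comparison context" {
        try std.testing.expect(indexEql(IdentityCtx{}, 3, 3));
        const map = [_]Index{ 7, 8, 9 };
        try std.testing.expect(indexEql(RemapCtx{ .old_to_new = &map }, 2, 9));
    }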
@@ -568,18 +690,30 @@ pub const CType = extern union {
store.indexToCType(data.elem_type).updateHasher(hasher, store);
},
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => for (self.cast(Payload.Fields).?.data) |field| {
+ store.indexToCType(field.type).updateHasher(hasher, store);
+ hasher.update(mem.span(field.name));
+ autoHash(hasher, field.alignas.@"align");
+ },
+
.fwd_struct,
.fwd_union,
=> autoHash(hasher, self.cast(Payload.FwdDecl).?.data),
- .anon_struct,
- .packed_anon_struct,
- => for (self.cast(Payload.Fields).?.data) |field| {
- store.indexToCType(field.type).updateHasher(hasher, store);
- hasher.update(mem.span(field.name));
- autoHash(hasher, field.alignas);
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const data = self.cast(Payload.Unnamed).?.data;
+ autoHash(hasher, data.owner_decl);
+ autoHash(hasher, data.id);
},
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
@@ -599,7 +733,7 @@ pub const CType = extern union {
}
}
- pub const Kind = enum { forward, complete, global, parameter };
+ pub const Kind = enum { forward, forward_parameter, complete, global, parameter, payload };
const Convert = struct {
storage: union {
@@ -609,9 +743,11 @@ pub const CType = extern union {
fwd: Payload.FwdDecl,
anon: struct {
fields: [2]Payload.Fields.Field,
- pl: Payload.Fields,
+ pl: union {
+ forward: Payload.Fields,
+ complete: Payload.Aggregate,
+ },
},
- agg: Payload.Aggregate,
},
value: union(enum) {
tag: Tag,
@@ -716,6 +852,66 @@ pub const CType = extern union {
}
};
+ fn sortFields(self: *@This(), fields_len: usize) []Payload.Fields.Field {
+ const Field = Payload.Fields.Field;
+ const slice = self.storage.anon.fields[0..fields_len];
+ std.sort.sort(Field, slice, {}, struct {
+ fn before(_: void, lhs: Field, rhs: Field) bool {
+ return lhs.alignas.@"align" > rhs.alignas.@"align";
+ }
+ }.before);
+ return slice;
+ }
+
+ fn initAnon(self: *@This(), kind: Kind, fwd_idx: Index, fields_len: usize) void {
+ switch (kind) {
+ .forward, .forward_parameter => {
+ self.storage.anon.pl = .{ .forward = .{
+ .base = .{ .tag = .fwd_anon_struct },
+ .data = self.sortFields(fields_len),
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.forward) };
+ },
+ .complete, .parameter, .global => {
+ self.storage.anon.pl = .{ .complete = .{
+ .base = .{ .tag = .anon_struct },
+ .data = .{
+ .fields = self.sortFields(fields_len),
+ .fwd_decl = fwd_idx,
+ },
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.complete) };
+ },
+ .payload => unreachable,
+ }
+ }
+
+ fn initArrayParameter(self: *@This(), ty: Type, kind: Kind, lookup: Lookup) !void {
+ if (switch (kind) {
+ .forward_parameter => @as(Index, undefined),
+ .parameter => try lookup.typeToIndex(ty, .forward_parameter),
+ .forward, .complete, .global, .payload => unreachable,
+ }) |fwd_idx| {
+ if (try lookup.typeToIndex(ty, switch (kind) {
+ .forward_parameter => .forward,
+ .parameter => .complete,
+ .forward, .complete, .global, .payload => unreachable,
+ })) |array_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "array",
+ .type = array_idx,
+ .alignas = Payload.Fields.AlignAs.abiAlign(ty, lookup.getTarget()),
+ };
+ self.initAnon(kind, fwd_idx, 1);
+ } else self.init(switch (kind) {
+ .forward_parameter => .fwd_anon_struct,
+ .parameter => .anon_struct,
+ .forward, .complete, .global, .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ }
+
pub fn initType(self: *@This(), ty: Type, kind: Kind, lookup: Lookup) !void {
const target = lookup.getTarget();
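Note: `initArrayParameter` exists because a C array type in a parameter list decays to a pointer, so for the new `.parameter`/`.forward_parameter` kinds the backend wraps the array in a single-member struct whose field is literally named `array` (and `CValue.arg_array` in c.zig reads back through that member). A Zig sketch of the wrapper shape, with hypothetical type and function names:

    const std = @import("std");

    // Passing [4]u8 "by value" through C requires a wrapper struct;
    // only the member name "array" is taken from the commit.
    const ByValArray = extern struct { array: [4]u8 };

    fn firstElem(a: ByValArray) u8 {
        // Mirrors how a callee accesses a wrapped argument (a0.array[...]).
        return a.array[0];
    }

    test "array passed via wrapper struct" {
        const v = ByValArray{ .array = .{ 1, 2, 3, 4 } };
        try std.testing.expectEqual(@as(u8, 1), firstElem(v));
    }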
@@ -739,17 +935,23 @@ pub const CType = extern union {
switch (t) {
.void => unreachable,
else => self.init(t),
- .array => {
- const abi_size = ty.abiSize(target);
- const abi_align = ty.abiAlignment(target);
- self.storage = .{ .seq = .{ .base = .{ .tag = .array }, .data = .{
- .len = @divExact(abi_size, abi_align),
- .elem_type = tagFromIntInfo(
- .unsigned,
- @intCast(u16, abi_align * 8),
- ).toIndex(),
- } } };
- self.value = .{ .cty = initPayload(&self.storage.seq) };
+ .array => switch (kind) {
+ .forward, .complete, .global => {
+ const abi_size = ty.abiSize(target);
+ const abi_align = ty.abiAlignment(target);
+ self.storage = .{ .seq = .{ .base = .{ .tag = .array }, .data = .{
+ .len = @divExact(abi_size, abi_align),
+ .elem_type = tagFromIntInfo(
+ .unsigned,
+ @intCast(u16, abi_align * 8),
+ ).toIndex(),
+ } } };
+ self.value = .{ .cty = initPayload(&self.storage.seq) };
+ },
+ .forward_parameter,
+ .parameter,
+ => try self.initArrayParameter(ty, kind, lookup),
+ .payload => unreachable,
},
}
},
@@ -782,165 +984,297 @@ pub const CType = extern union {
else => unreachable,
}),
- .Pointer => switch (ty.ptrSize()) {
- .Slice => {
- var buf: Type.SlicePtrFieldTypeBuffer = undefined;
- const ptr_ty = ty.slicePtrFieldType(&buf);
- if (try lookup.typeToIndex(ptr_ty, kind)) |ptr_idx| {
- self.storage = .{ .anon = .{ .fields = .{
- .{
- .name = "ptr",
- .type = ptr_idx,
- .alignas = ptr_ty.abiAlignment(target),
+ .Pointer => {
+ const info = ty.ptrInfo().data;
+ switch (info.size) {
+ .Slice => {
+ if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ var buf: Type.SlicePtrFieldTypeBuffer = undefined;
+ const ptr_ty = ty.slicePtrFieldType(&buf);
+ if (try lookup.typeToIndex(ptr_ty, kind)) |ptr_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
+ .name = "ptr",
+ .type = ptr_idx,
+ .alignas = Payload.Fields.AlignAs.abiAlign(ptr_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
+ .name = "len",
+ .type = Tag.uintptr_t.toIndex(),
+ .alignas = Payload.Fields.AlignAs.abiAlign(Type.usize, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(.anon_struct);
+ },
+
+ .One, .Many, .C => {
+ const t: Tag = switch (info.@"volatile") {
+ false => switch (info.mutable) {
+ true => .pointer,
+ false => .pointer_const,
},
- .{
- .name = "len",
- .type = Tag.size_t.toIndex(),
- .alignas = Type.usize.abiAlignment(target),
+ true => switch (info.mutable) {
+ true => .pointer_volatile,
+ false => .pointer_const_volatile,
},
- }, .pl = undefined } };
- self.storage.anon.pl = .{
- .base = .{ .tag = .anon_struct },
- .data = self.storage.anon.fields[0..2],
};
- self.value = .{ .cty = initPayload(&self.storage.anon.pl) };
- } else self.init(.anon_struct);
- },
- .One, .Many, .C => {
- const t: Tag = switch (ty.isVolatilePtr()) {
- false => switch (ty.isConstPtr()) {
- false => .pointer,
- true => .pointer_const,
- },
- true => switch (ty.isConstPtr()) {
- false => .pointer_volatile,
- true => .pointer_const_volatile,
- },
- };
- if (try lookup.typeToIndex(ty.childType(), .forward)) |child_idx| {
- self.storage = .{ .child = .{ .base = .{ .tag = t }, .data = child_idx } };
- self.value = .{ .cty = initPayload(&self.storage.child) };
- } else self.init(t);
- },
+ var host_int_pl = Type.Payload.Bits{
+ .base = .{ .tag = .int_unsigned },
+ .data = info.host_size * 8,
+ };
+ const pointee_ty = if (info.host_size > 0)
+ Type.initPayload(&host_int_pl.base)
+ else
+ info.pointee_type;
+
+ if (if (info.size == .C and pointee_ty.tag() == .u8)
+ Tag.char.toIndex()
+ else
+ try lookup.typeToIndex(pointee_ty, .forward)) |child_idx|
+ {
+ self.storage = .{ .child = .{
+ .base = .{ .tag = t },
+ .data = child_idx,
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.child) };
+ } else self.init(t);
+ },
+ }
},
- .Struct, .Union => |zig_tag| if (ty.isTupleOrAnonStruct()) {
+ .Struct, .Union => |zig_tag| if (ty.containerLayout() == .Packed) {
+ if (ty.castTag(.@"struct")) |struct_obj| {
+ try self.initType(struct_obj.data.backing_int_ty, kind, lookup);
+ } else {
+ var buf: Type.Payload.Bits = .{
+ .base = .{ .tag = .int_unsigned },
+ .data = @intCast(u16, ty.bitSize(target)),
+ };
+ try self.initType(Type.initPayload(&buf.base), kind, lookup);
+ }
+ } else if (ty.isTupleOrAnonStruct()) {
if (lookup.isMutable()) {
for (0..ty.structFieldCount()) |field_i| {
const field_ty = ty.structFieldType(field_i);
if (ty.structFieldIsComptime(field_i) or
!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
_ = try lookup.typeToIndex(field_ty, switch (kind) {
- .forward, .complete, .parameter => .complete,
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
.global => .global,
+ .payload => unreachable,
});
}
+ switch (kind) {
+ .forward, .forward_parameter => {},
+ .complete, .parameter, .global => _ = try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }
}
- self.init(.anon_struct);
+ self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
} else {
- const is_struct = zig_tag == .Struct or ty.unionTagTypeSafety() != null;
+ const tag_ty = ty.unionTagTypeSafety();
+ const is_tagged_union_wrapper = kind != .payload and tag_ty != null;
+ const is_struct = zig_tag == .Struct or is_tagged_union_wrapper;
switch (kind) {
- .forward => {
+ .forward, .forward_parameter => {
self.storage = .{ .fwd = .{
.base = .{ .tag = if (is_struct) .fwd_struct else .fwd_union },
.data = ty.getOwnerDecl(),
} };
self.value = .{ .cty = initPayload(&self.storage.fwd) };
},
- else => {
- if (lookup.isMutable()) {
- for (0..switch (zig_tag) {
- .Struct => ty.structFieldCount(),
- .Union => ty.cast(Type.Payload.Union).?.data.fields.count(),
- else => unreachable,
- }) |field_i| {
- const field_ty = ty.structFieldType(field_i);
- if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+ .complete, .parameter, .global, .payload => if (is_tagged_union_wrapper) {
+ const fwd_idx = try lookup.typeToIndex(ty, .forward);
+ const payload_idx = try lookup.typeToIndex(ty, .payload);
+ const tag_idx = try lookup.typeToIndex(tag_ty.?, kind);
+ if (fwd_idx != null and payload_idx != null and tag_idx != null) {
+ self.storage = .{ .anon = undefined };
+ var field_count: usize = 0;
+ if (payload_idx != Tag.void.toIndex()) {
+ self.storage.anon.fields[field_count] = .{
+ .name = "payload",
+ .type = payload_idx.?,
+ .alignas = Payload.Fields.AlignAs.unionPayloadAlign(ty, target),
+ };
+ field_count += 1;
+ }
+ if (tag_idx != Tag.void.toIndex()) {
+ self.storage.anon.fields[field_count] = .{
+ .name = "tag",
+ .type = tag_idx.?,
+ .alignas = Payload.Fields.AlignAs.abiAlign(tag_ty.?, target),
+ };
+ field_count += 1;
+ }
+ self.storage.anon.pl = .{ .complete = .{
+ .base = .{ .tag = .@"struct" },
+ .data = .{
+ .fields = self.sortFields(field_count),
+ .fwd_decl = fwd_idx.?,
+ },
+ } };
+ self.value = .{ .cty = initPayload(&self.storage.anon.pl.complete) };
+ } else self.init(.@"struct");
+ } else if (kind == .payload and ty.unionHasAllZeroBitFieldTypes()) {
+ self.init(.void);
+ } else {
+ var is_packed = false;
+ for (0..switch (zig_tag) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
+ const field_ty = ty.structFieldType(field_i);
+ if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
+
+ const field_align = Payload.Fields.AlignAs.fieldAlign(
+ ty,
+ field_i,
+ target,
+ );
+ if (field_align.@"align" < field_align.abi) {
+ is_packed = true;
+ if (!lookup.isMutable()) break;
+ }
+
+ if (lookup.isMutable()) {
_ = try lookup.typeToIndex(field_ty, switch (kind) {
- .forward => unreachable,
- .complete, .parameter => .complete,
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .payload => .complete,
.global => .global,
});
}
- _ = try lookup.typeToIndex(ty, .forward);
}
- self.init(if (is_struct) .@"struct" else .@"union");
+ switch (kind) {
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .global => {
+ _ = try lookup.typeToIndex(ty, .forward);
+ self.init(if (is_struct)
+ if (is_packed) .packed_struct else .@"struct"
+ else if (is_packed) .packed_union else .@"union");
+ },
+ .payload => self.init(if (is_packed)
+ .packed_unnamed_union
+ else
+ .unnamed_union),
+ }
},
}
},
.Array, .Vector => |zig_tag| {
- const t: Tag = switch (zig_tag) {
- .Array => .array,
- .Vector => .vector,
- else => unreachable,
- };
- if (try lookup.typeToIndex(ty.childType(), kind)) |child_idx| {
- self.storage = .{ .seq = .{ .base = .{ .tag = t }, .data = .{
- .len = ty.arrayLenIncludingSentinel(),
- .elem_type = child_idx,
- } } };
- self.value = .{ .cty = initPayload(&self.storage.seq) };
- } else self.init(t);
+ switch (kind) {
+ .forward, .complete, .global => {
+ const t: Tag = switch (zig_tag) {
+ .Array => .array,
+ .Vector => .vector,
+ else => unreachable,
+ };
+ if (try lookup.typeToIndex(ty.childType(), kind)) |child_idx| {
+ self.storage = .{ .seq = .{ .base = .{ .tag = t }, .data = .{
+ .len = ty.arrayLenIncludingSentinel(),
+ .elem_type = child_idx,
+ } } };
+ self.value = .{ .cty = initPayload(&self.storage.seq) };
+ } else self.init(t);
+ },
+ .forward_parameter, .parameter => try self.initArrayParameter(ty, kind, lookup),
+ .payload => unreachable,
+ }
},
.Optional => {
var buf: Type.Payload.ElemType = undefined;
const payload_ty = ty.optionalChild(&buf);
if (payload_ty.hasRuntimeBitsIgnoreComptime()) {
- if (ty.optionalReprIsPayload())
- try self.initType(payload_ty, kind, lookup)
- else if (try lookup.typeToIndex(payload_ty, kind)) |payload_idx| {
- self.storage = .{ .anon = .{ .fields = .{
- .{
+ if (ty.optionalReprIsPayload()) {
+ try self.initType(payload_ty, kind, lookup);
+ } else if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ if (try lookup.typeToIndex(payload_ty, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ })) |payload_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
.name = "payload",
.type = payload_idx,
- .alignas = payload_ty.abiAlignment(target),
- },
- .{
+ .alignas = Payload.Fields.AlignAs.abiAlign(payload_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
.name = "is_null",
.type = Tag.bool.toIndex(),
- .alignas = Type.bool.abiAlignment(target),
- },
- }, .pl = undefined } };
- self.storage.anon.pl = .{
- .base = .{ .tag = .anon_struct },
- .data = self.storage.anon.fields[0..2],
- };
- self.value = .{ .cty = initPayload(&self.storage.anon.pl) };
+ .alignas = Payload.Fields.AlignAs.abiAlign(Type.bool, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
} else self.init(.anon_struct);
} else self.init(.bool);
},
.ErrorUnion => {
- const payload_ty = ty.errorUnionPayload();
- if (try lookup.typeToIndex(payload_ty, switch (kind) {
- .forward, .complete, .parameter => .complete,
- .global => .global,
- })) |payload_idx| {
- const error_ty = ty.errorUnionSet();
- if (payload_idx == Tag.void.toIndex())
- try self.initType(error_ty, kind, lookup)
- else if (try lookup.typeToIndex(error_ty, kind)) |error_idx| {
- self.storage = .{ .anon = .{ .fields = .{
- .{
+ if (switch (kind) {
+ .forward, .forward_parameter => @as(Index, undefined),
+ .complete, .parameter, .global => try lookup.typeToIndex(ty, .forward),
+ .payload => unreachable,
+ }) |fwd_idx| {
+ const payload_ty = ty.errorUnionPayload();
+ if (try lookup.typeToIndex(payload_ty, switch (kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ })) |payload_idx| {
+ const error_ty = ty.errorUnionSet();
+ if (payload_idx == Tag.void.toIndex()) {
+ try self.initType(error_ty, kind, lookup);
+ } else if (try lookup.typeToIndex(error_ty, kind)) |error_idx| {
+ self.storage = .{ .anon = undefined };
+ self.storage.anon.fields[0] = .{
.name = "payload",
.type = payload_idx,
- .alignas = payload_ty.abiAlignment(target),
- },
- .{
+ .alignas = Payload.Fields.AlignAs.abiAlign(payload_ty, target),
+ };
+ self.storage.anon.fields[1] = .{
.name = "error",
.type = error_idx,
- .alignas = error_ty.abiAlignment(target),
- },
- }, .pl = undefined } };
- self.storage.anon.pl = .{
- .base = .{ .tag = .anon_struct },
- .data = self.storage.anon.fields[0..2],
- };
- self.value = .{ .cty = initPayload(&self.storage.anon.pl) };
- } else self.init(.anon_struct);
+ .alignas = Payload.Fields.AlignAs.abiAlign(error_ty, target),
+ };
+ self.initAnon(kind, fwd_idx, 2);
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
+ } else self.init(switch (kind) {
+ .forward, .forward_parameter => .fwd_anon_struct,
+ .complete, .parameter, .global => .anon_struct,
+ .payload => unreachable,
+ });
} else self.init(.anon_struct);
},
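Note: slices, optionals, and error unions now lower to a two-field anonymous aggregate in both a forward form (`fwd_anon_struct`) and a complete form (`anon_struct` carrying its `fwd_decl` index), built by `initAnon`, which orders the members by descending alignment via `sortFields`. A small sketch of that ordering rule (editor's illustration; field and function names are made up):

    const std = @import("std");

    const Field = struct {
        name: []const u8,
        log2_align: u5,
    };

    fn alignDescending(_: void, lhs: Field, rhs: Field) bool {
        // Same rule as sortFields: larger alignment sorts first.
        return lhs.log2_align > rhs.log2_align;
    }

    test "fields sorted by descending alignment" {
        var fields = [_]Field{
            .{ .name = "is_null", .log2_align = 0 },
            .{ .name = "payload", .log2_align = 3 },
        };
        std.sort.sort(Field, &fields, {}, alignDescending);
        try std.testing.expectEqualStrings("payload", fields[0].name);
    }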
@@ -959,16 +1293,15 @@ pub const CType = extern union {
.Fn => {
const info = ty.fnInfo();
if (lookup.isMutable()) {
- _ = try lookup.typeToIndex(info.return_type, switch (kind) {
- .forward => .forward,
- .complete, .parameter, .global => .complete,
- });
+ const param_kind: Kind = switch (kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
+ };
+ _ = try lookup.typeToIndex(info.return_type, param_kind);
for (info.param_types) |param_type| {
if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
- _ = try lookup.typeToIndex(param_type, switch (kind) {
- .forward => .forward,
- .complete, .parameter, .global => unreachable,
- });
+ _ = try lookup.typeToIndex(param_type, param_kind);
}
}
self.init(if (info.is_var_args) .varargs_function else .function);
@@ -977,16 +1310,33 @@ pub const CType = extern union {
}
};
- fn copyFields(arena: Allocator, fields: Payload.Fields.Data) !Payload.Fields.Data {
- const new_fields = try arena.dupe(Payload.Fields.Field, fields);
- for (new_fields) |*new_field| {
- new_field.name = try arena.dupeZ(u8, mem.span(new_field.name));
- new_field.type = new_field.type;
+ pub fn copy(self: CType, arena: Allocator) !CType {
+ return self.copyContext(struct {
+ arena: Allocator,
+ pub fn copyIndex(_: @This(), idx: Index) Index {
+ return idx;
+ }
+ }{ .arena = arena });
+ }
+
+ fn copyFields(ctx: anytype, old_fields: Payload.Fields.Data) !Payload.Fields.Data {
+ const new_fields = try ctx.arena.alloc(Payload.Fields.Field, old_fields.len);
+ for (new_fields, old_fields) |*new_field, old_field| {
+ new_field.name = try ctx.arena.dupeZ(u8, mem.span(old_field.name));
+ new_field.type = ctx.copyIndex(old_field.type);
+ new_field.alignas = old_field.alignas;
}
return new_fields;
}
- pub fn copy(self: CType, arena: Allocator) !CType {
+ fn copyParams(ctx: anytype, old_param_types: []const Index) ![]const Index {
+ const new_param_types = try ctx.arena.alloc(Index, old_param_types.len);
+ for (new_param_types, old_param_types) |*new_param_type, old_param_type|
+ new_param_type.* = ctx.copyIndex(old_param_type);
+ return new_param_types;
+ }
+
+ pub fn copyContext(self: CType, ctx: anytype) !CType {
switch (self.tag()) {
.void,
.char,
@@ -1032,8 +1382,8 @@ pub const CType = extern union {
.pointer_const_volatile,
=> {
const pl = self.cast(Payload.Child).?;
- const new_pl = try arena.create(Payload.Child);
- new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = pl.data };
+ const new_pl = try ctx.arena.create(Payload.Child);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = ctx.copyIndex(pl.data) };
return initPayload(new_pl);
},
@@ -1041,48 +1391,62 @@ pub const CType = extern union {
.vector,
=> {
const pl = self.cast(Payload.Sequence).?;
- const new_pl = try arena.create(Payload.Sequence);
+ const new_pl = try ctx.arena.create(Payload.Sequence);
new_pl.* = .{
.base = .{ .tag = pl.base.tag },
- .data = .{ .len = pl.data.len, .elem_type = pl.data.elem_type },
+ .data = .{ .len = pl.data.len, .elem_type = ctx.copyIndex(pl.data.elem_type) },
};
return initPayload(new_pl);
},
- .fwd_struct,
- .fwd_union,
+ .fwd_anon_struct,
+ .fwd_anon_union,
=> {
- const pl = self.cast(Payload.FwdDecl).?;
- const new_pl = try arena.create(Payload.FwdDecl);
+ const pl = self.cast(Payload.Fields).?;
+ const new_pl = try ctx.arena.create(Payload.Fields);
new_pl.* = .{
.base = .{ .tag = pl.base.tag },
- .data = pl.data,
+ .data = try copyFields(ctx, pl.data),
};
return initPayload(new_pl);
},
- .anon_struct,
- .packed_anon_struct,
+ .fwd_struct,
+ .fwd_union,
=> {
- const pl = self.cast(Payload.Fields).?;
- const new_pl = try arena.create(Payload.Fields);
- new_pl.* = .{
- .base = .{ .tag = pl.base.tag },
- .data = try copyFields(arena, pl.data),
- };
+ const pl = self.cast(Payload.FwdDecl).?;
+ const new_pl = try ctx.arena.create(Payload.FwdDecl);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = pl.data };
+ return initPayload(new_pl);
+ },
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => {
+ const pl = self.cast(Payload.Unnamed).?;
+ const new_pl = try ctx.arena.create(Payload.Unnamed);
+ new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
+ .fields = try copyFields(ctx, pl.data.fields),
+ .owner_decl = pl.data.owner_decl,
+ .id = pl.data.id,
+ } };
return initPayload(new_pl);
},
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
.packed_union,
=> {
const pl = self.cast(Payload.Aggregate).?;
- const new_pl = try arena.create(Payload.Aggregate);
+ const new_pl = try ctx.arena.create(Payload.Aggregate);
new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
- .fields = try copyFields(arena, pl.data.fields),
- .fwd_decl = pl.data.fwd_decl,
+ .fields = try copyFields(ctx, pl.data.fields),
+ .fwd_decl = ctx.copyIndex(pl.data.fwd_decl),
} };
return initPayload(new_pl);
},
@@ -1091,10 +1455,10 @@ pub const CType = extern union {
.varargs_function,
=> {
const pl = self.cast(Payload.Function).?;
- const new_pl = try arena.create(Payload.Function);
+ const new_pl = try ctx.arena.create(Payload.Function);
new_pl.* = .{ .base = .{ .tag = pl.base.tag }, .data = .{
- .return_type = pl.data.return_type,
- .param_types = try arena.dupe(Index, pl.data.param_types),
+ .return_type = ctx.copyIndex(pl.data.return_type),
+ .param_types = try copyParams(ctx, pl.data.param_types),
} };
return initPayload(new_pl);
},
@@ -1118,8 +1482,14 @@ pub const CType = extern union {
switch (convert.value) {
.cty => |c| return c.copy(arena),
.tag => |t| switch (t) {
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
.anon_struct,
- .packed_anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
@@ -1149,31 +1519,44 @@ pub const CType = extern union {
else
arena.dupeZ(u8, ty.structFieldName(field_i)),
.type = store.set.typeToIndex(field_ty, target, switch (kind) {
- .forward, .complete, .parameter => .complete,
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
.global => .global,
+ .payload => unreachable,
}).?,
- .alignas = ty.structFieldAlign(field_i, target),
+ .alignas = Payload.Fields.AlignAs.fieldAlign(ty, field_i, target),
};
c_field_i += 1;
}
- if (ty.isTupleOrAnonStruct()) {
- const anon_pl = try arena.create(Payload.Fields);
- anon_pl.* = .{ .base = .{ .tag = .anon_struct }, .data = fields_pl };
- return initPayload(anon_pl);
- }
+ switch (t) {
+ .fwd_anon_struct => {
+ const anon_pl = try arena.create(Payload.Fields);
+ anon_pl.* = .{ .base = .{ .tag = t }, .data = fields_pl };
+ return initPayload(anon_pl);
+ },
- const struct_pl = try arena.create(Payload.Aggregate);
- struct_pl.* = .{ .base = .{ .tag = t }, .data = .{
- .fields = fields_pl,
- .fwd_decl = store.set.typeToIndex(ty, target, .forward).?,
- } };
- return initPayload(struct_pl);
+ .anon_struct,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const struct_pl = try arena.create(Payload.Aggregate);
+ struct_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .fields = fields_pl,
+ .fwd_decl = store.set.typeToIndex(ty, target, .forward).?,
+ } };
+ return initPayload(struct_pl);
+ },
+
+ else => unreachable,
+ }
},
.Union => {
- const fields = ty.unionFields();
- const fields_len = fields.count();
+ const union_fields = ty.unionFields();
+ const fields_len = union_fields.count();
var c_fields_len: usize = 0;
for (0..fields_len) |field_i| {
@@ -1185,7 +1568,7 @@ pub const CType = extern union {
const fields_pl = try arena.alloc(Payload.Fields.Field, c_fields_len);
var field_i: usize = 0;
var c_field_i: usize = 0;
- var field_it = fields.iterator();
+ var field_it = union_fields.iterator();
while (field_it.next()) |field| {
defer field_i += 1;
if (!field.value_ptr.ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -1193,21 +1576,35 @@ pub const CType = extern union {
fields_pl[c_field_i] = .{
.name = try arena.dupeZ(u8, field.key_ptr.*),
.type = store.set.typeToIndex(field.value_ptr.ty, target, switch (kind) {
- .forward => unreachable,
- .complete, .parameter => .complete,
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .payload => .complete,
.global => .global,
}).?,
- .alignas = ty.structFieldAlign(field_i, target),
+ .alignas = Payload.Fields.AlignAs.fieldAlign(ty, field_i, target),
};
c_field_i += 1;
}
- const union_pl = try arena.create(Payload.Aggregate);
- union_pl.* = .{ .base = .{ .tag = t }, .data = .{
- .fields = fields_pl,
- .fwd_decl = store.set.typeToIndex(ty, target, .forward).?,
- } };
- return initPayload(union_pl);
+ switch (kind) {
+ .forward, .forward_parameter => unreachable,
+ .complete, .parameter, .global => {
+ const union_pl = try arena.create(Payload.Aggregate);
+ union_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .fields = fields_pl,
+ .fwd_decl = store.set.typeToIndex(ty, target, .forward).?,
+ } };
+ return initPayload(union_pl);
+ },
+ .payload => if (ty.unionTagTypeSafety()) |_| {
+ const union_pl = try arena.create(Payload.Unnamed);
+ union_pl.* = .{ .base = .{ .tag = t }, .data = .{
+ .fields = fields_pl,
+ .owner_decl = ty.getOwnerDecl(),
+ .id = 0,
+ } };
+ return initPayload(union_pl);
+ } else unreachable,
+ }
},
else => unreachable,
@@ -1217,9 +1614,10 @@ pub const CType = extern union {
.varargs_function,
=> {
const info = ty.fnInfo();
- const recurse_kind: Kind = switch (kind) {
- .forward => .forward,
- .complete, .parameter, .global => unreachable,
+ const param_kind: Kind = switch (kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
};
var c_params_len: usize = 0;
@@ -1232,13 +1630,13 @@ pub const CType = extern union {
var c_param_i: usize = 0;
for (info.param_types) |param_type| {
if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
- params_pl[c_param_i] = store.set.typeToIndex(param_type, target, recurse_kind).?;
+ params_pl[c_param_i] = store.set.typeToIndex(param_type, target, param_kind).?;
c_param_i += 1;
}
const fn_pl = try arena.create(Payload.Function);
fn_pl.* = .{ .base = .{ .tag = t }, .data = .{
- .return_type = store.set.typeToIndex(info.return_type, target, recurse_kind).?,
+ .return_type = store.set.typeToIndex(info.return_type, target, param_kind).?,
.param_types = params_pl,
} };
return initPayload(fn_pl);
@@ -1294,8 +1692,8 @@ pub const CType = extern union {
const target = self.lookup.getTarget();
switch (t) {
- .anon_struct,
- .packed_anon_struct,
+ .fwd_anon_struct,
+ .fwd_anon_union,
=> {
if (!ty.isTupleOrAnonStruct()) return false;
@@ -1313,26 +1711,38 @@ pub const CType = extern union {
const c_field = &c_fields[c_field_i];
c_field_i += 1;
- if (!self.eqlRecurse(
- ty.structFieldType(field_i),
- c_field.type,
- switch (self.kind) {
- .forward, .complete, .parameter => .complete,
- .global => .global,
- },
- ) or !mem.eql(
+ if (!self.eqlRecurse(field_ty, c_field.type, switch (self.kind) {
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
+ .global => .global,
+ .payload => unreachable,
+ }) or !mem.eql(
u8,
if (ty.isSimpleTuple())
std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
else
ty.structFieldName(field_i),
mem.span(c_field.name),
- ) or ty.structFieldAlign(field_i, target) != c_field.alignas)
- return false;
+ ) or Payload.Fields.AlignAs.fieldAlign(ty, field_i, target).@"align" !=
+ c_field.alignas.@"align") return false;
}
return true;
},
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => switch (self.kind) {
+ .forward, .forward_parameter, .complete, .parameter, .global => unreachable,
+ .payload => if (ty.unionTagTypeSafety()) |_| {
+ const data = cty.cast(Payload.Unnamed).?.data;
+ return ty.getOwnerDecl() == data.owner_decl and data.id == 0;
+ } else unreachable,
+ },
+
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
@@ -1350,19 +1760,27 @@ pub const CType = extern union {
const info = ty.fnInfo();
const data = cty.cast(Payload.Function).?.data;
- const recurse_kind: Kind = switch (self.kind) {
- .forward => .forward,
- .complete, .parameter, .global => unreachable,
+ const param_kind: Kind = switch (self.kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
};
- if (info.param_types.len != data.param_types.len or
- !self.eqlRecurse(info.return_type, data.return_type, recurse_kind))
+ if (!self.eqlRecurse(info.return_type, data.return_type, param_kind))
return false;
- for (info.param_types, data.param_types) |param_ty, param_cty| {
- if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
- if (!self.eqlRecurse(param_ty, param_cty, recurse_kind)) return false;
+
+ var c_param_i: usize = 0;
+ for (info.param_types) |param_type| {
+ if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
+
+ if (c_param_i >= data.param_types.len) return false;
+ const param_cty = data.param_types[c_param_i];
+ c_param_i += 1;
+
+ if (!self.eqlRecurse(param_type, param_cty, param_kind))
+ return false;
}
- return true;
+ return c_param_i == data.param_types.len;
},
else => unreachable,
@@ -1395,13 +1813,17 @@ pub const CType = extern union {
const target = self.lookup.getTarget();
switch (t) {
- .anon_struct,
- .packed_anon_struct,
+ .fwd_anon_struct,
+ .fwd_anon_union,
=> {
var name_buf: [
std.fmt.count("f{}", .{std.math.maxInt(usize)})
]u8 = undefined;
- for (0..ty.structFieldCount()) |field_i| {
+ for (0..switch (ty.zigTypeTag()) {
+ .Struct => ty.structFieldCount(),
+ .Union => ty.unionFields().count(),
+ else => unreachable,
+ }) |field_i| {
const field_ty = ty.structFieldType(field_i);
if (ty.structFieldIsComptime(field_i) or
!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -1410,18 +1832,37 @@ pub const CType = extern union {
hasher,
ty.structFieldType(field_i),
switch (self.kind) {
- .forward, .complete, .parameter => .complete,
+ .forward, .forward_parameter => .forward,
+ .complete, .parameter => .complete,
.global => .global,
+ .payload => unreachable,
},
);
hasher.update(if (ty.isSimpleTuple())
std.fmt.bufPrint(&name_buf, "f{}", .{field_i}) catch unreachable
else
ty.structFieldName(field_i));
- autoHash(hasher, ty.structFieldAlign(field_i, target));
+ autoHash(
+ hasher,
+ Payload.Fields.AlignAs.fieldAlign(ty, field_i, target).@"align",
+ );
}
},
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => switch (self.kind) {
+ .forward, .forward_parameter, .complete, .parameter, .global => unreachable,
+ .payload => if (ty.unionTagTypeSafety()) |_| {
+ autoHash(hasher, ty.getOwnerDecl());
+ autoHash(hasher, @as(u32, 0));
+ } else unreachable,
+ },
+
+ .anon_struct,
+ .anon_union,
.@"struct",
.@"union",
.packed_struct,
@@ -1432,15 +1873,16 @@ pub const CType = extern union {
.varargs_function,
=> {
const info = ty.fnInfo();
- const recurse_kind: Kind = switch (self.kind) {
- .forward => .forward,
- .complete, .parameter, .global => unreachable,
+ const param_kind: Kind = switch (self.kind) {
+ .forward, .forward_parameter => .forward_parameter,
+ .complete, .parameter, .global => .parameter,
+ .payload => unreachable,
};
- self.updateHasherRecurse(hasher, info.return_type, recurse_kind);
+ self.updateHasherRecurse(hasher, info.return_type, param_kind);
for (info.param_types) |param_type| {
if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
- self.updateHasherRecurse(hasher, param_type, recurse_kind);
+ self.updateHasherRecurse(hasher, param_type, param_kind);
}
},
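Note: with the new `.payload` kind, a safety-tagged union is modeled as an outer struct whose members are the unnamed payload union (the new `unnamed_union`/`packed_unnamed_union` tags) and the tag, again ordered by descending alignment. A hypothetical Zig analogue of the emitted layout (only the member names "payload" and "tag" come from the commit; the element types are invented):

    const std = @import("std");

    const Tag = enum(u8) { int, float };
    const TaggedWrapper = extern struct {
        payload: extern union {
            int: u32,
            float: f32,
        },
        tag: Tag,
    };

    comptime {
        // The payload member (align 4) sorts ahead of the tag (align 1).
        std.debug.assert(@alignOf(u32) > @alignOf(Tag));
        std.debug.assert(@sizeOf(TaggedWrapper) == 8);
    }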
src/codegen/c.zig
@@ -31,6 +31,7 @@ pub const CType = @import("c/type.zig").CType;
pub const CValue = union(enum) {
none: void,
+ new_local: LocalIndex,
local: LocalIndex,
/// Address of a local.
local_ref: LocalIndex,
@@ -38,6 +39,8 @@ pub const CValue = union(enum) {
constant: Air.Inst.Ref,
/// Index into the parameters
arg: usize,
+ /// The payload field of a parameter
+ arg_array: usize,
/// Index into a tuple's fields
field: usize,
/// By-value
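Note: the c.zig changes add two `CValue` variants: `new_local` marks a local that was just allocated (see `allocLocal`/`allocAlignedLocal` below) while printing identically to `local`, and `arg_array` refers to the `array` member of a wrapped array parameter. A minimal sketch of the rendering side (illustrative only, not the real `writeCValue`):

    const std = @import("std");

    const CValueSketch = union(enum) {
        local: u32,
        new_local: u32,
        arg: u32,
        arg_array: u32,

        fn write(self: @This(), w: anytype) !void {
            switch (self) {
                // A freshly allocated local prints exactly like a reused one.
                .local, .new_local => |i| try w.print("t{d}", .{i}),
                .arg => |i| try w.print("a{d}", .{i}),
                // A wrapped array argument is read through its "array" member.
                .arg_array => |i| try w.print("a{d}.array", .{i}),
            }
        }
    };

    test "local rendering" {
        var buf: [16]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buf);
        try (CValueSketch{ .new_local = 1 }).write(fbs.writer());
        try std.testing.expectEqualStrings("t1", fbs.getWritten());
    }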
@@ -298,7 +301,7 @@ pub const Function = struct {
const alignment = 0;
const decl_c_value = try f.allocLocalValue(ty, alignment);
const gpa = f.object.dg.gpa;
- try f.allocs.put(gpa, decl_c_value.local, true);
+ try f.allocs.put(gpa, decl_c_value.new_local, true);
try writer.writeAll("static ");
try f.object.dg.renderTypeAndName(writer, ty, decl_c_value, .@"const", alignment, .Complete);
try writer.writeAll(" = ");
@@ -330,12 +333,12 @@ pub const Function = struct {
.alignment = alignment,
.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1),
});
- return CValue{ .local = @intCast(LocalIndex, f.locals.items.len - 1) };
+ return CValue{ .new_local = @intCast(LocalIndex, f.locals.items.len - 1) };
}
fn allocLocal(f: *Function, inst: Air.Inst.Index, ty: Type) !CValue {
const result = try f.allocAlignedLocal(ty, .mut, 0);
- log.debug("%{d}: allocating t{d}", .{ inst, result.local });
+ log.debug("%{d}: allocating t{d}", .{ inst, result.new_local });
return result;
}
@@ -349,7 +352,7 @@ pub const Function = struct {
if (local.alignment >= alignment) {
local.loop_depth = @intCast(LoopDepth, f.free_locals_stack.items.len - 1);
_ = locals_list.swapRemove(i);
- return CValue{ .local = local_index };
+ return CValue{ .new_local = local_index };
}
}
}
@@ -488,8 +491,8 @@ pub const Object = struct {
pub const DeclGen = struct {
gpa: std.mem.Allocator,
module: *Module,
- decl: *Decl,
- decl_index: Decl.Index,
+ decl: ?*Decl,
+ decl_index: Decl.OptionalIndex,
fwd_decl: std.ArrayList(u8),
error_msg: ?*Module.ErrorMsg,
ctypes: CType.Store,
@@ -497,7 +500,7 @@ pub const DeclGen = struct {
fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) error{ AnalysisFail, OutOfMemory } {
@setCold(true);
const src = LazySrcLoc.nodeOffset(0);
- const src_loc = src.toSrcLoc(dg.decl);
+ const src_loc = src.toSrcLoc(dg.decl.?);
dg.error_msg = try Module.ErrorMsg.create(dg.gpa, src_loc, format, args);
return error.AnalysisFail;
}
@@ -816,7 +819,7 @@ pub const DeclGen = struct {
empty = false;
}
- if (empty) try writer.print("{x}", .{try dg.fmtIntLiteral(Type.u8, Value.undef)});
+
return writer.writeByte('}');
},
.Packed => return writer.print("{x}", .{try dg.fmtIntLiteral(ty, Value.undef)}),
@@ -1287,7 +1290,6 @@ pub const DeclGen = struct {
empty = false;
}
- if (empty) try writer.print("{}", .{try dg.fmtIntLiteral(Type.u8, Value.zero)});
try writer.writeByte('}');
},
.Packed => {
@@ -1304,7 +1306,7 @@ pub const DeclGen = struct {
const bit_offset_val = Value.initPayload(&bit_offset_val_pl.base);
var eff_num_fields: usize = 0;
- for (field_vals, 0..) |_, index| {
+ for (0..field_vals.len) |index| {
const field_ty = ty.structFieldType(index);
if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -1408,6 +1410,7 @@ pub const DeclGen = struct {
return;
}
+ var has_payload_init = false;
try writer.writeByte('{');
if (ty.unionTagTypeSafety()) |tag_ty| {
const layout = ty.unionGetLayout(target);
@@ -1416,7 +1419,10 @@ pub const DeclGen = struct {
try dg.renderValue(writer, tag_ty, union_obj.tag, initializer_type);
try writer.writeAll(", ");
}
- try writer.writeAll(".payload = {");
+ if (!ty.unionHasAllZeroBitFieldTypes()) {
+ try writer.writeAll(".payload = {");
+ has_payload_init = true;
+ }
}
var it = ty.unionFields().iterator();
@@ -1428,8 +1434,8 @@ pub const DeclGen = struct {
try writer.print(".{ } = ", .{fmtIdent(field.key_ptr.*)});
try dg.renderValue(writer, field.value_ptr.ty, Value.undef, initializer_type);
break;
- } else try writer.writeAll(".empty_union = 0");
- if (ty.unionTagTypeSafety()) |_| try writer.writeByte('}');
+ }
+ if (has_payload_init) try writer.writeByte('}');
try writer.writeByte('}');
},
@@ -1452,337 +1458,61 @@ pub const DeclGen = struct {
}
fn renderFunctionSignature(dg: *DeclGen, w: anytype, kind: TypedefKind, export_index: u32) !void {
- const fn_info = dg.decl.ty.fnInfo();
+ const store = &dg.ctypes.set;
+ const module = dg.module;
+
+ const fn_ty = dg.decl.?.ty;
+ const fn_cty_idx = try dg.typeToIndex(fn_ty, switch (kind) {
+ .Forward => .forward,
+ .Complete => .complete,
+ });
+
+ const fn_info = fn_ty.fnInfo();
if (fn_info.cc == .Naked) {
switch (kind) {
.Forward => try w.writeAll("zig_naked_decl "),
.Complete => try w.writeAll("zig_naked "),
}
}
- if (dg.decl.val.castTag(.function)) |func_payload|
+ if (dg.decl.?.val.castTag(.function)) |func_payload|
if (func_payload.data.is_cold) try w.writeAll("zig_cold ");
- const target = dg.module.getTarget();
- var ret_buf: LowerFnRetTyBuffer = undefined;
- const ret_ty = lowerFnRetTy(fn_info.return_type, &ret_buf, target);
-
- try dg.renderType(w, ret_ty, kind);
- try w.writeByte(' ');
+ const trailing = try renderTypePrefix(
+ dg.decl_index,
+ store.*,
+ module,
+ w,
+ fn_cty_idx,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try w.print("{}", .{trailing});
if (toCallingConvention(fn_info.cc)) |call_conv| {
try w.print("zig_callconv({s}) ", .{call_conv});
}
- if (fn_info.alignment > 0 and kind == .Complete) try w.print(" zig_align_fn({})", .{fn_info.alignment});
+ if (fn_info.alignment > 0 and kind == .Complete) {
+ try w.print(" zig_align_fn({})", .{fn_info.alignment});
+ }
- try dg.renderDeclName(w, dg.decl_index, export_index);
- try w.writeByte('(');
+ try dg.renderDeclName(w, dg.decl_index.unwrap().?, export_index);
- var index: usize = 0;
- for (fn_info.param_types) |param_type| {
- if (!param_type.hasRuntimeBitsIgnoreComptime()) continue;
- if (index > 0) try w.writeAll(", ");
- const name = CValue{ .arg = index };
- try dg.renderTypeAndName(w, param_type, name, .@"const", 0, kind);
- index += 1;
- }
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, fn_cty_idx, .suffix);
- if (fn_info.is_var_args) {
- if (index > 0) try w.writeAll(", ");
- try w.writeAll("...");
- } else if (index == 0) {
- try dg.renderType(w, Type.void, kind);
+ if (fn_info.alignment > 0 and kind == .Forward) {
+ try w.print(" zig_align_fn({})", .{fn_info.alignment});
}
- try w.writeByte(')');
- if (fn_info.alignment > 0 and kind == .Forward) try w.print(" zig_align_fn({})", .{fn_info.alignment});
}
fn indexToCType(dg: *DeclGen, idx: CType.Index) CType {
return dg.ctypes.indexToCType(idx);
}
- fn typeToCType(dg: *DeclGen, ty: Type) !CType {
- return dg.ctypes.typeToCType(dg.gpa, ty, dg.module);
- }
- fn typeToIndex(dg: *DeclGen, ty: Type) !CType.Index {
- return dg.ctypes.typeToIndex(dg.gpa, ty, dg.module);
- }
-
- const CTypeFix = enum { prefix, suffix };
- const CQualifiers = std.enums.EnumSet(enum { @"const", @"volatile", restrict });
- const CTypeRenderTrailing = enum {
- no_space,
- maybe_space,
-
- pub fn format(
- self: @This(),
- comptime fmt: []const u8,
- _: std.fmt.FormatOptions,
- w: anytype,
- ) @TypeOf(w).Error!void {
- if (fmt.len != 0)
- @compileError("invalid format string '" ++ fmt ++ "' for type '" ++
- @typeName(@This()) ++ "'");
- comptime assert(fmt.len == 0);
- switch (self) {
- .no_space => {},
- .maybe_space => try w.writeByte(' '),
- }
- }
- };
- fn renderTypePrefix(
- dg: *DeclGen,
- w: anytype,
- idx: CType.Index,
- parent_fix: CTypeFix,
- qualifiers: CQualifiers,
- ) @TypeOf(w).Error!CTypeRenderTrailing {
- var trailing = CTypeRenderTrailing.maybe_space;
-
- const cty = dg.indexToCType(idx);
- switch (cty.tag()) {
- .void,
- .char,
- .@"signed char",
- .short,
- .int,
- .long,
- .@"long long",
- ._Bool,
- .@"unsigned char",
- .@"unsigned short",
- .@"unsigned int",
- .@"unsigned long",
- .@"unsigned long long",
- .float,
- .double,
- .@"long double",
- .bool,
- .size_t,
- .ptrdiff_t,
- .uint8_t,
- .int8_t,
- .uint16_t,
- .int16_t,
- .uint32_t,
- .int32_t,
- .uint64_t,
- .int64_t,
- .uintptr_t,
- .intptr_t,
- .zig_u128,
- .zig_i128,
- .zig_f16,
- .zig_f32,
- .zig_f64,
- .zig_f80,
- .zig_f128,
- => |tag| try w.writeAll(@tagName(tag)),
-
- .pointer,
- .pointer_const,
- .pointer_volatile,
- .pointer_const_volatile,
- => |tag| {
- const child_idx = cty.cast(CType.Payload.Child).?.data;
- try w.print("{}*", .{try dg.renderTypePrefix(w, child_idx, .prefix, CQualifiers.init(.{
- .@"const" = switch (tag) {
- .pointer, .pointer_volatile => false,
- .pointer_const, .pointer_const_volatile => true,
- else => unreachable,
- },
- .@"volatile" = switch (tag) {
- .pointer, .pointer_const => false,
- .pointer_volatile, .pointer_const_volatile => true,
- else => unreachable,
- },
- }))});
- trailing = .no_space;
- },
-
- .array,
- .vector,
- => {
- const child_idx = cty.cast(CType.Payload.Sequence).?.data.elem_type;
- const child_trailing = try dg.renderTypePrefix(w, child_idx, .suffix, qualifiers);
- switch (parent_fix) {
- .prefix => {
- try w.print("{}(", .{child_trailing});
- return .no_space;
- },
- .suffix => return child_trailing,
- }
- },
-
- .fwd_struct,
- .fwd_union,
- .anon_struct,
- .packed_anon_struct,
- => |tag| try w.print("{s} {}__{d}", .{
- switch (tag) {
- .fwd_struct,
- .anon_struct,
- .packed_anon_struct,
- => "struct",
- .fwd_union => "union",
- else => unreachable,
- },
- fmtIdent(switch (tag) {
- .fwd_struct,
- .fwd_union,
- => mem.span(dg.module.declPtr(cty.cast(CType.Payload.FwdDecl).?.data).name),
- .anon_struct,
- .packed_anon_struct,
- => "anon",
- else => unreachable,
- }),
- idx,
- }),
-
- .@"struct",
- .packed_struct,
- .@"union",
- .packed_union,
- => return dg.renderTypePrefix(
- w,
- cty.cast(CType.Payload.Aggregate).?.data.fwd_decl,
- parent_fix,
- qualifiers,
- ),
-
- .function,
- .varargs_function,
- => {
- const child_trailing = try dg.renderTypePrefix(
- w,
- cty.cast(CType.Payload.Function).?.data.return_type,
- .suffix,
- CQualifiers.initEmpty(),
- );
- switch (parent_fix) {
- .prefix => {
- try w.print("{}(", .{child_trailing});
- return .no_space;
- },
- .suffix => return child_trailing,
- }
- },
- }
-
- var qualifier_it = qualifiers.iterator();
- while (qualifier_it.next()) |qualifier| {
- try w.print("{}{s}", .{ trailing, @tagName(qualifier) });
- trailing = .maybe_space;
- }
-
- return trailing;
+ fn typeToIndex(dg: *DeclGen, ty: Type, kind: CType.Kind) !CType.Index {
+ return dg.ctypes.typeToIndex(dg.gpa, ty, dg.module, kind);
}
- fn renderTypeSuffix(
- dg: *DeclGen,
- w: anytype,
- idx: CType.Index,
- parent_fix: CTypeFix,
- ) @TypeOf(w).Error!void {
- const cty = dg.indexToCType(idx);
- switch (cty.tag()) {
- .void,
- .char,
- .@"signed char",
- .short,
- .int,
- .long,
- .@"long long",
- ._Bool,
- .@"unsigned char",
- .@"unsigned short",
- .@"unsigned int",
- .@"unsigned long",
- .@"unsigned long long",
- .float,
- .double,
- .@"long double",
- .bool,
- .size_t,
- .ptrdiff_t,
- .uint8_t,
- .int8_t,
- .uint16_t,
- .int16_t,
- .uint32_t,
- .int32_t,
- .uint64_t,
- .int64_t,
- .uintptr_t,
- .intptr_t,
- .zig_u128,
- .zig_i128,
- .zig_f16,
- .zig_f32,
- .zig_f64,
- .zig_f80,
- .zig_f128,
- => {},
-
- .pointer,
- .pointer_const,
- .pointer_volatile,
- .pointer_const_volatile,
- => try dg.renderTypeSuffix(w, cty.cast(CType.Payload.Child).?.data, .prefix),
-
- .array,
- .vector,
- => {
- switch (parent_fix) {
- .prefix => try w.writeByte(')'),
- .suffix => {},
- }
-
- try w.print("[{}]", .{cty.cast(CType.Payload.Sequence).?.data.len});
- try dg.renderTypeSuffix(w, cty.cast(CType.Payload.Sequence).?.data.elem_type, .suffix);
- },
-
- .fwd_struct,
- .fwd_union,
- .anon_struct,
- .packed_anon_struct,
- .@"struct",
- .@"union",
- .packed_struct,
- .packed_union,
- => {},
-
- .function,
- .varargs_function,
- => |tag| {
- switch (parent_fix) {
- .prefix => try w.writeByte(')'),
- .suffix => {},
- }
-
- const data = cty.cast(CType.Payload.Function).?.data;
-
- try w.writeByte('(');
- var need_comma = false;
- for (data.param_types) |param_type| {
- if (need_comma) try w.writeAll(", ");
- need_comma = true;
- _ = try dg.renderTypePrefix(w, param_type, .suffix, CQualifiers.initEmpty());
- try dg.renderTypeSuffix(w, param_type, .suffix);
- }
- switch (tag) {
- .function => {},
- .varargs_function => {
- if (need_comma) try w.writeAll(", ");
- need_comma = true;
- try w.writeAll("...");
- },
- else => unreachable,
- }
- if (!need_comma) try w.writeAll("void");
- try w.writeByte(')');
-
- try dg.renderTypeSuffix(w, data.return_type, .suffix);
- },
- }
+ fn typeToCType(dg: *DeclGen, ty: Type, kind: CType.Kind) !CType {
+ return dg.ctypes.typeToCType(dg.gpa, ty, dg.module, kind);
}
/// Renders a type as a single identifier, generating intermediate typedefs
@@ -1803,9 +1533,19 @@ pub const DeclGen = struct {
t: Type,
_: TypedefKind,
) error{ OutOfMemory, AnalysisFail }!void {
- const idx = try dg.typeToIndex(t);
- _ = try dg.renderTypePrefix(w, idx, .suffix, CQualifiers.initEmpty());
- try dg.renderTypeSuffix(w, idx, .suffix);
+ const store = &dg.ctypes.set;
+ const module = dg.module;
+ const idx = try dg.typeToIndex(t, .complete);
+ _ = try renderTypePrefix(
+ dg.decl_index,
+ store.*,
+ module,
+ w,
+ idx,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, idx, .suffix);
}
const IntCastContext = union(enum) {
@@ -1939,24 +1679,28 @@ pub const DeclGen = struct {
alignment: u32,
_: TypedefKind,
) error{ OutOfMemory, AnalysisFail }!void {
- if (alignment != 0) {
- const abi_alignment = ty.abiAlignment(dg.module.getTarget());
- if (alignment < abi_alignment) {
- try w.print("zig_under_align({}) ", .{alignment});
- } else if (alignment > abi_alignment) {
- try w.print("zig_align({}) ", .{alignment});
- }
- }
+ const store = &dg.ctypes.set;
+ const module = dg.module;
- const idx = try dg.typeToIndex(ty);
- try w.print("{}", .{try dg.renderTypePrefix(w, idx, .suffix, CQualifiers.init(.{
- .@"const" = switch (mutability) {
- .mut => false,
- .@"const" => true,
- },
- }))});
+ if (alignment != 0) switch (std.math.order(alignment, ty.abiAlignment(dg.module.getTarget()))) {
+ .lt => try w.print("zig_under_align({}) ", .{alignment}),
+ .eq => {},
+ .gt => try w.print("zig_align({}) ", .{alignment}),
+ };
+
+ const idx = try dg.typeToIndex(ty, .complete);
+ const trailing = try renderTypePrefix(
+ dg.decl_index,
+ store.*,
+ module,
+ w,
+ idx,
+ .suffix,
+ CQualifiers.init(.{ .@"const" = mutability == .@"const" }),
+ );
+ try w.print("{}", .{trailing});
try dg.writeCValue(w, name);
- try dg.renderTypeSuffix(w, idx, .suffix);
+ try renderTypeSuffix(dg.decl_index, store.*, module, w, idx, .suffix);
}
fn renderTagNameFn(dg: *DeclGen, w: anytype, fn_name: []const u8, enum_ty: Type) !void {
@@ -2029,10 +1773,11 @@ pub const DeclGen = struct {
fn writeCValue(dg: *DeclGen, w: anytype, c_value: CValue) !void {
switch (c_value) {
.none => unreachable,
- .local => |i| return w.print("t{d}", .{i}),
+ .local, .new_local => |i| return w.print("t{d}", .{i}),
.local_ref => |i| return w.print("&t{d}", .{i}),
.constant => unreachable,
.arg => |i| return w.print("a{d}", .{i}),
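+ // An arg_array names the struct wrapper passed for a parameter that lowers to
+ // an array; it is accessed through the wrapper's `array` member.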
+ .arg_array => |i| return dg.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" }),
.field => |i| return w.print("f{d}", .{i}),
.decl => |decl| return dg.renderDeclName(w, decl, 0),
.decl_ref => |decl| {
@@ -2048,10 +1793,15 @@ pub const DeclGen = struct {
fn writeCValueDeref(dg: *DeclGen, w: anytype, c_value: CValue) !void {
switch (c_value) {
.none => unreachable,
- .local => |i| return w.print("(*t{d})", .{i}),
+ .local, .new_local => |i| return w.print("(*t{d})", .{i}),
.local_ref => |i| return w.print("t{d}", .{i}),
.constant => unreachable,
.arg => |i| return w.print("(*a{d})", .{i}),
+ .arg_array => |i| {
+ try w.writeAll("(*");
+ try dg.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" });
+ return w.writeByte(')');
+ },
.field => |i| return w.print("f{d}", .{i}),
.decl => |decl| {
try w.writeAll("(*");
@@ -2078,7 +1828,7 @@ pub const DeclGen = struct {
fn writeCValueDerefMember(dg: *DeclGen, writer: anytype, c_value: CValue, member: CValue) !void {
switch (c_value) {
.none, .constant, .field, .undef => unreachable,
- .local, .arg, .decl, .identifier, .bytes => {
+ .new_local, .local, .arg, .arg_array, .decl, .identifier, .bytes => {
try dg.writeCValue(writer, c_value);
try writer.writeAll("->");
},
@@ -2205,10 +1955,491 @@ pub const DeclGen = struct {
}
};
-pub fn genGlobalAsm(mod: *Module, code: *std.ArrayList(u8)) !void {
+const CTypeFix = enum { prefix, suffix };
+const CQualifiers = std.enums.EnumSet(enum { @"const", @"volatile", restrict });
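+/// Tracks whether the token just written still needs a separating space before
+/// the next identifier or qualifier; formatting a value of this enum with `{}`
+/// emits that space (or nothing).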
+const CTypeRenderTrailing = enum {
+ no_space,
+ maybe_space,
+
+ pub fn format(
+ self: @This(),
+ comptime fmt: []const u8,
+ _: std.fmt.FormatOptions,
+ w: anytype,
+ ) @TypeOf(w).Error!void {
+ if (fmt.len != 0)
+ @compileError("invalid format string '" ++ fmt ++ "' for type '" ++
+ @typeName(@This()) ++ "'");
+ switch (self) {
+ .no_space => {},
+ .maybe_space => try w.writeByte(' '),
+ }
+ }
+};
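+/// Writes the C tag ("struct" or "union"), the given attribute string (e.g.
+/// "zig_packed("), and the mangled name of a forward declaration: anonymous
+/// forward declarations get a lazy `anon__lazy_<index>` name, while named ones
+/// combine the owner decl's identifier with its decl index.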
+fn renderTypeName(
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ cty: CType,
+ attributes: []const u8,
+) !void {
+ switch (cty.tag()) {
+ else => unreachable,
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => |tag| try w.print("{s} {s}anon__lazy_{d}", .{
+ @tagName(tag)["fwd_anon_".len..],
+ attributes,
+ idx,
+ }),
+
+ .fwd_struct,
+ .fwd_union,
+ => |tag| {
+ const owner_decl = cty.cast(CType.Payload.FwdDecl).?.data;
+ try w.print("{s} {s}{}__{d}", .{
+ @tagName(tag)["fwd_".len..],
+ attributes,
+ fmtIdent(mem.span(mod.declPtr(owner_decl).name)),
+ @enumToInt(owner_decl),
+ });
+ },
+ }
+}
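+/// Renders the part of a C declarator that comes before the declared name: the
+/// base type, pointers, qualifiers, and any opening parentheses required for
+/// array or function declarators. Returns whether a space is still needed
+/// before the name; renderTypeSuffix emits everything after the name.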
+fn renderTypePrefix(
+ decl: Decl.OptionalIndex,
+ store: CType.Store.Set,
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ parent_fix: CTypeFix,
+ qualifiers: CQualifiers,
+) @TypeOf(w).Error!CTypeRenderTrailing {
+ var trailing = CTypeRenderTrailing.maybe_space;
+
+ const cty = store.indexToCType(idx);
+ switch (cty.tag()) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ => |tag| try w.writeAll(@tagName(tag)),
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => |tag| {
+ const child_idx = cty.cast(CType.Payload.Child).?.data;
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ child_idx,
+ .prefix,
+ CQualifiers.init(.{ .@"const" = switch (tag) {
+ .pointer, .pointer_volatile => false,
+ .pointer_const, .pointer_const_volatile => true,
+ else => unreachable,
+ }, .@"volatile" = switch (tag) {
+ .pointer, .pointer_const => false,
+ .pointer_volatile, .pointer_const_volatile => true,
+ else => unreachable,
+ } }),
+ );
+ try w.print("{}*", .{child_trailing});
+ trailing = .no_space;
+ },
+
+ .array,
+ .vector,
+ => {
+ const child_idx = cty.cast(CType.Payload.Sequence).?.data.elem_type;
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ child_idx,
+ .suffix,
+ qualifiers,
+ );
+ switch (parent_fix) {
+ .prefix => {
+ try w.print("{}(", .{child_trailing});
+ return .no_space;
+ },
+ .suffix => return child_trailing,
+ }
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ => if (decl.unwrap()) |decl_index|
+ try w.print("anon__{d}_{d}", .{ @enumToInt(decl_index), idx })
+ else
+ try renderTypeName(mod, w, idx, cty, ""),
+
+ .fwd_struct,
+ .fwd_union,
+ => try renderTypeName(mod, w, idx, cty, ""),
+
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ => |tag| {
+ const name = @tagName(tag);
+ try w.print("{s} {s}", .{
+ if (cty.isPacked()) name["packed_unnamed_".len..] else name["unnamed_".len..],
+ if (cty.isPacked()) "zig_packed(" else "",
+ });
+ try renderAggregateFields(mod, w, store, cty, 1);
+ if (cty.isPacked()) try w.writeByte(')');
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => return renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Aggregate).?.data.fwd_decl,
+ parent_fix,
+ qualifiers,
+ ),
+
+ .function,
+ .varargs_function,
+ => {
+ const child_trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Function).?.data.return_type,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ switch (parent_fix) {
+ .prefix => {
+ try w.print("{}(", .{child_trailing});
+ return .no_space;
+ },
+ .suffix => return child_trailing,
+ }
+ },
+ }
+
+ var qualifier_it = qualifiers.iterator();
+ while (qualifier_it.next()) |qualifier| {
+ try w.print("{}{s}", .{ trailing, @tagName(qualifier) });
+ trailing = .maybe_space;
+ }
+
+ return trailing;
+}
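+/// Renders the part of a C declarator that follows the declared name: closing
+/// parentheses, array extents, and function parameter lists.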
+fn renderTypeSuffix(
+ decl: Decl.OptionalIndex,
+ store: CType.Store.Set,
+ mod: *Module,
+ w: anytype,
+ idx: CType.Index,
+ parent_fix: CTypeFix,
+) @TypeOf(w).Error!void {
+ const cty = store.indexToCType(idx);
+ switch (cty.tag()) {
+ .void,
+ .char,
+ .@"signed char",
+ .short,
+ .int,
+ .long,
+ .@"long long",
+ ._Bool,
+ .@"unsigned char",
+ .@"unsigned short",
+ .@"unsigned int",
+ .@"unsigned long",
+ .@"unsigned long long",
+ .float,
+ .double,
+ .@"long double",
+ .bool,
+ .size_t,
+ .ptrdiff_t,
+ .uint8_t,
+ .int8_t,
+ .uint16_t,
+ .int16_t,
+ .uint32_t,
+ .int32_t,
+ .uint64_t,
+ .int64_t,
+ .uintptr_t,
+ .intptr_t,
+ .zig_u128,
+ .zig_i128,
+ .zig_f16,
+ .zig_f32,
+ .zig_f64,
+ .zig_f80,
+ .zig_f128,
+ => {},
+
+ .pointer,
+ .pointer_const,
+ .pointer_volatile,
+ .pointer_const_volatile,
+ => try renderTypeSuffix(decl, store, mod, w, cty.cast(CType.Payload.Child).?.data, .prefix),
+
+ .array,
+ .vector,
+ => {
+ switch (parent_fix) {
+ .prefix => try w.writeByte(')'),
+ .suffix => {},
+ }
+
+ try w.print("[{}]", .{cty.cast(CType.Payload.Sequence).?.data.len});
+ try renderTypeSuffix(
+ decl,
+ store,
+ mod,
+ w,
+ cty.cast(CType.Payload.Sequence).?.data.elem_type,
+ .suffix,
+ );
+ },
+
+ .fwd_anon_struct,
+ .fwd_anon_union,
+ .fwd_struct,
+ .fwd_union,
+ .unnamed_struct,
+ .unnamed_union,
+ .packed_unnamed_struct,
+ .packed_unnamed_union,
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {},
+
+ .function,
+ .varargs_function,
+ => |tag| {
+ switch (parent_fix) {
+ .prefix => try w.writeByte(')'),
+ .suffix => {},
+ }
+
+ const data = cty.cast(CType.Payload.Function).?.data;
+
+ try w.writeByte('(');
+ var need_comma = false;
+ for (data.param_types, 0..) |param_type, param_i| {
+ if (need_comma) try w.writeAll(", ");
+ need_comma = true;
+ const trailing = try renderTypePrefix(
+ decl,
+ store,
+ mod,
+ w,
+ param_type,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try w.print("{}a{d}", .{ trailing, param_i });
+ try renderTypeSuffix(decl, store, mod, w, param_type, .suffix);
+ }
+ switch (tag) {
+ .function => {},
+ .varargs_function => {
+ if (need_comma) try w.writeAll(", ");
+ need_comma = true;
+ try w.writeAll("...");
+ },
+ else => unreachable,
+ }
+ if (!need_comma) try w.writeAll("void");
+ try w.writeByte(')');
+
+ try renderTypeSuffix(decl, store, mod, w, data.return_type, .suffix);
+ },
+ }
+}
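+/// Emits the brace-enclosed field list of an aggregate, annotating any field
+/// whose requested alignment differs from its ABI alignment with zig_align or
+/// zig_under_align.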
+fn renderAggregateFields(
+ mod: *Module,
+ writer: anytype,
+ store: CType.Store.Set,
+ cty: CType,
+ indent: usize,
+) !void {
+ try writer.writeAll("{\n");
+ const fields = cty.fields();
+ for (fields) |field| {
+ try writer.writeByteNTimes(' ', indent + 1);
+ switch (std.math.order(field.alignas.@"align", field.alignas.abi)) {
+ .lt => try writer.print("zig_under_align({}) ", .{field.alignas.getAlign()}),
+ .eq => {},
+ .gt => try writer.print("zig_align({}) ", .{field.alignas.getAlign()}),
+ }
+ const trailing = try renderTypePrefix(
+ .none,
+ store,
+ mod,
+ writer,
+ field.type,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try writer.print("{}{ }", .{ trailing, fmtIdent(mem.span(field.name)) });
+ try renderTypeSuffix(.none, store, mod, writer, field.type, .suffix);
+ try writer.writeAll(";\n");
+ }
+ try writer.writeByteNTimes(' ', indent);
+ try writer.writeByte('}');
+}
+
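+/// Emits the global declaration for a CType the first time it is seen: a
+/// typedef tying a decl's local anonymous-type name to the global lazily named
+/// one, a forward declaration for named aggregates, or a full definition with
+/// its field list.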
+pub fn genTypeDecl(
+ mod: *Module,
+ writer: anytype,
+ global_store: CType.Store.Set,
+ global_idx: CType.Index,
+ decl: Decl.OptionalIndex,
+ decl_store: CType.Store.Set,
+ decl_idx: CType.Index,
+ found_existing: bool,
+) !void {
+ const global_cty = global_store.indexToCType(global_idx);
+ switch (global_cty.tag()) {
+ .fwd_anon_struct => if (decl != .none) {
+ try writer.writeAll("typedef ");
+ _ = try renderTypePrefix(
+ .none,
+ global_store,
+ mod,
+ writer,
+ global_idx,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try writer.writeByte(' ');
+ _ = try renderTypePrefix(
+ decl,
+ decl_store,
+ mod,
+ writer,
+ decl_idx,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try writer.writeAll(";\n");
+ },
+
+ .fwd_struct,
+ .fwd_union,
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => |tag| if (!found_existing) {
+ switch (tag) {
+ .fwd_struct,
+ .fwd_union,
+ => {
+ const owner_decl = global_cty.cast(CType.Payload.FwdDecl).?.data;
+ _ = try renderTypePrefix(
+ .none,
+ global_store,
+ mod,
+ writer,
+ global_idx,
+ .suffix,
+ CQualifiers.init(.{}),
+ );
+ try writer.writeAll("; // ");
+ try mod.declPtr(owner_decl).renderFullyQualifiedName(mod, writer);
+ try writer.writeByte('\n');
+ },
+
+ .anon_struct,
+ .anon_union,
+ .@"struct",
+ .@"union",
+ .packed_struct,
+ .packed_union,
+ => {
+ const fwd_idx = global_cty.cast(CType.Payload.Aggregate).?.data.fwd_decl;
+ try renderTypeName(
+ mod,
+ writer,
+ fwd_idx,
+ global_store.indexToCType(fwd_idx),
+ if (global_cty.isPacked()) "zig_packed(" else "",
+ );
+ try writer.writeByte(' ');
+ try renderAggregateFields(mod, writer, global_store, global_cty, 0);
+ if (global_cty.isPacked()) try writer.writeByte(')');
+ try writer.writeAll(";\n");
+ },
+
+ else => unreachable,
+ }
+ },
+
+ else => {},
+ }
+}
+
+pub fn genGlobalAsm(mod: *Module, writer: anytype) !void {
var it = mod.global_assembly.valueIterator();
while (it.next()) |asm_source| {
- try code.writer().print("__asm({s});\n", .{fmtStringLiteral(asm_source.*)});
+ try writer.print("__asm({s});\n", .{fmtStringLiteral(asm_source.*)});
}
}
@@ -2279,14 +2510,16 @@ fn genExports(o: *Object) !void {
defer tracy.end();
const fwd_decl_writer = o.dg.fwd_decl.writer();
- if (o.dg.module.decl_exports.get(o.dg.decl_index)) |exports| for (exports.items[1..], 0..) |@"export", i| {
- try fwd_decl_writer.writeAll("zig_export(");
- try o.dg.renderFunctionSignature(fwd_decl_writer, .Forward, @intCast(u32, 1 + i));
- try fwd_decl_writer.print(", {s}, {s});\n", .{
- fmtStringLiteral(exports.items[0].options.name),
- fmtStringLiteral(@"export".options.name),
- });
- };
+ if (o.dg.module.decl_exports.get(o.dg.decl_index.unwrap().?)) |exports| {
+ for (exports.items[1..], 1..) |@"export", i| {
+ try fwd_decl_writer.writeAll("zig_export(");
+ try o.dg.renderFunctionSignature(fwd_decl_writer, .Forward, @intCast(u32, i));
+ try fwd_decl_writer.print(", {s}, {s});\n", .{
+ fmtStringLiteral(exports.items[0].options.name),
+ fmtStringLiteral(@"export".options.name),
+ });
+ }
+ }
}
pub fn genLazyFn(o: *Object, lazy_fn: LazyFnMap.Entry) !void {
@@ -2307,8 +2540,8 @@ pub fn genFunc(f: *Function) !void {
const o = &f.object;
const gpa = o.dg.gpa;
const tv: TypedValue = .{
- .ty = o.dg.decl.ty,
- .val = o.dg.decl.val,
+ .ty = o.dg.decl.?.ty,
+ .val = o.dg.decl.?.val,
};
o.code_header = std.ArrayList(u8).init(gpa);
@@ -2347,9 +2580,8 @@ pub fn genFunc(f: *Function) !void {
// missing. These are added now to complete the map. Then we can sort by
// alignment, descending.
const free_locals = f.getFreeLocals();
- const values = f.allocs.values();
- for (f.allocs.keys(), 0..) |local_index, i| {
- if (values[i]) continue; // static
+ for (f.allocs.keys(), f.allocs.values()) |local_index, value| {
+ if (value) continue; // static
const local = f.locals.items[local_index];
log.debug("inserting local {d} into free_locals", .{local_index});
const gop = try free_locals.getOrPutContext(gpa, local.ty, f.tyHashCtx());
@@ -2398,10 +2630,10 @@ pub fn genDecl(o: *Object) !void {
const tracy = trace(@src());
defer tracy.end();
- const tv: TypedValue = .{
- .ty = o.dg.decl.ty,
- .val = o.dg.decl.val,
- };
+ const decl = o.dg.decl.?;
+ const decl_c_value: CValue = .{ .decl = o.dg.decl_index.unwrap().? };
+ const tv: TypedValue = .{ .ty = decl.ty, .val = decl.val };
+
if (!tv.ty.isFnOrHasRuntimeBitsIgnoreComptime()) return;
if (tv.val.tag() == .extern_fn) {
const fwd_decl_writer = o.dg.fwd_decl.writer();
@@ -2415,11 +2647,9 @@ pub fn genDecl(o: *Object) !void {
const is_global = o.dg.declIsGlobal(tv) or variable.is_extern;
const fwd_decl_writer = o.dg.fwd_decl.writer();
- const decl_c_value = CValue{ .decl = o.dg.decl_index };
-
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
if (variable.is_threadlocal) try fwd_decl_writer.writeAll("zig_threadlocal ");
- try o.dg.renderTypeAndName(fwd_decl_writer, o.dg.decl.ty, decl_c_value, .mut, o.dg.decl.@"align", .Complete);
+ try o.dg.renderTypeAndName(fwd_decl_writer, decl.ty, decl_c_value, .mut, decl.@"align", .Complete);
try fwd_decl_writer.writeAll(";\n");
try genExports(o);
@@ -2428,27 +2658,26 @@ pub fn genDecl(o: *Object) !void {
const w = o.writer();
if (!is_global) try w.writeAll("static ");
if (variable.is_threadlocal) try w.writeAll("zig_threadlocal ");
- if (o.dg.decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
- try o.dg.renderTypeAndName(w, o.dg.decl.ty, decl_c_value, .mut, o.dg.decl.@"align", .Complete);
- if (o.dg.decl.@"linksection" != null) try w.writeAll(", read, write)");
+ if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
+ try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .mut, decl.@"align", .Complete);
+ if (decl.@"linksection" != null) try w.writeAll(", read, write)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, variable.init, .StaticInitializer);
try w.writeByte(';');
try o.indent_writer.insertNewline();
} else {
- const is_global = o.dg.module.decl_exports.contains(o.dg.decl_index);
+ const is_global = o.dg.module.decl_exports.contains(decl_c_value.decl);
const fwd_decl_writer = o.dg.fwd_decl.writer();
- const decl_c_value: CValue = .{ .decl = o.dg.decl_index };
try fwd_decl_writer.writeAll(if (is_global) "zig_extern " else "static ");
- try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, .@"const", o.dg.decl.@"align", .Complete);
+ try o.dg.renderTypeAndName(fwd_decl_writer, tv.ty, decl_c_value, .@"const", decl.@"align", .Complete);
try fwd_decl_writer.writeAll(";\n");
const w = o.writer();
if (!is_global) try w.writeAll("static ");
- if (o.dg.decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
- try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .@"const", o.dg.decl.@"align", .Complete);
- if (o.dg.decl.@"linksection" != null) try w.writeAll(", read)");
+ if (decl.@"linksection") |section| try w.print("zig_linksection(\"{s}\", ", .{section});
+ try o.dg.renderTypeAndName(w, tv.ty, decl_c_value, .@"const", decl.@"align", .Complete);
+ if (decl.@"linksection" != null) try w.writeAll(", read)");
try w.writeAll(" = ");
try o.dg.renderValue(w, tv.ty, tv.val, .StaticInitializer);
try w.writeAll(";\n");
@@ -2460,8 +2689,8 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
defer tracy.end();
const tv: TypedValue = .{
- .ty = dg.decl.ty,
- .val = dg.decl.val,
+ .ty = dg.decl.?.ty,
+ .val = dg.decl.?.val,
};
const writer = dg.fwd_decl.writer();
@@ -2499,7 +2728,7 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
// zig fmt: off
.constant => unreachable, // excluded from function bodies
.const_ty => unreachable, // excluded from function bodies
- .arg => airArg(f),
+ .arg => try airArg(f, inst),
.breakpoint => try airBreakpoint(f.object.writer()),
.ret_addr => try airRetAddr(f, inst),
@@ -2748,13 +2977,14 @@ fn genBodyInner(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail,
.c_va_start => return f.fail("TODO implement c_va_start", .{}),
// zig fmt: on
};
- if (result_value == .local) {
- log.debug("map %{d} to t{d}", .{ inst, result_value.local });
- }
- switch (result_value) {
- .none => {},
- else => try f.value_map.putNoClobber(Air.indexToRef(inst), result_value),
+ if (result_value == .new_local) {
+ log.debug("map %{d} to t{d}", .{ inst, result_value.new_local });
}
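+ // A .new_local result is recorded in the value map as a plain .local, and a
+ // .none result adds no mapping at all.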
+ try f.value_map.putNoClobber(Air.indexToRef(inst), switch (result_value) {
+ .none => continue,
+ .new_local => |i| .{ .local = i },
+ else => result_value,
+ });
}
}
@@ -2979,10 +3209,10 @@ fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
const mutability: Mutability = if (inst_ty.isConstPtr()) .@"const" else .mut;
const target = f.object.dg.module.getTarget();
const local = try f.allocAlignedLocal(elem_type, mutability, inst_ty.ptrAlignment(target));
- log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.local });
+ log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
const gpa = f.object.dg.module.gpa;
- try f.allocs.put(gpa, local.local, false);
- return CValue{ .local_ref = local.local };
+ try f.allocs.put(gpa, local.new_local, false);
+ return CValue{ .local_ref = local.new_local };
}
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -2996,16 +3226,22 @@ fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
const mutability: Mutability = if (inst_ty.isConstPtr()) .@"const" else .mut;
const target = f.object.dg.module.getTarget();
const local = try f.allocAlignedLocal(elem_ty, mutability, inst_ty.ptrAlignment(target));
- log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.local });
+ log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
const gpa = f.object.dg.module.gpa;
- try f.allocs.put(gpa, local.local, false);
- return CValue{ .local_ref = local.local };
+ try f.allocs.put(gpa, local.new_local, false);
+ return CValue{ .local_ref = local.new_local };
}
-fn airArg(f: *Function) CValue {
+fn airArg(f: *Function, inst: Air.Inst.Index) !CValue {
+ const inst_ty = f.air.typeOfIndex(inst);
+ const inst_cty = try f.object.dg.typeToIndex(inst_ty, .parameter);
+
const i = f.next_arg_index;
f.next_arg_index += 1;
- return .{ .arg = i };
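+ // A parameter whose parameter-kind C type differs from its complete C type is
+ // passed as a struct wrapping the value; such arguments are referenced through
+ // .arg_array, which expands to the wrapper's `array` member.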
+ return if (inst_cty != try f.object.dg.typeToIndex(inst_ty, .complete))
+ .{ .arg_array = i }
+ else
+ .{ .arg = i };
}
fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
@@ -3115,7 +3351,7 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
const ret_val = if (is_array) ret_val: {
const array_local = try f.allocLocal(inst, try lowered_ret_ty.copy(f.arena.allocator()));
try writer.writeAll("memcpy(");
- try f.writeCValueMember(writer, array_local, .{ .field = 0 });
+ try f.writeCValueMember(writer, array_local, .{ .identifier = "array" });
try writer.writeAll(", ");
if (deref)
try f.writeCValueDeref(writer, operand)
@@ -3135,14 +3371,13 @@ fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
try f.writeCValue(writer, ret_val, .Other);
try writer.writeAll(";\n");
if (is_array) {
- try freeLocal(f, inst, ret_val.local, 0);
+ try freeLocal(f, inst, ret_val.new_local, 0);
}
} else {
try reap(f, inst, &.{un_op});
- if (f.object.dg.decl.ty.fnCallingConvention() != .Naked) {
+ if (f.object.dg.decl) |decl| if (decl.ty.fnCallingConvention() != .Naked)
// Not even allowed to return void in a naked function.
try writer.writeAll("return;\n");
- }
}
return CValue.none;
}
@@ -3344,7 +3579,7 @@ fn airStore(f: *Function, inst: Air.Inst.Index) !CValue {
try f.renderTypecast(writer, src_ty);
try writer.writeAll("))");
if (src_val == .constant) {
- try freeLocal(f, inst, array_src.local, 0);
+ try freeLocal(f, inst, array_src.new_local, 0);
}
} else if (ptr_info.host_size != 0) {
const host_bits = ptr_info.host_size * 8;
@@ -3770,8 +4005,12 @@ fn airCall(
modifier: std.builtin.CallModifier,
) !CValue {
// Not even allowed to call panic in a naked function.
- if (f.object.dg.decl.ty.fnCallingConvention() == .Naked) return .none;
+ if (f.object.dg.decl) |decl| if (decl.ty.fnCallingConvention() == .Naked) return .none;
+
const gpa = f.object.dg.gpa;
+ const module = f.object.dg.module;
+ const target = module.getTarget();
+ const writer = f.object.writer();
switch (modifier) {
.auto => {},
@@ -3786,8 +4025,28 @@ fn airCall(
const resolved_args = try gpa.alloc(CValue, args.len);
defer gpa.free(resolved_args);
- for (args, 0..) |arg, i| {
- resolved_args[i] = try f.resolveInst(arg);
+ for (resolved_args, args) |*resolved_arg, arg| {
+ const arg_ty = f.air.typeOf(arg);
+ const arg_cty = try f.object.dg.typeToIndex(arg_ty, .parameter);
+ if (f.object.dg.indexToCType(arg_cty).tag() == .void) {
+ resolved_arg.* = .none;
+ continue;
+ }
+ resolved_arg.* = try f.resolveInst(arg);
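+ // Arguments whose parameter-kind C type differs from their complete C type are
+ // copied into a freshly allocated wrapper local below and passed as that local.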
+ if (arg_cty != try f.object.dg.typeToIndex(arg_ty, .complete)) {
+ var lowered_arg_buf: LowerFnRetTyBuffer = undefined;
+ const lowered_arg_ty = lowerFnRetTy(arg_ty, &lowered_arg_buf, target);
+
+ const array_local = try f.allocLocal(inst, try lowered_arg_ty.copy(f.arena.allocator()));
+ try writer.writeAll("memcpy(");
+ try f.writeCValueMember(writer, array_local, .{ .identifier = "array" });
+ try writer.writeAll(", ");
+ try f.writeCValue(writer, resolved_arg.*, .FunctionArgument);
+ try writer.writeAll(", sizeof(");
+ try f.renderTypecast(writer, lowered_arg_ty);
+ try writer.writeAll("));\n");
+ resolved_arg.* = array_local;
+ }
}
const callee = try f.resolveInst(pl_op.operand);
@@ -3804,9 +4063,7 @@ fn airCall(
.Pointer => callee_ty.childType(),
else => unreachable,
};
- const writer = f.object.writer();
- const target = f.object.dg.module.getTarget();
const ret_ty = fn_ty.fnReturnType();
var lowered_ret_buf: LowerFnRetTyBuffer = undefined;
const lowered_ret_ty = lowerFnRetTy(ret_ty, &lowered_ret_buf, target);
@@ -3841,7 +4098,7 @@ fn airCall(
else => break :known,
};
};
- name = f.object.dg.module.declPtr(fn_decl).name;
+ name = module.declPtr(fn_decl).name;
try f.object.dg.renderDeclName(writer, fn_decl, 0);
break :callee;
}
@@ -3851,22 +4108,11 @@ fn airCall(
try writer.writeByte('(');
var args_written: usize = 0;
- for (args, 0..) |arg, arg_i| {
- const ty = f.air.typeOf(arg);
- if (!ty.hasRuntimeBitsIgnoreComptime()) continue;
- if (args_written != 0) {
- try writer.writeAll(", ");
- }
- if ((is_extern or std.mem.eql(u8, std.mem.span(name), "main")) and
- ty.isCPtr() and ty.childType().tag() == .u8)
- {
- // Corresponds with hack in renderType .Pointer case.
- try writer.writeAll("(char");
- if (ty.isConstPtr()) try writer.writeAll(" const");
- if (ty.isVolatilePtr()) try writer.writeAll(" volatile");
- try writer.writeAll(" *)");
- }
- try f.writeCValue(writer, resolved_args[arg_i], .FunctionArgument);
+ for (resolved_args) |resolved_arg| {
+ if (resolved_arg == .none) continue;
+ if (args_written != 0) try writer.writeAll(", ");
+ try f.writeCValue(writer, resolved_arg, .FunctionArgument);
+ if (resolved_arg == .new_local) try freeLocal(f, inst, resolved_arg.new_local, 0);
args_written += 1;
}
try writer.writeAll(");\n");
@@ -3879,11 +4125,11 @@ fn airCall(
try writer.writeAll("memcpy(");
try f.writeCValue(writer, array_local, .FunctionArgument);
try writer.writeAll(", ");
- try f.writeCValueMember(writer, result_local, .{ .field = 0 });
+ try f.writeCValueMember(writer, result_local, .{ .identifier = "array" });
try writer.writeAll(", sizeof(");
try f.renderTypecast(writer, ret_ty);
try writer.writeAll("));\n");
- try freeLocal(f, inst, result_local.local, 0);
+ try freeLocal(f, inst, result_local.new_local, 0);
break :r array_local;
};
@@ -4147,7 +4393,7 @@ fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
}
if (operand == .constant) {
- try freeLocal(f, inst, operand_lval.local, 0);
+ try freeLocal(f, inst, operand_lval.new_local, 0);
}
return local;
@@ -4193,7 +4439,7 @@ fn airFence(f: *Function, inst: Air.Inst.Index) !CValue {
fn airUnreach(f: *Function) !CValue {
// Not even allowed to call unreachable in a naked function.
- if (f.object.dg.decl.ty.fnCallingConvention() == .Naked) return .none;
+ if (f.object.dg.decl) |decl| if (decl.ty.fnCallingConvention() == .Naked) return .none;
try f.object.writer().writeAll("zig_unreachable();\n");
return CValue.none;
@@ -4667,7 +4913,7 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
const is_reg = constraint[1] == '{';
if (is_reg) {
try f.writeCValueDeref(writer, if (output == .none)
- CValue{ .local_ref = local.local }
+ CValue{ .local_ref = local.new_local }
else
try f.resolveInst(output));
try writer.writeAll(" = ");
@@ -4967,18 +5213,20 @@ fn structFieldPtr(f: *Function, inst: Air.Inst.Index, struct_ptr_ty: Type, struc
else => .none,
};
- const FieldLoc = union(enum) {
+ const field_loc: union(enum) {
begin: void,
field: CValue,
end: void,
- };
- const field_loc = switch (struct_ty.tag()) {
- .@"struct" => switch (struct_ty.containerLayout()) {
- .Auto, .Extern => for (struct_ty.structFields().values()[index..], 0..) |field, offset| {
- if (field.ty.hasRuntimeBitsIgnoreComptime()) break FieldLoc{ .field = .{
- .identifier = struct_ty.structFieldName(index + offset),
- } };
- } else @as(FieldLoc, .end),
+ } = switch (struct_ty.tag()) {
+ .tuple, .anon_struct, .@"struct" => switch (struct_ty.containerLayout()) {
+ .Auto, .Extern => for (index..struct_ty.structFieldCount()) |field_i| {
+ if (!struct_ty.structFieldIsComptime(field_i) and
+ struct_ty.structFieldType(field_i).hasRuntimeBitsIgnoreComptime())
+ break .{ .field = if (struct_ty.isSimpleTuple())
+ .{ .field = field_i }
+ else
+ .{ .identifier = struct_ty.structFieldName(field_i) } };
+ } else .end,
.Packed => if (field_ptr_info.data.host_size == 0) {
const target = f.object.dg.module.getTarget();
@@ -5003,27 +5251,15 @@ fn structFieldPtr(f: *Function, inst: Air.Inst.Index, struct_ptr_ty: Type, struc
try f.writeCValue(writer, struct_ptr, .Other);
try writer.print(")[{}];\n", .{try f.fmtIntLiteral(Type.usize, byte_offset_val)});
return local;
- } else @as(FieldLoc, .begin),
+ } else .begin,
},
.@"union", .union_safety_tagged, .union_tagged => if (struct_ty.containerLayout() == .Packed) {
try f.writeCValue(writer, struct_ptr, .Other);
try writer.writeAll(";\n");
return local;
- } else if (field_ty.hasRuntimeBitsIgnoreComptime()) FieldLoc{ .field = .{
+ } else if (field_ty.hasRuntimeBitsIgnoreComptime()) .{ .field = .{
.identifier = struct_ty.unionFields().keys()[index],
- } } else @as(FieldLoc, .end),
- .tuple, .anon_struct => field_name: {
- const tuple = struct_ty.tupleFields();
- if (tuple.values[index].tag() != .unreachable_value) return CValue.none;
-
- var id: usize = 0;
- break :field_name for (tuple.values, 0..) |value, i| {
- if (value.tag() != .unreachable_value) continue;
- if (!tuple.types[i].hasRuntimeBitsIgnoreComptime()) continue;
- if (i >= index) break FieldLoc{ .field = .{ .field = id } };
- id += 1;
- } else @as(FieldLoc, .end);
- },
+ } } else .end,
else => unreachable,
};
@@ -5076,8 +5312,11 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
};
const field_name: CValue = switch (struct_ty.tag()) {
- .@"struct" => switch (struct_ty.containerLayout()) {
- .Auto, .Extern => .{ .identifier = struct_ty.structFieldName(extra.field_index) },
+ .tuple, .anon_struct, .@"struct" => switch (struct_ty.containerLayout()) {
+ .Auto, .Extern => if (struct_ty.isSimpleTuple())
+ .{ .field = extra.field_index }
+ else
+ .{ .identifier = struct_ty.structFieldName(extra.field_index) },
.Packed => {
const struct_obj = struct_ty.castTag(.@"struct").?.data;
const int_info = struct_ty.intInfo(target);
@@ -5135,13 +5374,13 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const local = try f.allocLocal(inst, inst_ty);
try writer.writeAll("memcpy(");
- try f.writeCValue(writer, .{ .local_ref = local.local }, .FunctionArgument);
+ try f.writeCValue(writer, .{ .local_ref = local.new_local }, .FunctionArgument);
try writer.writeAll(", ");
- try f.writeCValue(writer, .{ .local_ref = temp_local.local }, .FunctionArgument);
+ try f.writeCValue(writer, .{ .local_ref = temp_local.new_local }, .FunctionArgument);
try writer.writeAll(", sizeof(");
try f.renderTypecast(writer, inst_ty);
try writer.writeAll("));\n");
- try freeLocal(f, inst, temp_local.local, 0);
+ try freeLocal(f, inst, temp_local.new_local, 0);
return local;
},
},
@@ -5165,22 +5404,13 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll("));\n");
if (struct_byval == .constant) {
- try freeLocal(f, inst, operand_lval.local, 0);
+ try freeLocal(f, inst, operand_lval.new_local, 0);
}
return local;
} else .{
.identifier = struct_ty.unionFields().keys()[extra.field_index],
},
- .tuple, .anon_struct => blk: {
- const tuple = struct_ty.tupleFields();
- if (tuple.values[extra.field_index].tag() != .unreachable_value) return CValue.none;
-
- var id: usize = 0;
- for (tuple.values[0..extra.field_index]) |value|
- id += @boolToInt(value.tag() == .unreachable_value);
- break :blk .{ .field = id };
- },
else => unreachable,
};
@@ -5765,7 +5995,7 @@ fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue
}
if (f.liveness.isUnused(inst)) {
- try freeLocal(f, inst, local.local, 0);
+ try freeLocal(f, inst, local.new_local, 0);
return CValue.none;
}
@@ -5808,7 +6038,7 @@ fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(");\n");
if (f.liveness.isUnused(inst)) {
- try freeLocal(f, inst, local.local, 0);
+ try freeLocal(f, inst, local.new_local, 0);
return CValue.none;
}
@@ -5905,7 +6135,7 @@ fn airMemset(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs });
- try freeLocal(f, inst, index.local, 0);
+ try freeLocal(f, inst, index.new_local, 0);
return CValue.none;
}
@@ -6222,7 +6452,7 @@ fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(";\n");
- try freeLocal(f, inst, it.local, 0);
+ try freeLocal(f, inst, it.new_local, 0);
return accum;
}
@@ -6235,8 +6465,8 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const gpa = f.object.dg.gpa;
const resolved_elements = try gpa.alloc(CValue, elements.len);
defer gpa.free(resolved_elements);
- for (elements, 0..) |element, i| {
- resolved_elements[i] = try f.resolveInst(element);
+ for (resolved_elements, elements) |*resolved_element, element| {
+ resolved_element.* = try f.resolveInst(element);
}
{
var bt = iterateBigTomb(f, inst);
@@ -6275,46 +6505,47 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
try writer.writeAll(")");
try writer.writeByte('{');
var empty = true;
- for (elements, 0..) |element, index| {
- if (inst_ty.structFieldValueComptime(index)) |_| continue;
+ for (elements, resolved_elements, 0..) |element, resolved_element, field_i| {
+ if (inst_ty.structFieldValueComptime(field_i)) |_| continue;
if (!empty) try writer.writeAll(", ");
- if (!inst_ty.isTupleOrAnonStruct()) {
- try writer.print(".{ } = ", .{fmtIdent(inst_ty.structFieldName(index))});
- }
+
+ const field_name: CValue = if (inst_ty.isSimpleTuple())
+ .{ .field = field_i }
+ else
+ .{ .identifier = inst_ty.structFieldName(field_i) };
+ try writer.writeByte('.');
+ try f.object.dg.writeCValue(writer, field_name);
+ try writer.writeAll(" = ");
const element_ty = f.air.typeOf(element);
try f.writeCValue(writer, switch (element_ty.zigTypeTag()) {
.Array => CValue{ .undef = element_ty },
- else => resolved_elements[index],
+ else => resolved_element,
}, .Initializer);
empty = false;
}
- if (empty) try writer.print("{}", .{try f.fmtIntLiteral(Type.u8, Value.zero)});
try writer.writeAll("};\n");
- var field_id: usize = 0;
- for (elements, 0..) |element, index| {
- if (inst_ty.structFieldValueComptime(index)) |_| continue;
+ for (elements, resolved_elements, 0..) |element, resolved_element, field_i| {
+ if (inst_ty.structFieldValueComptime(field_i)) |_| continue;
const element_ty = f.air.typeOf(element);
if (element_ty.zigTypeTag() != .Array) continue;
- const field_name = if (inst_ty.isTupleOrAnonStruct())
- CValue{ .field = field_id }
+ const field_name: CValue = if (inst_ty.isSimpleTuple())
+ .{ .field = field_i }
else
- CValue{ .identifier = inst_ty.structFieldName(index) };
+ .{ .identifier = inst_ty.structFieldName(field_i) };
try writer.writeAll(";\n");
try writer.writeAll("memcpy(");
try f.writeCValueMember(writer, local, field_name);
try writer.writeAll(", ");
- try f.writeCValue(writer, resolved_elements[index], .FunctionArgument);
+ try f.writeCValue(writer, resolved_element, .FunctionArgument);
try writer.writeAll(", sizeof(");
try f.renderTypecast(writer, element_ty);
try writer.writeAll("));\n");
-
- field_id += 1;
}
},
.Packed => {
@@ -6332,7 +6563,7 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
const bit_offset_val = Value.initPayload(&bit_offset_val_pl.base);
var empty = true;
- for (elements, 0..) |_, index| {
+ for (0..elements.len) |index| {
const field_ty = inst_ty.structFieldType(index);
if (!field_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -6381,13 +6612,6 @@ fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
empty = false;
}
- if (empty) {
- try writer.writeByte('(');
- try f.renderTypecast(writer, inst_ty);
- try writer.writeByte(')');
- try f.writeCValue(writer, .{ .undef = inst_ty }, .Initializer);
- }
-
try writer.writeAll(";\n");
},
},
@@ -7020,17 +7244,20 @@ fn isByRef(ty: Type) bool {
}
const LowerFnRetTyBuffer = struct {
+ names: [1][]const u8,
types: [1]Type,
values: [1]Value,
- payload: Type.Payload.Tuple,
+ payload: Type.Payload.AnonStruct,
};
fn lowerFnRetTy(ret_ty: Type, buffer: *LowerFnRetTyBuffer, target: std.Target) Type {
if (ret_ty.zigTypeTag() == .NoReturn) return Type.initTag(.noreturn);
if (lowersToArray(ret_ty, target)) {
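+ // Give the single wrapped element the field name "array" so that call sites
+ // can address it as an identifier member (see the memcpy paths in airCall and
+ // airRet).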
+ buffer.names = [1][]const u8{"array"};
buffer.types = [1]Type{ret_ty};
buffer.values = [1]Value{Value.initTag(.unreachable_value)};
buffer.payload = .{ .data = .{
+ .names = &buffer.names,
.types = &buffer.types,
.values = &buffer.values,
} };
@@ -7086,7 +7313,7 @@ fn die(f: *Function, inst: Air.Inst.Index, ref: Air.Inst.Ref) !void {
if (f.air.instructions.items(.tag)[ref_inst] == .constant) return;
const c_value = (f.value_map.fetchRemove(ref) orelse return).value;
const local_index = switch (c_value) {
- .local => |l| l,
+ .local, .new_local => |l| l,
else => return,
};
try freeLocal(f, inst, local_index, ref_inst);
@@ -7161,8 +7388,8 @@ fn deinitFreeLocalsMap(gpa: mem.Allocator, map: *LocalsMap) void {
}
fn noticeBranchFrees(f: *Function, pre_locals_len: LocalIndex, inst: Air.Inst.Index) !void {
- for (f.locals.items[pre_locals_len..], 0..) |*local, local_offset| {
- const local_index = pre_locals_len + @intCast(LocalIndex, local_offset);
+ for (f.locals.items[pre_locals_len..], pre_locals_len..) |*local, local_i| {
+ const local_index = @intCast(LocalIndex, local_i);
if (f.allocs.contains(local_index)) continue; // allocs are not freeable
// free more deeply nested locals from other branches at current depth
src/link/C.zig
@@ -117,7 +117,7 @@ pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, livenes
.gpa = gpa,
.module = module,
.error_msg = null,
- .decl_index = decl_index,
+ .decl_index = decl_index.toOptional(),
.decl = module.declPtr(decl_index),
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
@@ -146,7 +146,7 @@ pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, livenes
code.* = function.object.code.moveToUnmanaged();
// Free excess allocated memory for this Decl.
- ctypes.shrinkToFit(gpa);
+ ctypes.shrinkAndFree(gpa, ctypes.count());
lazy_fns.shrinkAndFree(gpa, lazy_fns.count());
fwd_decl.shrinkAndFree(gpa, fwd_decl.items.len);
code.shrinkAndFree(gpa, code.items.len);
@@ -176,7 +176,7 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !voi
.gpa = gpa,
.module = module,
.error_msg = null,
- .decl_index = decl_index,
+ .decl_index = decl_index.toOptional(),
.decl = decl,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
@@ -204,7 +204,7 @@ pub fn updateDecl(self: *C, module: *Module, decl_index: Module.Decl.Index) !voi
code.* = object.code.moveToUnmanaged();
// Free excess allocated memory for this Decl.
- ctypes.shrinkToFit(gpa);
+ ctypes.shrinkAndFree(gpa, ctypes.count());
fwd_decl.shrinkAndFree(gpa, fwd_decl.items.len);
code.shrinkAndFree(gpa, code.items.len);
}
@@ -247,8 +247,8 @@ pub fn flushModule(self: *C, comp: *Compilation, prog_node: *std.Progress.Node)
const abi_define = abiDefine(comp);
- // Covers defines, zig.h, ctypes, asm.
- try f.all_buffers.ensureUnusedCapacity(gpa, 4);
+ // Covers defines, zig.h, ctypes, asm, lazy fwd, lazy code.
+ try f.all_buffers.ensureUnusedCapacity(gpa, 6);
if (abi_define) |buf| f.appendBufAssumeCapacity(buf);
f.appendBufAssumeCapacity(zig_h);
@@ -258,15 +258,15 @@ pub fn flushModule(self: *C, comp: *Compilation, prog_node: *std.Progress.Node)
{
var asm_buf = f.asm_buf.toManaged(gpa);
- defer asm_buf.deinit();
-
- try codegen.genGlobalAsm(module, &asm_buf);
-
- f.asm_buf = asm_buf.moveToUnmanaged();
- f.appendBufAssumeCapacity(f.asm_buf.items);
+ defer f.asm_buf = asm_buf.moveToUnmanaged();
+ try codegen.genGlobalAsm(module, asm_buf.writer());
+ f.appendBufAssumeCapacity(asm_buf.items);
}
- try self.flushErrDecls(&f);
+ const lazy_indices = f.all_buffers.items.len;
+ f.all_buffers.items.len += 2;
+
+ try self.flushErrDecls(&f.lazy_db);
// `CType`s, forward decls, and non-functions first.
// Unlike other backends, the .c code we are emitting is order-dependent. Therefore
@@ -295,6 +295,30 @@ pub fn flushModule(self: *C, comp: *Compilation, prog_node: *std.Progress.Node)
}
}
+ {
+ // We need to flush lazy ctypes after flushing all decls but before flushing any decl ctypes.
+ assert(f.ctypes.count() == 0);
+ try self.flushCTypes(&f, .none, f.lazy_db.ctypes);
+
+ var it = self.decl_table.iterator();
+ while (it.next()) |entry|
+ try self.flushCTypes(&f, entry.key_ptr.toOptional(), entry.value_ptr.ctypes);
+ }
+
+ {
+ f.all_buffers.items[lazy_indices + 0] = .{
+ .iov_base = if (f.lazy_db.fwd_decl.items.len > 0) f.lazy_db.fwd_decl.items.ptr else "",
+ .iov_len = f.lazy_db.fwd_decl.items.len,
+ };
+ f.file_size += f.lazy_db.fwd_decl.items.len;
+
+ f.all_buffers.items[lazy_indices + 1] = .{
+ .iov_base = if (f.lazy_db.code.items.len > 0) f.lazy_db.code.items.ptr else "",
+ .iov_len = f.lazy_db.code.items.len,
+ };
+ f.file_size += f.lazy_db.code.items.len;
+ }
+
f.all_buffers.items[ctypes_index] = .{
.iov_base = if (f.ctypes_buf.items.len > 0) f.ctypes_buf.items.ptr else "",
.iov_len = f.ctypes_buf.items.len,
@@ -318,17 +342,17 @@ const Flush = struct {
ctypes_map: std.ArrayListUnmanaged(codegen.CType.Index) = .{},
ctypes_buf: std.ArrayListUnmanaged(u8) = .{},
- err_decls: DeclBlock = .{},
-
+ lazy_db: DeclBlock = .{},
lazy_fns: LazyFns = .{},
asm_buf: std.ArrayListUnmanaged(u8) = .{},
+
/// We collect a list of buffers to write, and write them all at once with pwritev 😎
all_buffers: std.ArrayListUnmanaged(std.os.iovec_const) = .{},
/// Keeps track of the total bytes of `all_buffers`.
file_size: u64 = 0,
- const LazyFns = std.AutoHashMapUnmanaged(codegen.LazyFnKey, DeclBlock);
+ const LazyFns = std.AutoHashMapUnmanaged(codegen.LazyFnKey, void);
fn appendBufAssumeCapacity(f: *Flush, buf: []const u8) void {
if (buf.len == 0) return;
@@ -338,10 +362,9 @@ const Flush = struct {
fn deinit(f: *Flush, gpa: Allocator) void {
f.all_buffers.deinit(gpa);
- var lazy_fns_it = f.lazy_fns.valueIterator();
- while (lazy_fns_it.next()) |db| db.deinit(gpa);
+ f.asm_buf.deinit(gpa);
f.lazy_fns.deinit(gpa);
- f.err_decls.deinit(gpa);
+ f.lazy_db.deinit(gpa);
f.ctypes_buf.deinit(gpa);
f.ctypes_map.deinit(gpa);
f.ctypes.deinit(gpa);
@@ -353,26 +376,106 @@ const FlushDeclError = error{
OutOfMemory,
};
-fn flushCTypes(self: *C, f: *Flush, ctypes: codegen.CType.Store) FlushDeclError!void {
- _ = self;
- _ = f;
- _ = ctypes;
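+/// Merges the CTypes of one decl (or of the lazy block) into the global store:
+/// each decl-local index is mapped to a global index via `f.ctypes_map`, and
+/// `genTypeDecl` emits the type's declaration into the ctypes buffer the first
+/// time it is encountered.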
+fn flushCTypes(
+ self: *C,
+ f: *Flush,
+ decl_index: Module.Decl.OptionalIndex,
+ decl_ctypes: codegen.CType.Store,
+) FlushDeclError!void {
+ const gpa = self.base.allocator;
+ const mod = self.base.options.module.?;
+
+ const decl_ctypes_len = decl_ctypes.count();
+ f.ctypes_map.clearRetainingCapacity();
+ try f.ctypes_map.ensureTotalCapacity(gpa, decl_ctypes_len);
+
+ var global_ctypes = f.ctypes.promote(gpa);
+ defer f.ctypes.demote(global_ctypes);
+
+ var ctypes_buf = f.ctypes_buf.toManaged(gpa);
+ defer f.ctypes_buf = ctypes_buf.moveToUnmanaged();
+ const writer = ctypes_buf.writer();
+
+ const slice = decl_ctypes.set.map.entries.slice();
+ for (slice.items(.key), 0..) |decl_cty, decl_i| {
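+ // Adapter that lets a decl-local CType be hashed and compared against entries
+ // of the global set by translating child indices through ctypes_map.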
+ const Context = struct {
+ arena: Allocator,
+ ctypes_map: []codegen.CType.Index,
+ cached_hash: codegen.CType.Store.Set.Map.Hash,
+ idx: codegen.CType.Index,
+
+ pub fn hash(ctx: @This(), _: codegen.CType) codegen.CType.Store.Set.Map.Hash {
+ return ctx.cached_hash;
+ }
+ pub fn eql(ctx: @This(), lhs: codegen.CType, rhs: codegen.CType, _: usize) bool {
+ return lhs.eqlContext(rhs, ctx);
+ }
+ pub fn eqlIndex(
+ ctx: @This(),
+ lhs_idx: codegen.CType.Index,
+ rhs_idx: codegen.CType.Index,
+ ) bool {
+ if (lhs_idx < codegen.CType.Tag.no_payload_count or
+ rhs_idx < codegen.CType.Tag.no_payload_count) return lhs_idx == rhs_idx;
+ const lhs_i = lhs_idx - codegen.CType.Tag.no_payload_count;
+ if (lhs_i >= ctx.ctypes_map.len) return false;
+ return ctx.ctypes_map[lhs_i] == rhs_idx;
+ }
+ pub fn copyIndex(ctx: @This(), idx: codegen.CType.Index) codegen.CType.Index {
+ if (idx < codegen.CType.Tag.no_payload_count) return idx;
+ return ctx.ctypes_map[idx - codegen.CType.Tag.no_payload_count];
+ }
+ };
+ const decl_idx = @intCast(codegen.CType.Index, codegen.CType.Tag.no_payload_count + decl_i);
+ const ctx = Context{
+ .arena = global_ctypes.arena.allocator(),
+ .ctypes_map = f.ctypes_map.items,
+ .cached_hash = decl_ctypes.indexToHash(decl_idx),
+ .idx = decl_idx,
+ };
+ const gop = try global_ctypes.set.map.getOrPutContextAdapted(gpa, decl_cty, ctx, .{
+ .store = &global_ctypes.set,
+ });
+ const global_idx =
+ @intCast(codegen.CType.Index, codegen.CType.Tag.no_payload_count + gop.index);
+ f.ctypes_map.appendAssumeCapacity(global_idx);
+ if (!gop.found_existing) {
+ errdefer _ = global_ctypes.set.map.pop();
+ gop.key_ptr.* = try decl_cty.copyContext(ctx);
+ }
+ if (std.debug.runtime_safety) {
+ const global_cty = &global_ctypes.set.map.entries.items(.key)[gop.index];
+ assert(global_cty == gop.key_ptr);
+ assert(decl_cty.eqlContext(global_cty.*, ctx));
+ assert(decl_cty.hash(decl_ctypes.set) == global_cty.hash(global_ctypes.set));
+ }
+ try codegen.genTypeDecl(
+ mod,
+ writer,
+ global_ctypes.set,
+ global_idx,
+ decl_index,
+ decl_ctypes.set,
+ decl_idx,
+ gop.found_existing,
+ );
+ }
}
-fn flushErrDecls(self: *C, f: *Flush) FlushDeclError!void {
+fn flushErrDecls(self: *C, db: *DeclBlock) FlushDeclError!void {
const gpa = self.base.allocator;
- const fwd_decl = &f.err_decls.fwd_decl;
- const ctypes = &f.err_decls.ctypes;
- const code = &f.err_decls.code;
+ const fwd_decl = &db.fwd_decl;
+ const ctypes = &db.ctypes;
+ const code = &db.code;
var object = codegen.Object{
.dg = .{
.gpa = gpa,
.module = self.base.options.module.?,
.error_msg = null,
- .decl_index = undefined,
- .decl = undefined,
+ .decl_index = .none,
+ .decl = null,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
},
@@ -394,19 +497,9 @@ fn flushErrDecls(self: *C, f: *Flush) FlushDeclError!void {
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
ctypes.* = object.dg.ctypes.move();
code.* = object.code.moveToUnmanaged();
-
- try self.flushCTypes(f, ctypes.*);
- try f.all_buffers.ensureUnusedCapacity(gpa, 2);
- f.appendBufAssumeCapacity(fwd_decl.items);
- f.appendBufAssumeCapacity(code.items);
}
-fn flushLazyFn(
- self: *C,
- f: *Flush,
- db: *DeclBlock,
- lazy_fn: codegen.LazyFnMap.Entry,
-) FlushDeclError!void {
+fn flushLazyFn(self: *C, db: *DeclBlock, lazy_fn: codegen.LazyFnMap.Entry) FlushDeclError!void {
const gpa = self.base.allocator;
const fwd_decl = &db.fwd_decl;
@@ -418,8 +511,8 @@ fn flushLazyFn(
.gpa = gpa,
.module = self.base.options.module.?,
.error_msg = null,
- .decl_index = undefined,
- .decl = undefined,
+ .decl_index = .none,
+ .decl = null,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = ctypes.*,
},
@@ -441,11 +534,6 @@ fn flushLazyFn(
fwd_decl.* = object.dg.fwd_decl.moveToUnmanaged();
ctypes.* = object.dg.ctypes.move();
code.* = object.code.moveToUnmanaged();
-
- try self.flushCTypes(f, ctypes.*);
- try f.all_buffers.ensureUnusedCapacity(gpa, 2);
- f.appendBufAssumeCapacity(fwd_decl.items);
- f.appendBufAssumeCapacity(code.items);
}
fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError!void {
@@ -456,8 +544,8 @@ fn flushLazyFns(self: *C, f: *Flush, lazy_fns: codegen.LazyFnMap) FlushDeclError
while (it.next()) |entry| {
const gop = f.lazy_fns.getOrPutAssumeCapacity(entry.key_ptr.*);
if (gop.found_existing) continue;
- gop.value_ptr.* = .{};
- try self.flushLazyFn(f, gop.value_ptr, entry);
+ gop.value_ptr.* = {};
+ try self.flushLazyFn(&f.lazy_db, entry);
}
}
@@ -481,7 +569,6 @@ fn flushDecl(
const decl_block = self.decl_table.getPtr(decl_index).?;
- try self.flushCTypes(f, decl_block.ctypes);
try self.flushLazyFns(f, decl_block.lazy_fns);
try f.all_buffers.ensureUnusedCapacity(gpa, 1);
if (!(decl.isExtern() and export_names.contains(mem.span(decl.name))))
src/Compilation.zig
@@ -3273,7 +3273,7 @@ fn processOneJob(comp: *Compilation, job: Job) !void {
.gpa = gpa,
.module = module,
.error_msg = null,
- .decl_index = decl_index,
+ .decl_index = decl_index.toOptional(),
.decl = decl,
.fwd_decl = fwd_decl.toManaged(gpa),
.ctypes = .{},