Commit a7088fd9a3
Changed files (9):
  src/arch/wasm/CodeGen.zig
  src/arch/x86_64/CodeGen.zig
  src/codegen/c.zig
  src/codegen/llvm.zig
  src/codegen.zig
  src/InternPool.zig
  src/Module.zig
  src/Sema.zig
  src/type.zig
src/arch/wasm/CodeGen.zig
@@ -3779,7 +3779,7 @@ fn airStructFieldVal(func: *CodeGen, inst: Air.Inst.Index) InnerError!void {
.Packed => switch (struct_ty.zigTypeTag(mod)) {
.Struct => result: {
const packed_struct = mod.typeToPackedStruct(struct_ty).?;
- const offset = mod.structPackedFieldBitOffset(packed_struct, field_index);
+ const offset = packed_struct.fieldBitOffset(ip, field_index);
const backing_ty = packed_struct.backingIntType(ip).toType();
const wasm_bits = toWasmBits(backing_ty.intInfo(mod).bits) orelse {
return func.fail("TODO: airStructFieldVal for packed structs larger than 128 bits", .{});
src/arch/x86_64/CodeGen.zig
@@ -5593,6 +5593,7 @@ fn fieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, index: u32
fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
+ const ip = &mod.intern_pool;
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const extra = self.air.extraData(Air.StructField, ty_pl.payload).data;
const result: MCValue = result: {
@@ -5610,7 +5611,7 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
const field_off: u32 = switch (container_ty.containerLayout(mod)) {
.Auto, .Extern => @intCast(container_ty.structFieldOffset(index, mod) * 8),
.Packed => if (mod.typeToStruct(container_ty)) |struct_type|
- mod.structPackedFieldBitOffset(struct_type, index)
+ struct_type.fieldBitOffset(ip, index)
else
0,
};
@@ -11410,6 +11411,7 @@ fn airReduce(self: *Self, inst: Air.Inst.Index) !void {
fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
const mod = self.bin_file.options.module.?;
+ const ip = &mod.intern_pool;
const result_ty = self.typeOfIndex(inst);
const len: usize = @intCast(result_ty.arrayLen(mod));
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
@@ -11440,7 +11442,7 @@ fn airAggregateInit(self: *Self, inst: Air.Inst.Index) !void {
}
const elem_abi_size: u32 = @intCast(elem_ty.abiSize(mod));
const elem_abi_bits = elem_abi_size * 8;
- const elem_off = mod.structPackedFieldBitOffset(struct_type, elem_i);
+ const elem_off = struct_type.fieldBitOffset(ip, elem_i);
const elem_byte_off: i32 = @intCast(elem_off / elem_abi_bits * elem_abi_size);
const elem_bit_off = elem_off % elem_abi_bits;
const elem_mcv = try self.resolveInst(elem);
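The byte/bit split above (elem_byte_off, elem_bit_off) can be sanity-checked with made-up numbers; a minimal standalone sketch, not compiler code:

const std = @import("std");

test "packed element byte/bit split" {
    // Hypothetical numbers: 4-byte (32-bit) element ABI size, packed bit offset 37.
    const elem_abi_size: u32 = 4;
    const elem_abi_bits = elem_abi_size * 8;
    const elem_off: u32 = 37;
    // 37 / 32 * 4 = 4 bytes into the aggregate, 37 % 32 = 5 bits into that chunk.
    try std.testing.expectEqual(@as(u32, 4), elem_off / elem_abi_bits * elem_abi_size);
    try std.testing.expectEqual(@as(u32, 5), elem_off % elem_abi_bits);
}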
src/codegen/c.zig
@@ -5429,7 +5429,7 @@ fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
const bit_offset_ty = try mod.intType(.unsigned, Type.smallestUnsignedBits(int_info.bits - 1));
- const bit_offset = mod.structPackedFieldBitOffset(struct_type, extra.field_index);
+ const bit_offset = struct_type.fieldBitOffset(ip, extra.field_index);
const bit_offset_val = try mod.intValue(bit_offset_ty, bit_offset);
const field_int_signedness = if (inst_ty.isAbiInt(mod))
src/codegen/llvm.zig
@@ -6192,6 +6192,7 @@ pub const FuncGen = struct {
fn airStructFieldVal(self: *FuncGen, body_tail: []const Air.Inst.Index) !Builder.Value {
const o = self.dg.object;
const mod = o.module;
+ const ip = &mod.intern_pool;
const inst = body_tail[0];
const ty_pl = self.air.instructions.items(.data)[inst].ty_pl;
const struct_field = self.air.extraData(Air.StructField, ty_pl.payload).data;
@@ -6207,7 +6208,7 @@ pub const FuncGen = struct {
.Struct => switch (struct_ty.containerLayout(mod)) {
.Packed => {
const struct_type = mod.typeToStruct(struct_ty).?;
- const bit_offset = mod.structPackedFieldBitOffset(struct_type, field_index);
+ const bit_offset = struct_type.fieldBitOffset(ip, field_index);
const containing_int = struct_llvm_val;
const shift_amt =
try o.builder.intValue(containing_int.typeOfWip(&self.wip), bit_offset);
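The shift amount built above is the field's bit offset into the backing integer; the remainder of the lowering, not shown in this hunk, is expected to truncate the shifted value to the field's width. A standalone scalar sketch of that shift-and-truncate extraction, using an illustrative packed struct rather than compiler data:

const std = @import("std");

// Illustrative layout; fields are packed starting from the least significant bit.
const S = packed struct { a: u3, b: u7, c: u6 };

test "packed field extraction = shift + truncate" {
    const s: S = .{ .a = 5, .b = 107, .c = 33 };
    const backing: u16 = @bitCast(s);
    const b: u7 = @truncate(backing >> @bitOffsetOf(S, "b"));
    try std.testing.expectEqual(s.b, b);
}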
src/codegen.zig
@@ -630,7 +630,8 @@ fn lowerParentPtr(
reloc_info: RelocInfo,
) CodeGenError!Result {
const mod = bin_file.options.module.?;
- const ptr = mod.intern_pool.indexToKey(parent_ptr).ptr;
+ const ip = &mod.intern_pool;
+ const ptr = ip.indexToKey(parent_ptr).ptr;
assert(ptr.len == .none);
return switch (ptr.addr) {
.decl, .mut_decl => try lowerDeclRef(
@@ -656,7 +657,7 @@ fn lowerParentPtr(
code,
debug_output,
reloc_info.offset(@as(u32, @intCast(errUnionPayloadOffset(
- mod.intern_pool.typeOf(eu_payload).toType(),
+ ip.typeOf(eu_payload).toType(),
mod,
)))),
),
@@ -675,17 +676,17 @@ fn lowerParentPtr(
code,
debug_output,
reloc_info.offset(@as(u32, @intCast(elem.index *
- mod.intern_pool.typeOf(elem.base).toType().elemType2(mod).abiSize(mod)))),
+ ip.typeOf(elem.base).toType().elemType2(mod).abiSize(mod)))),
),
.field => |field| {
- const base_type = mod.intern_pool.indexToKey(mod.intern_pool.typeOf(field.base)).ptr_type.child;
+ const base_type = ip.indexToKey(ip.typeOf(field.base)).ptr_type.child;
return lowerParentPtr(
bin_file,
src_loc,
field.base,
code,
debug_output,
- reloc_info.offset(switch (mod.intern_pool.indexToKey(base_type)) {
+ reloc_info.offset(switch (ip.indexToKey(base_type)) {
.ptr_type => |ptr_type| switch (ptr_type.flags.size) {
.One, .Many, .C => unreachable,
.Slice => switch (field.index) {
@@ -703,9 +704,9 @@ fn lowerParentPtr(
mod,
)),
.Packed => if (mod.typeToStruct(base_type.toType())) |struct_type|
- math.divExact(u16, mod.structPackedFieldBitOffset(
- struct_type,
- @intCast(field.index),
+ math.divExact(u32, struct_type.fieldBitOffset(
+ ip,
+ field.index,
), 8) catch |err| switch (err) {
error.UnexpectedRemainder => 0,
error.DivisionByZero => unreachable,
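The catch above covers packed fields that do not start on a byte boundary: std.math.divExact returns error.UnexpectedRemainder in that case, and the relocation offset falls back to 0. A small illustration with plain numbers:

const std = @import("std");

test "divExact on packed bit offsets" {
    // Byte-aligned bit offset: 16 bits is exactly 2 bytes.
    try std.testing.expectEqual(@as(u32, 2), try std.math.divExact(u32, 16, 8));
    // Non-byte-aligned bit offset: the division has a remainder.
    try std.testing.expectError(error.UnexpectedRemainder, std.math.divExact(u32, 10, 8));
}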
src/InternPool.zig
@@ -105,6 +105,25 @@ pub const MapIndex = enum(u32) {
}
};
+pub const OptionalInt = enum(u32) {
+ none = std.math.maxInt(u32),
+ _,
+
+ pub fn init(x: u32) @This() {
+ const result: @This() = @enumFromInt(x);
+ assert(result != .none);
+ return result;
+ }
+
+ pub fn initOptional(opt_x: ?u32) @This() {
+ return @This().init(opt_x orelse return .none);
+ }
+
+ pub fn unwrap(this: @This()) ?u32 {
+ return if (this == .none) null else @intFromEnum(this);
+ }
+};
+
pub const RuntimeIndex = enum(u32) {
zero = 0,
comptime_field_ptr = std.math.maxInt(u32),
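OptionalInt packs an optional u32 into 32 bits by reserving maxInt(u32) as the "none" sentinel, so each per-field offset costs a single u32 slot in extra. A self-contained sketch of the same pattern (an analogue for illustration, not the InternPool type itself):

const std = @import("std");
const assert = std.debug.assert;

// Standalone analogue of the sentinel-encoded optional: maxInt(u32) means "no value",
// so an optional u32 still fits in one u32.
const OptU32 = enum(u32) {
    none = std.math.maxInt(u32),
    _,

    fn wrap(x: u32) OptU32 {
        const result: OptU32 = @enumFromInt(x);
        assert(result != .none); // the payload must not collide with the sentinel
        return result;
    }

    fn unwrap(self: OptU32) ?u32 {
        return if (self == .none) null else @intFromEnum(self);
    }
};

test "sentinel-encoded optional round trip" {
    try std.testing.expectEqual(@as(?u32, 10), OptU32.wrap(10).unwrap());
    try std.testing.expectEqual(@as(?u32, null), OptU32.none.unwrap());
    try std.testing.expect(@sizeOf(OptU32) == @sizeOf(u32));
}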
@@ -377,6 +396,8 @@ pub const Key = union(enum) {
field_aligns: Alignment.Slice,
runtime_order: RuntimeOrder.Slice,
comptime_bits: ComptimeBits,
+ /// In the case of packed structs these are bit offsets; in the case of
+ /// non-packed structs these are byte offsets.
offsets: Offsets,
names_map: OptionalMapIndex,
@@ -475,6 +496,15 @@ pub const Key = union(enum) {
return s.field_names.get(ip)[i].toOptional();
}
+ /// Asserts it is a packed struct.
+ /// Asserts the layout is resolved.
+ pub fn fieldBitOffset(s: @This(), ip: *InternPool, i: usize) u32 {
+ assert(s.layout == .Packed);
+ assert(s.haveLayout(ip));
+ const result: OptionalInt = @enumFromInt(s.offsets.get(ip)[i]);
+ return result.unwrap().?;
+ }
+
pub fn fieldIsComptime(s: @This(), ip: *const InternPool, i: usize) bool {
return s.comptime_bits.getBit(ip, i);
}
@@ -593,7 +623,11 @@ pub const Key = union(enum) {
pub fn haveLayout(s: @This(), ip: *InternPool) bool {
return switch (s.layout) {
- .Packed => s.backingIntType(ip).* != .none,
+ .Packed => {
+ if (s.offsets.len == 0) return true;
+ const first_offset: OptionalInt = @enumFromInt(ip.extra.items[s.offsets.start]);
+ return first_offset != .none;
+ },
.Auto, .Extern => s.flagsPtr(ip).layout_resolved,
};
}
@@ -2936,7 +2970,8 @@ pub const Tag = enum(u8) {
/// Trailing:
/// 0. type: Index for each fields_len
/// 1. name: NullTerminatedString for each fields_len
- /// 2. init: Index for each fields_len // if tag is type_struct_packed_inits
+ /// 2. bit_offset: OptionalInt for each fields_len // none until layout resolved
+ /// 3. init: Index for each fields_len // if tag is type_struct_packed_inits
pub const TypeStructPacked = struct {
decl: Module.Decl.Index,
zir_index: Zir.Inst.Index,
@@ -4194,8 +4229,12 @@ fn extraPackedStructType(ip: *const InternPool, extra_index: u32, inits: bool) K
.start = type_struct_packed.end + fields_len,
.len = fields_len,
},
+ .offsets = .{
+ .start = type_struct_packed.end + fields_len * 2,
+ .len = fields_len,
+ },
.field_inits = if (inits) .{
- .start = type_struct_packed.end + fields_len + fields_len,
+ .start = type_struct_packed.end + fields_len * 3,
.len = fields_len,
} else .{
.start = 0,
@@ -4204,7 +4243,6 @@ fn extraPackedStructType(ip: *const InternPool, extra_index: u32, inits: bool) K
.field_aligns = .{ .start = 0, .len = 0 },
.runtime_order = .{ .start = 0, .len = 0 },
.comptime_bits = .{ .start = 0, .len = 0 },
- .offsets = .{ .start = 0, .len = 0 },
.names_map = type_struct_packed.data.names_map.toOptional(),
};
}
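With the new offsets slice, the trailing data of TypeStructPacked is parallel arrays of fields_len u32-sized entries each: types, names, bit offsets, and (for type_struct_packed_inits) inits, which is why the offsets slice starts at end + fields_len * 2 above. A sketch of that start-index arithmetic with illustrative numbers, not compiler code:

const std = @import("std");

// Start indices of the trailing arrays, assuming `end` is the first extra index past
// the fixed TypeStructPacked fields (illustrative helper).
fn trailingStarts(end: u32, fields_len: u32) [4]u32 {
    return .{
        end, // 0. field types
        end + fields_len, // 1. field names
        end + fields_len * 2, // 2. field bit offsets (new in this commit)
        end + fields_len * 3, // 3. field inits, only for type_struct_packed_inits
    };
}

test "trailing array starts" {
    // Three fields starting at extra index 100.
    const starts = trailingStarts(100, 3);
    try std.testing.expectEqualSlices(u32, &[_]u32{ 100, 103, 106, 109 }, &starts);
}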
@@ -5279,6 +5317,7 @@ pub fn getStructType(
try ip.extra.ensureUnusedCapacity(gpa, @typeInfo(Tag.TypeStructPacked).Struct.fields.len +
ini.fields_len + // types
ini.fields_len + // names
+ ini.fields_len + // offsets
ini.fields_len); // inits
try ip.items.append(gpa, .{
.tag = if (ini.any_default_inits) .type_struct_packed_inits else .type_struct_packed,
@@ -5293,6 +5332,7 @@ pub fn getStructType(
});
ip.extra.appendNTimesAssumeCapacity(@intFromEnum(Index.none), ini.fields_len);
ip.extra.appendNTimesAssumeCapacity(@intFromEnum(OptionalNullTerminatedString.none), ini.fields_len);
+ ip.extra.appendNTimesAssumeCapacity(@intFromEnum(OptionalInt.none), ini.fields_len);
if (ini.any_default_inits) {
ip.extra.appendNTimesAssumeCapacity(@intFromEnum(Index.none), ini.fields_len);
}
@@ -7113,12 +7153,12 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
.type_struct_packed => b: {
const info = ip.extraData(Tag.TypeStructPacked, data);
break :b @sizeOf(u32) * (@typeInfo(Tag.TypeStructPacked).Struct.fields.len +
- info.fields_len + info.fields_len);
+ info.fields_len + info.fields_len + info.fields_len);
},
.type_struct_packed_inits => b: {
const info = ip.extraData(Tag.TypeStructPacked, data);
break :b @sizeOf(u32) * (@typeInfo(Tag.TypeStructPacked).Struct.fields.len +
- info.fields_len + info.fields_len + info.fields_len);
+ info.fields_len + info.fields_len + info.fields_len + info.fields_len);
},
.type_tuple_anon => b: {
const info = ip.extraData(TypeStructAnon, data);
src/Module.zig
@@ -6649,26 +6649,3 @@ pub fn structFieldAlignmentExtern(mod: *Module, field_ty: Type) Alignment {
return ty_abi_align;
}
-
-/// TODO: avoid linear search by storing these in trailing data of packed struct types
-/// then packedStructFieldByteOffset can be expressed in terms of bits / 8, fixing
-/// that one too.
-/// https://github.com/ziglang/zig/issues/17178
-pub fn structPackedFieldBitOffset(
- mod: *Module,
- struct_type: InternPool.Key.StructType,
- field_index: u32,
-) u16 {
- const ip = &mod.intern_pool;
- assert(struct_type.layout == .Packed);
- assert(struct_type.haveLayout(ip));
- var bit_sum: u64 = 0;
- for (0..struct_type.field_types.len) |i| {
- if (i == field_index) {
- return @intCast(bit_sum);
- }
- const field_ty = struct_type.field_types.get(ip)[i].toType();
- bit_sum += field_ty.bitSize(mod);
- }
- unreachable; // index out of bounds
-}
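The removed helper re-summed field bit sizes on every query; after this commit the offsets are filled once with a running sum (see the Sema.zig hunks below) and read back by index. A minimal sketch of that precomputation over plain slices, with hypothetical field widths:

const std = @import("std");

// Running-sum precomputation (plain slices, not InternPool data): each field's bit
// offset is the sum of the widths of the fields before it.
fn fillBitOffsets(field_bit_sizes: []const u16, offsets: []u32) void {
    var bit_sum: u32 = 0;
    for (field_bit_sizes, offsets) |size, *offset| {
        offset.* = bit_sum;
        bit_sum += size;
    }
}

test "packed field bit offsets via running sum" {
    // Field widths u3, u7, u6 give offsets 0, 3, 10.
    var offsets: [3]u32 = undefined;
    fillBitOffsets(&[_]u16{ 3, 7, 6 }, &offsets);
    try std.testing.expectEqualSlices(u32, &[_]u32{ 0, 3, 10 }, &offsets);
}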
src/Sema.zig
@@ -21342,8 +21342,10 @@ fn reifyStruct(
}
var fields_bit_sum: u64 = 0;
- for (struct_type.field_types.get(ip)) |field_ty| {
- fields_bit_sum += field_ty.toType().bitSize(mod);
+ for (0..struct_type.field_types.len) |i| {
+ struct_type.offsets.get(ip)[i] = @intCast(fields_bit_sum);
+ const field_ty = struct_type.field_types.get(ip)[i].toType();
+ fields_bit_sum += field_ty.bitSize(mod);
}
if (backing_int_val.optionalValue(mod)) |backing_int_ty_val| {
@@ -34772,6 +34774,7 @@ fn semaBackingIntType(mod: *Module, struct_type: InternPool.Key.StructType) Comp
var accumulator: u64 = 0;
for (0..struct_type.field_types.len) |i| {
const field_ty = struct_type.field_types.get(ip)[i].toType();
+ struct_type.offsets.get(ip)[i] = @intCast(accumulator);
accumulator += try field_ty.bitSizeAdvanced(mod, &sema);
}
break :blk accumulator;
src/type.zig
@@ -3021,27 +3021,16 @@ pub const Type = struct {
};
}
- pub fn packedStructFieldByteOffset(ty: Type, field_index: usize, mod: *Module) u32 {
+ pub fn packedStructFieldBitOffset(ty: Type, field_index: usize, mod: *Module) u32 {
const ip = &mod.intern_pool;
const struct_type = ip.indexToKey(ty.toIntern()).struct_type;
assert(struct_type.layout == .Packed);
- comptime assert(Type.packed_struct_layout_version == 2);
-
- var bit_offset: u16 = undefined;
- var elem_size_bits: u16 = undefined;
- var running_bits: u16 = 0;
- for (struct_type.field_types.get(ip), 0..) |field_ty, i| {
- if (!field_ty.toType().hasRuntimeBits(mod)) continue;
-
- const field_bits: u16 = @intCast(field_ty.toType().bitSize(mod));
- if (i == field_index) {
- bit_offset = running_bits;
- elem_size_bits = field_bits;
- }
- running_bits += field_bits;
- }
- const byte_offset = bit_offset / 8;
- return byte_offset;
+ assert(struct_type.haveLayout(ip));
+ return struct_type.offsets.get(ip)[field_index];
+ }
+
+ pub fn packedStructFieldByteOffset(ty: Type, field_index: usize, mod: *Module) u32 {
+ return packedStructFieldBitOffset(ty, field_index, mod) / 8;
}
pub const FieldOffset = struct {
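The renamed packedStructFieldBitOffset now just reads the stored offset, and packedStructFieldByteOffset is that value divided by 8. A self-contained check of the relationship using language-level builtins on a hypothetical packed struct:

const std = @import("std");

const S = packed struct { a: u3, b: u7, c: u6 };

comptime {
    // Field "c" starts at bit 10 of the 16-bit backing integer, i.e. in byte 1.
    std.debug.assert(@bitOffsetOf(S, "c") == 10);
    std.debug.assert(@bitOffsetOf(S, "c") / 8 == 1);
    std.debug.assert(@bitOffsetOf(S, "b") / 8 == 0);
    std.debug.assert(@bitSizeOf(S) == 16);
}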