Commit 101299e856

Jakub Konka <kubkon@jakubkonka.com>
2024-07-04 17:49:35
macho: move unwind info records ownership to Objects
1 parent a57479a
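
This commit moves unwind record storage out of the single MachO-wide list and into each Object, with cross-object consumers switching to UnwindInfo.Record.Ref. A minimal sketch of the new lookup path, assuming macho_file: *MachO and a valid object file index are already in scope; all other names come from the hunks below:

const object = macho_file.getFile(index).?.object;
for (object.unwind_records_indexes.items) |rec_index| {
    // Records are resolved through the owning Object now, not through MachO.
    const rec = object.getUnwindRecord(rec_index);
    if (!rec.alive) continue;
    // Live records carry the same fields as before (length, enc, atom, fde, atom_offset).
}
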
src/link/MachO/Archive.zig
@@ -67,9 +67,9 @@ pub fn parse(self: *Archive, macho_file: *MachO, path: []const u8, handle_index:
             mem.eql(u8, name, SYMDEF64_SORTED)) continue;
 
         const object = Object{
-            .archive = .{
+            .offset = pos,
+            .in_archive = .{
                 .path = try gpa.dupe(u8, path),
-                .offset = pos,
                 .size = hdr_size,
             },
             .path = try gpa.dupe(u8, name),
src/link/MachO/Atom.zig
@@ -91,14 +91,16 @@ pub fn getUnwindRecords(self: Atom, macho_file: *MachO) []const UnwindInfo.Recor
     if (!self.flags.unwind) return &[0]UnwindInfo.Record.Index{};
     const extra = self.getExtra(macho_file).?;
     return switch (self.getFile(macho_file)) {
-        .dylib, .zig_object, .internal => unreachable,
-        .object => |x| x.unwind_records.items[extra.unwind_index..][0..extra.unwind_count],
+        .dylib => unreachable,
+        .zig_object, .internal => &[0]UnwindInfo.Record.Index{},
+        .object => |x| x.unwind_records_indexes.items[extra.unwind_index..][0..extra.unwind_count],
     };
 }
 
 pub fn markUnwindRecordsDead(self: Atom, macho_file: *MachO) void {
+    const object = self.getFile(macho_file).object;
     for (self.getUnwindRecords(macho_file)) |cu_index| {
-        const cu = macho_file.getUnwindRecord(cu_index);
+        const cu = object.getUnwindRecord(cu_index);
         cu.alive = false;
 
         if (cu.getFdePtr(macho_file)) |fde| {
@@ -1170,6 +1172,7 @@ fn format2(
     _ = unused_fmt_string;
     const atom = ctx.atom;
     const macho_file = ctx.macho_file;
+    const file = atom.getFile(macho_file);
     try writer.print("atom({d}) : {s} : @{x} : sect({d}) : align({x}) : size({x}) : nreloc({d})", .{
         atom.atom_index,                atom.getName(macho_file), atom.getAddress(macho_file),
         atom.out_n_sect,                atom.alignment,           atom.size,
@@ -1181,7 +1184,7 @@ fn format2(
         try writer.writeAll(" : unwind{ ");
         const extra = atom.getExtra(macho_file).?;
         for (atom.getUnwindRecords(macho_file), extra.unwind_index..) |index, i| {
-            const rec = macho_file.getUnwindRecord(index);
+            const rec = file.object.getUnwindRecord(index);
             try writer.print("{d}", .{index});
             if (!rec.alive) try writer.writeAll("([*])");
             if (i < extra.unwind_index + extra.unwind_count - 1) try writer.writeAll(", ");
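
Atom.getUnwindRecords now returns an empty slice for .zig_object and .internal files instead of hitting unreachable, so callers can loop unconditionally and only need the owning file to resolve indices. A hedged sketch of the caller-side pattern, assuming atom: Atom and macho_file: *MachO are in scope:

const file = atom.getFile(macho_file);
for (atom.getUnwindRecords(macho_file)) |cu_index| {
    // The slice is empty for .zig_object and .internal, so this loop is a no-op for them;
    // for .object files, the index is resolved by the owning Object.
    const cu = file.object.getUnwindRecord(cu_index);
    _ = cu;
}
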
src/link/MachO/dead_strip.zig
@@ -41,8 +41,9 @@ fn collectRoots(roots: *std.ArrayList(*Atom), objects: []const File.Index, macho
     }
 
     for (macho_file.objects.items) |index| {
-        for (macho_file.getFile(index).?.object.unwind_records.items) |cu_index| {
-            const cu = macho_file.getUnwindRecord(cu_index);
+        const object = macho_file.getFile(index).?.object;
+        for (object.unwind_records_indexes.items) |cu_index| {
+            const cu = object.getUnwindRecord(cu_index);
             if (!cu.alive) continue;
             if (cu.getFde(macho_file)) |fde| {
                 if (fde.getCie(macho_file).getPersonality(macho_file)) |sym| try markSymbol(sym, roots, macho_file);
@@ -127,8 +128,9 @@ fn markLive(atom: *Atom, macho_file: *MachO) void {
         }
     }
 
+    const file = atom.getFile(macho_file);
     for (atom.getUnwindRecords(macho_file)) |cu_index| {
-        const cu = macho_file.getUnwindRecord(cu_index);
+        const cu = file.object.getUnwindRecord(cu_index);
         const cu_atom = cu.getAtom(macho_file);
         if (markAtom(cu_atom)) markLive(cu_atom, macho_file);
 
src/link/MachO/Object.zig
@@ -1,8 +1,10 @@
-archive: ?InArchive = null,
+/// Non-zero for fat object files or archives
+offset: u64,
 path: []const u8,
 file_handle: File.HandleIndex,
 mtime: u64,
 index: File.Index,
+in_archive: ?InArchive = null,
 
 header: ?macho.mach_header_64 = null,
 sections: std.MultiArrayList(Section) = .{},
@@ -21,7 +23,8 @@ compact_unwind_sect_index: ?u8 = null,
 cies: std.ArrayListUnmanaged(Cie) = .{},
 fdes: std.ArrayListUnmanaged(Fde) = .{},
 eh_frame_data: std.ArrayListUnmanaged(u8) = .{},
-unwind_records: std.ArrayListUnmanaged(UnwindInfo.Record.Index) = .{},
+unwind_records: std.ArrayListUnmanaged(UnwindInfo.Record) = .{},
+unwind_records_indexes: std.ArrayListUnmanaged(UnwindInfo.Record.Index) = .{},
 data_in_code: std.ArrayListUnmanaged(macho.data_in_code_entry) = .{},
 
 alive: bool = true,
@@ -39,7 +42,7 @@ pub fn isObject(path: []const u8) !bool {
 }
 
 pub fn deinit(self: *Object, allocator: Allocator) void {
-    if (self.archive) |*ar| allocator.free(ar.path);
+    if (self.in_archive) |*ar| allocator.free(ar.path);
     allocator.free(self.path);
     for (self.sections.items(.relocs), self.sections.items(.subsections)) |*relocs, *sub| {
         relocs.deinit(allocator);
@@ -54,6 +57,7 @@ pub fn deinit(self: *Object, allocator: Allocator) void {
     self.fdes.deinit(allocator);
     self.eh_frame_data.deinit(allocator);
     self.unwind_records.deinit(allocator);
+    self.unwind_records_indexes.deinit(allocator);
     for (self.stab_files.items) |*sf| {
         sf.stabs.deinit(allocator);
     }
@@ -66,12 +70,11 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
     defer tracy.end();
 
     const gpa = macho_file.base.comp.gpa;
-    const offset = if (self.archive) |ar| ar.offset else 0;
     const handle = macho_file.getFileHandle(self.file_handle);
 
     var header_buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
     {
-        const amt = try handle.preadAll(&header_buffer, offset);
+        const amt = try handle.preadAll(&header_buffer, self.offset);
         if (amt != @sizeOf(macho.mach_header_64)) return error.InputOutput;
     }
     self.header = @as(*align(1) const macho.mach_header_64, @ptrCast(&header_buffer)).*;
@@ -92,7 +95,7 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
     const lc_buffer = try gpa.alloc(u8, self.header.?.sizeofcmds);
     defer gpa.free(lc_buffer);
     {
-        const amt = try handle.preadAll(lc_buffer, offset + @sizeOf(macho.mach_header_64));
+        const amt = try handle.preadAll(lc_buffer, self.offset + @sizeOf(macho.mach_header_64));
         if (amt != self.header.?.sizeofcmds) return error.InputOutput;
     }
 
@@ -119,14 +122,14 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
             const cmd = lc.cast(macho.symtab_command).?;
             try self.strtab.resize(gpa, cmd.strsize);
             {
-                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + offset);
+                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + self.offset);
                 if (amt != self.strtab.items.len) return error.InputOutput;
             }
 
             const symtab_buffer = try gpa.alloc(u8, cmd.nsyms * @sizeOf(macho.nlist_64));
             defer gpa.free(symtab_buffer);
             {
-                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + offset);
+                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + self.offset);
                 if (amt != symtab_buffer.len) return error.InputOutput;
             }
             const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(symtab_buffer.ptr))[0..cmd.nsyms];
@@ -144,7 +147,7 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
             const buffer = try gpa.alloc(u8, cmd.datasize);
             defer gpa.free(buffer);
             {
-                const amt = try handle.preadAll(buffer, offset + cmd.dataoff);
+                const amt = try handle.preadAll(buffer, self.offset + cmd.dataoff);
                 if (amt != buffer.len) return error.InputOutput;
             }
             const ndice = @divExact(cmd.datasize, @sizeOf(macho.data_in_code_entry));
@@ -218,11 +221,11 @@ pub fn parse(self: *Object, macho_file: *MachO) !void {
 
     // Parse Apple's __LD,__compact_unwind section
     if (self.compact_unwind_sect_index) |index| {
-        try self.initUnwindRecords(index, macho_file);
+        try self.initUnwindRecords(gpa, index, handle, macho_file);
     }
 
     if (self.hasUnwindRecords() or self.hasEhFrameRecords()) {
-        try self.parseUnwindRecords(macho_file);
+        try self.parseUnwindRecords(gpa, macho_file.getTarget().cpu.arch, macho_file);
     }
 
     if (self.platform) |platform| {
@@ -987,7 +990,7 @@ fn initEhFrameRecords(self: *Object, sect_id: u8, macho_file: *MachO) !void {
     }
 }
 
-fn initUnwindRecords(self: *Object, sect_id: u8, macho_file: *MachO) !void {
+fn initUnwindRecords(self: *Object, allocator: Allocator, sect_id: u8, file: File.Handle, macho_file: *MachO) !void {
     const tracy = trace(@src());
     defer tracy.end();
 
@@ -1003,19 +1006,22 @@ fn initUnwindRecords(self: *Object, sect_id: u8, macho_file: *MachO) !void {
         }
     };
 
-    const gpa = macho_file.base.comp.gpa;
-    const data = try self.getSectionData(sect_id, macho_file);
-    defer gpa.free(data);
+    const header = self.sections.items(.header)[sect_id];
+    const data = try allocator.alloc(u8, header.size);
+    defer allocator.free(data);
+    const amt = try file.preadAll(data, header.offset + self.offset);
+    if (amt != data.len) return error.InputOutput;
+
     const nrecs = @divExact(data.len, @sizeOf(macho.compact_unwind_entry));
     const recs = @as([*]align(1) const macho.compact_unwind_entry, @ptrCast(data.ptr))[0..nrecs];
     const sym_lookup = SymbolLookup{ .ctx = self };
 
-    try self.unwind_records.resize(gpa, nrecs);
+    try self.unwind_records.ensureTotalCapacityPrecise(allocator, nrecs);
+    try self.unwind_records_indexes.ensureTotalCapacityPrecise(allocator, nrecs);
 
-    const header = self.sections.items(.header)[sect_id];
     const relocs = self.sections.items(.relocs)[sect_id].items;
     var reloc_idx: usize = 0;
-    for (recs, self.unwind_records.items, 0..) |rec, *out_index, rec_idx| {
+    for (recs, 0..) |rec, rec_idx| {
         const rec_start = rec_idx * @sizeOf(macho.compact_unwind_entry);
         const rec_end = rec_start + @sizeOf(macho.compact_unwind_entry);
         const reloc_start = reloc_idx;
@@ -1023,11 +1029,11 @@ fn initUnwindRecords(self: *Object, sect_id: u8, macho_file: *MachO) !void {
             relocs[reloc_idx].offset < rec_end) : (reloc_idx += 1)
         {}
 
-        out_index.* = try macho_file.addUnwindRecord();
-        const out = macho_file.getUnwindRecord(out_index.*);
+        const out_index = self.addUnwindRecordAssumeCapacity();
+        self.unwind_records_indexes.appendAssumeCapacity(out_index);
+        const out = self.getUnwindRecord(out_index);
         out.length = rec.rangeLength;
         out.enc = .{ .enc = rec.compactUnwindEncoding };
-        out.file = self.index;
 
         for (relocs[reloc_start..reloc_idx]) |rel| {
             if (rel.type != .unsigned or rel.meta.length != 3) {
@@ -1090,7 +1096,7 @@ fn initUnwindRecords(self: *Object, sect_id: u8, macho_file: *MachO) !void {
     }
 }
 
-fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
+fn parseUnwindRecords(self: *Object, allocator: Allocator, cpu_arch: std.Target.Cpu.Arch, macho_file: *MachO) !void {
     // Synthesise missing unwind records.
     // The logic here is as follows:
     // 1. if an atom has unwind info record that is not DWARF, FDE is marked dead
@@ -1100,8 +1106,7 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
 
     const Superposition = struct { atom: Atom.Index, size: u64, cu: ?UnwindInfo.Record.Index = null, fde: ?Fde.Index = null };
 
-    const gpa = macho_file.base.comp.gpa;
-    var superposition = std.AutoArrayHashMap(u64, Superposition).init(gpa);
+    var superposition = std.AutoArrayHashMap(u64, Superposition).init(allocator);
     defer superposition.deinit();
 
     const slice = self.symtab.slice();
@@ -1119,8 +1124,8 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
         }
     }
 
-    for (self.unwind_records.items) |rec_index| {
-        const rec = macho_file.getUnwindRecord(rec_index);
+    for (self.unwind_records_indexes.items) |rec_index| {
+        const rec = self.getUnwindRecord(rec_index);
         const atom = rec.getAtom(macho_file);
         const addr = atom.getInputAddress(macho_file) + rec.atom_offset;
         superposition.getPtr(addr).?.cu = rec_index;
@@ -1137,7 +1142,7 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
             const fde = &self.fdes.items[fde_index];
 
             if (meta.cu) |rec_index| {
-                const rec = macho_file.getUnwindRecord(rec_index);
+                const rec = self.getUnwindRecord(rec_index);
                 if (!rec.enc.isDwarf(macho_file)) {
                     // Mark FDE dead
                     fde.alive = false;
@@ -1147,15 +1152,14 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
                 }
             } else {
                 // Synthesise new unwind info record
-                const rec_index = try macho_file.addUnwindRecord();
-                const rec = macho_file.getUnwindRecord(rec_index);
-                try self.unwind_records.append(gpa, rec_index);
+                const rec_index = try self.addUnwindRecord(allocator);
+                const rec = self.getUnwindRecord(rec_index);
+                try self.unwind_records_indexes.append(allocator, rec_index);
                 rec.length = @intCast(meta.size);
                 rec.atom = fde.atom;
                 rec.atom_offset = fde.atom_offset;
                 rec.fde = fde_index;
-                rec.file = fde.file;
-                switch (macho_file.getTarget().cpu.arch) {
+                switch (cpu_arch) {
                     .x86_64 => rec.enc.setMode(macho.UNWIND_X86_64_MODE.DWARF),
                     .aarch64 => rec.enc.setMode(macho.UNWIND_ARM64_MODE.DWARF),
                     else => unreachable,
@@ -1163,10 +1167,10 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
             }
         } else if (meta.cu == null and meta.fde == null) {
             // Create a null record
-            const rec_index = try macho_file.addUnwindRecord();
-            const rec = macho_file.getUnwindRecord(rec_index);
+            const rec_index = try self.addUnwindRecord(allocator);
+            const rec = self.getUnwindRecord(rec_index);
             const atom = macho_file.getAtom(meta.atom).?;
-            try self.unwind_records.append(gpa, rec_index);
+            try self.unwind_records_indexes.append(allocator, rec_index);
             rec.length = @intCast(meta.size);
             rec.atom = meta.atom;
             rec.atom_offset = @intCast(addr - atom.getInputAddress(macho_file));
@@ -1174,25 +1178,31 @@ fn parseUnwindRecords(self: *Object, macho_file: *MachO) !void {
         }
     }
 
-    const sortFn = struct {
-        fn sortFn(ctx: *MachO, lhs_index: UnwindInfo.Record.Index, rhs_index: UnwindInfo.Record.Index) bool {
-            const lhs = ctx.getUnwindRecord(lhs_index);
-            const rhs = ctx.getUnwindRecord(rhs_index);
-            const lhsa = lhs.getAtom(ctx);
-            const rhsa = rhs.getAtom(ctx);
-            return lhsa.getInputAddress(ctx) + lhs.atom_offset < rhsa.getInputAddress(ctx) + rhs.atom_offset;
+    const SortCtx = struct {
+        object: *Object,
+        mfile: *MachO,
+
+        fn sort(ctx: @This(), lhs_index: UnwindInfo.Record.Index, rhs_index: UnwindInfo.Record.Index) bool {
+            const lhs = ctx.object.getUnwindRecord(lhs_index);
+            const rhs = ctx.object.getUnwindRecord(rhs_index);
+            const lhsa = lhs.getAtom(ctx.mfile);
+            const rhsa = rhs.getAtom(ctx.mfile);
+            return lhsa.getInputAddress(ctx.mfile) + lhs.atom_offset < rhsa.getInputAddress(ctx.mfile) + rhs.atom_offset;
         }
-    }.sortFn;
-    mem.sort(UnwindInfo.Record.Index, self.unwind_records.items, macho_file, sortFn);
+    };
+    mem.sort(UnwindInfo.Record.Index, self.unwind_records_indexes.items, SortCtx{
+        .object = self,
+        .mfile = macho_file,
+    }, SortCtx.sort);
 
     // Associate unwind records to atoms
     var next_cu: u32 = 0;
-    while (next_cu < self.unwind_records.items.len) {
+    while (next_cu < self.unwind_records_indexes.items.len) {
         const start = next_cu;
-        const rec_index = self.unwind_records.items[start];
-        const rec = macho_file.getUnwindRecord(rec_index);
-        while (next_cu < self.unwind_records.items.len and
-            macho_file.getUnwindRecord(self.unwind_records.items[next_cu]).atom == rec.atom) : (next_cu += 1)
+        const rec_index = self.unwind_records_indexes.items[start];
+        const rec = self.getUnwindRecord(rec_index);
+        while (next_cu < self.unwind_records_indexes.items.len and
+            self.getUnwindRecord(self.unwind_records_indexes.items[next_cu]).atom == rec.atom) : (next_cu += 1)
         {}
 
         const atom = rec.getAtom(macho_file);
@@ -1441,7 +1451,7 @@ pub fn checkDuplicates(self: *Object, dupes: anytype, macho_file: *MachO) error{
     }
 }
 
-pub fn scanRelocs(self: Object, macho_file: *MachO) !void {
+pub fn scanRelocs(self: *Object, macho_file: *MachO) !void {
     const tracy = trace(@src());
     defer tracy.end();
 
@@ -1453,8 +1463,8 @@ pub fn scanRelocs(self: Object, macho_file: *MachO) !void {
         try atom.scanRelocs(macho_file);
     }
 
-    for (self.unwind_records.items) |rec_index| {
-        const rec = macho_file.getUnwindRecord(rec_index);
+    for (self.unwind_records_indexes.items) |rec_index| {
+        const rec = self.getUnwindRecord(rec_index);
         if (!rec.alive) continue;
         if (rec.getFde(macho_file)) |fde| {
             if (fde.getCie(macho_file).getPersonality(macho_file)) |sym| {
@@ -1532,12 +1542,11 @@ pub fn parseAr(self: *Object, macho_file: *MachO) !void {
     defer tracy.end();
 
     const gpa = macho_file.base.comp.gpa;
-    const offset = if (self.archive) |ar| ar.offset else 0;
     const handle = macho_file.getFileHandle(self.file_handle);
 
     var header_buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
     {
-        const amt = try handle.preadAll(&header_buffer, offset);
+        const amt = try handle.preadAll(&header_buffer, self.offset);
         if (amt != @sizeOf(macho.mach_header_64)) return error.InputOutput;
     }
     self.header = @as(*align(1) const macho.mach_header_64, @ptrCast(&header_buffer)).*;
@@ -1558,7 +1567,7 @@ pub fn parseAr(self: *Object, macho_file: *MachO) !void {
     const lc_buffer = try gpa.alloc(u8, self.header.?.sizeofcmds);
     defer gpa.free(lc_buffer);
     {
-        const amt = try handle.preadAll(lc_buffer, offset + @sizeOf(macho.mach_header_64));
+        const amt = try handle.preadAll(lc_buffer, self.offset + @sizeOf(macho.mach_header_64));
         if (amt != self.header.?.sizeofcmds) return error.InputOutput;
     }
 
@@ -1571,14 +1580,14 @@ pub fn parseAr(self: *Object, macho_file: *MachO) !void {
             const cmd = lc.cast(macho.symtab_command).?;
             try self.strtab.resize(gpa, cmd.strsize);
             {
-                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + offset);
+                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + self.offset);
                 if (amt != self.strtab.items.len) return error.InputOutput;
             }
 
             const symtab_buffer = try gpa.alloc(u8, cmd.nsyms * @sizeOf(macho.nlist_64));
             defer gpa.free(symtab_buffer);
             {
-                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + offset);
+                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + self.offset);
                 if (amt != symtab_buffer.len) return error.InputOutput;
             }
             const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(symtab_buffer.ptr))[0..cmd.nsyms];
@@ -1613,7 +1622,7 @@ pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, macho_file: *M
 }
 
 pub fn updateArSize(self: *Object, macho_file: *MachO) !void {
-    self.output_ar_state.size = if (self.archive) |ar| ar.size else size: {
+    self.output_ar_state.size = if (self.in_archive) |ar| ar.size else size: {
         const file = macho_file.getFileHandle(self.file_handle);
         break :size (try file.stat()).size;
     };
@@ -1622,7 +1631,6 @@ pub fn updateArSize(self: *Object, macho_file: *MachO) !void {
 pub fn writeAr(self: Object, ar_format: Archive.Format, macho_file: *MachO, writer: anytype) !void {
     // Header
     const size = std.math.cast(usize, self.output_ar_state.size) orelse return error.Overflow;
-    const offset: u64 = if (self.archive) |ar| ar.offset else 0;
     try Archive.writeHeader(self.path, size, ar_format, writer);
     // Data
     const file = macho_file.getFileHandle(self.file_handle);
@@ -1630,7 +1638,7 @@ pub fn writeAr(self: Object, ar_format: Archive.Format, macho_file: *MachO, writ
     const gpa = macho_file.base.comp.gpa;
     const data = try gpa.alloc(u8, size);
     defer gpa.free(data);
-    const amt = try file.preadAll(data, offset);
+    const amt = try file.preadAll(data, self.offset);
     if (amt != size) return error.InputOutput;
     try writer.writeAll(data);
 }
@@ -1680,7 +1688,7 @@ pub fn calcStabsSize(self: *Object, macho_file: *MachO) error{Overflow}!void {
         self.output_symtab_ctx.strsize += @as(u32, @intCast(comp_dir.len + 1)); // comp_dir
         self.output_symtab_ctx.strsize += @as(u32, @intCast(tu_name.len + 1)); // tu_name
 
-        if (self.archive) |ar| {
+        if (self.in_archive) |ar| {
             self.output_symtab_ctx.strsize += @as(u32, @intCast(ar.path.len + 1 + self.path.len + 1 + 1));
         } else {
             self.output_symtab_ctx.strsize += @as(u32, @intCast(self.path.len + 1));
@@ -1820,7 +1828,7 @@ pub fn writeStabs(self: *const Object, macho_file: *MachO, ctx: anytype) error{O
         index += 1;
         // N_OSO path
         n_strx = @as(u32, @intCast(ctx.strtab.items.len));
-        if (self.archive) |ar| {
+        if (self.in_archive) |ar| {
             ctx.strtab.appendSliceAssumeCapacity(ar.path);
             ctx.strtab.appendAssumeCapacity('(');
             ctx.strtab.appendSliceAssumeCapacity(self.path);
@@ -1989,11 +1997,10 @@ fn getSectionData(self: *const Object, index: u32, macho_file: *MachO) ![]u8 {
     assert(index < slice.items(.header).len);
     const sect = slice.items(.header)[index];
     const handle = macho_file.getFileHandle(self.file_handle);
-    const offset = if (self.archive) |ar| ar.offset else 0;
     const size = math.cast(usize, sect.size) orelse return error.Overflow;
     const buffer = try gpa.alloc(u8, size);
     errdefer gpa.free(buffer);
-    const amt = try handle.preadAll(buffer, sect.offset + offset);
+    const amt = try handle.preadAll(buffer, sect.offset + self.offset);
     if (amt != buffer.len) return error.InputOutput;
     return buffer;
 }
@@ -2002,9 +2009,8 @@ pub fn getAtomData(self: *const Object, macho_file: *MachO, atom: Atom, buffer:
     assert(buffer.len == atom.size);
     const slice = self.sections.slice();
     const handle = macho_file.getFileHandle(self.file_handle);
-    const offset = if (self.archive) |ar| ar.offset else 0;
     const sect = slice.items(.header)[atom.n_sect];
-    const amt = try handle.preadAll(buffer, sect.offset + offset + atom.off);
+    const amt = try handle.preadAll(buffer, sect.offset + self.offset + atom.off);
     if (amt != buffer.len) return error.InputOutput;
 }
 
@@ -2068,6 +2074,23 @@ pub fn asFile(self: *Object) File {
     return .{ .object = self };
 }
 
+fn addUnwindRecord(self: *Object, allocator: Allocator) !UnwindInfo.Record.Index {
+    try self.unwind_records.ensureUnusedCapacity(allocator, 1);
+    return self.addUnwindRecordAssumeCapacity();
+}
+
+fn addUnwindRecordAssumeCapacity(self: *Object) UnwindInfo.Record.Index {
+    const index = @as(UnwindInfo.Record.Index, @intCast(self.unwind_records.items.len));
+    const rec = self.unwind_records.addOneAssumeCapacity();
+    rec.* = .{ .file = self.index };
+    return index;
+}
+
+pub fn getUnwindRecord(self: *Object, index: UnwindInfo.Record.Index) *UnwindInfo.Record {
+    assert(index < self.unwind_records.items.len);
+    return &self.unwind_records.items[index];
+}
+
 pub fn format(
     self: *Object,
     comptime unused_fmt_string: []const u8,
@@ -2171,8 +2194,8 @@ fn formatUnwindRecords(
     const object = ctx.object;
     const macho_file = ctx.macho_file;
     try writer.writeAll("  unwind records\n");
-    for (object.unwind_records.items) |rec| {
-        try writer.print("    rec({d}) : {}\n", .{ rec, macho_file.getUnwindRecord(rec).fmt(macho_file) });
+    for (object.unwind_records_indexes.items) |rec| {
+        try writer.print("    rec({d}) : {}\n", .{ rec, object.getUnwindRecord(rec).fmt(macho_file) });
     }
 }
 
@@ -2211,7 +2234,7 @@ fn formatPath(
 ) !void {
     _ = unused_fmt_string;
     _ = options;
-    if (object.archive) |ar| {
+    if (object.in_archive) |ar| {
         try writer.writeAll(ar.path);
         try writer.writeByte('(');
         try writer.writeAll(object.path);
@@ -2285,7 +2308,6 @@ const CompileUnit = struct {
 
 const InArchive = struct {
     path: []const u8,
-    offset: u64,
     size: u32,
 };
 
@@ -2300,11 +2322,10 @@ const x86_64 = struct {
         const gpa = macho_file.base.comp.gpa;
 
         const handle = macho_file.getFileHandle(self.file_handle);
-        const offset = if (self.archive) |ar| ar.offset else 0;
         const relocs_buffer = try gpa.alloc(u8, sect.nreloc * @sizeOf(macho.relocation_info));
         defer gpa.free(relocs_buffer);
         {
-            const amt = try handle.preadAll(relocs_buffer, sect.reloff + offset);
+            const amt = try handle.preadAll(relocs_buffer, sect.reloff + self.offset);
             if (amt != relocs_buffer.len) return error.InputOutput;
         }
         const relocs = @as([*]align(1) const macho.relocation_info, @ptrCast(relocs_buffer.ptr))[0..sect.nreloc];
@@ -2463,11 +2484,10 @@ const aarch64 = struct {
         const gpa = macho_file.base.comp.gpa;
 
         const handle = macho_file.getFileHandle(self.file_handle);
-        const offset = if (self.archive) |ar| ar.offset else 0;
         const relocs_buffer = try gpa.alloc(u8, sect.nreloc * @sizeOf(macho.relocation_info));
         defer gpa.free(relocs_buffer);
         {
-            const amt = try handle.preadAll(relocs_buffer, sect.reloff + offset);
+            const amt = try handle.preadAll(relocs_buffer, sect.reloff + self.offset);
             if (amt != relocs_buffer.len) return error.InputOutput;
         }
         const relocs = @as([*]align(1) const macho.relocation_info, @ptrCast(relocs_buffer.ptr))[0..sect.nreloc];
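
Within Object, records and their indexes are kept as two parallel lists: addUnwindRecord and addUnwindRecordAssumeCapacity allocate a record already tagged with the owning file index, and getUnwindRecord resolves an index back to a record pointer. A minimal sketch of the reserve-then-append pattern used by initUnwindRecords above, assuming self: *Object, an allocator, and a record count nrecs are in scope:

try self.unwind_records.ensureTotalCapacityPrecise(allocator, nrecs);
try self.unwind_records_indexes.ensureTotalCapacityPrecise(allocator, nrecs);

var i: usize = 0;
while (i < nrecs) : (i += 1) {
    const out_index = self.addUnwindRecordAssumeCapacity(); // sets .file = self.index
    self.unwind_records_indexes.appendAssumeCapacity(out_index);
    const out = self.getUnwindRecord(out_index);
    _ = out; // length, enc, atom, etc. are filled in from the parsed compact unwind entry
}
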
src/link/MachO/relocatable.zig
@@ -286,12 +286,15 @@ fn parseObject(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
         break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
     };
     const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
-    macho_file.files.set(index, .{ .object = .{
-        .path = try gpa.dupe(u8, path),
-        .file_handle = handle,
-        .mtime = mtime,
-        .index = index,
-    } });
+    macho_file.files.set(index, .{
+        .object = .{
+            .offset = 0, // TODO FAT objects
+            .path = try gpa.dupe(u8, path),
+            .file_handle = handle,
+            .mtime = mtime,
+            .index = index,
+        },
+    });
     try macho_file.objects.append(gpa, index);
 
     const object = macho_file.getFile(index).?.object;
@@ -420,8 +423,8 @@ fn calcCompactUnwindSize(macho_file: *MachO, sect_index: u8) void {
 
     for (macho_file.objects.items) |index| {
         const object = macho_file.getFile(index).?.object;
-        for (object.unwind_records.items) |irec| {
-            const rec = macho_file.getUnwindRecord(irec);
+        for (object.unwind_records_indexes.items) |irec| {
+            const rec = object.getUnwindRecord(irec);
             if (!rec.alive) continue;
             size += @sizeOf(macho.compact_unwind_entry);
             nreloc += 1;
@@ -670,8 +673,8 @@ fn writeCompactUnwind(macho_file: *MachO) !void {
     var offset: i32 = 0;
     for (macho_file.objects.items) |index| {
         const object = macho_file.getFile(index).?.object;
-        for (object.unwind_records.items) |irec| {
-            const rec = macho_file.getUnwindRecord(irec);
+        for (object.unwind_records_indexes.items) |irec| {
+            const rec = object.getUnwindRecord(irec);
             if (!rec.alive) continue;
 
             var out: macho.compact_unwind_entry = .{
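
The .offset = 0 set above (and pos for archive members in Archive.zig) feeds every subsequent read: Object.zig now adds self.offset to each preadAll instead of rederiving the base from the optional archive info. A before/after sketch, assuming self: *Object, a handle obtained from macho_file.getFileHandle(self.file_handle), a section header, and a data buffer are in scope:

// Before: each call site recomputed the base offset.
// const offset = if (self.archive) |ar| ar.offset else 0;
// const amt = try handle.preadAll(data, header.offset + offset);

// After: the Object carries its base offset.
const amt = try handle.preadAll(data, header.offset + self.offset);
if (amt != data.len) return error.InputOutput;
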
src/link/MachO/UnwindInfo.zig
@@ -1,6 +1,6 @@
 /// List of all unwind records gathered from all objects and sorted
 /// by allocated relative function address within the section.
-records: std.ArrayListUnmanaged(Record.Index) = .{},
+records: std.ArrayListUnmanaged(Record.Ref) = .{},
 
 /// List of all personalities referenced by either unwind info entries
 /// or __eh_frame entries.
@@ -25,10 +25,10 @@ pub fn deinit(info: *UnwindInfo, allocator: Allocator) void {
     info.lsdas_lookup.deinit(allocator);
 }
 
-fn canFold(macho_file: *MachO, lhs_index: Record.Index, rhs_index: Record.Index) bool {
+fn canFold(macho_file: *MachO, lhs_ref: Record.Ref, rhs_ref: Record.Ref) bool {
     const cpu_arch = macho_file.getTarget().cpu.arch;
-    const lhs = macho_file.getUnwindRecord(lhs_index);
-    const rhs = macho_file.getUnwindRecord(rhs_index);
+    const lhs = lhs_ref.getUnwindRecord(macho_file);
+    const rhs = rhs_ref.getUnwindRecord(macho_file);
     if (cpu_arch == .x86_64) {
         if (lhs.enc.getMode() == @intFromEnum(macho.UNWIND_X86_64_MODE.STACK_IND) or
             rhs.enc.getMode() == @intFromEnum(macho.UNWIND_X86_64_MODE.STACK_IND)) return false;
@@ -52,17 +52,18 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
             const atom = macho_file.getAtom(atom_index) orelse continue;
             if (!atom.flags.alive) continue;
             const recs = atom.getUnwindRecords(macho_file);
+            const file = atom.getFile(macho_file);
             try info.records.ensureUnusedCapacity(gpa, recs.len);
             for (recs) |rec| {
-                if (!macho_file.getUnwindRecord(rec).alive) continue;
-                info.records.appendAssumeCapacity(rec);
+                if (!file.object.getUnwindRecord(rec).alive) continue;
+                info.records.appendAssumeCapacity(.{ .record = rec, .file = file.getIndex() });
             }
         }
     }
 
     // Encode records
-    for (info.records.items) |index| {
-        const rec = macho_file.getUnwindRecord(index);
+    for (info.records.items) |ref| {
+        const rec = ref.getUnwindRecord(macho_file);
         if (rec.getFde(macho_file)) |fde| {
             rec.enc.setDwarfSectionOffset(@intCast(fde.out_offset));
             if (fde.getLsdaAtom(macho_file)) |lsda| {
@@ -83,16 +84,16 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
 
     // Sort by assigned relative address within each output section
     const sortFn = struct {
-        fn sortFn(ctx: *MachO, lhs_index: Record.Index, rhs_index: Record.Index) bool {
-            const lhs = ctx.getUnwindRecord(lhs_index);
-            const rhs = ctx.getUnwindRecord(rhs_index);
+        fn sortFn(ctx: *MachO, lhs_ref: Record.Ref, rhs_ref: Record.Ref) bool {
+            const lhs = lhs_ref.getUnwindRecord(ctx);
+            const rhs = rhs_ref.getUnwindRecord(ctx);
             const lhsa = lhs.getAtom(ctx);
             const rhsa = rhs.getAtom(ctx);
             if (lhsa.out_n_sect == rhsa.out_n_sect) return lhs.getAtomAddress(ctx) < rhs.getAtomAddress(ctx);
             return lhsa.out_n_sect < rhsa.out_n_sect;
         }
     }.sortFn;
-    mem.sort(Record.Index, info.records.items, macho_file, sortFn);
+    mem.sort(Record.Ref, info.records.items, macho_file, sortFn);
 
     // Fold the records
     // Any adjacent two records that share encoding can be folded into one.
@@ -101,8 +102,8 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
         var j: usize = 1;
         while (j < info.records.items.len) : (j += 1) {
             if (canFold(macho_file, info.records.items[i], info.records.items[j])) {
-                const rec = macho_file.getUnwindRecord(info.records.items[i]);
-                rec.length += macho_file.getUnwindRecord(info.records.items[j]).length + 1;
+                const rec = info.records.items[i].getUnwindRecord(macho_file);
+                rec.length += info.records.items[j].getUnwindRecord(macho_file).length + 1;
             } else {
                 i += 1;
                 info.records.items[i] = info.records.items[j];
@@ -111,14 +112,15 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
         info.records.shrinkAndFree(gpa, i + 1);
     }
 
-    for (info.records.items) |rec_index| {
-        const rec = macho_file.getUnwindRecord(rec_index);
+    for (info.records.items) |ref| {
+        const rec = ref.getUnwindRecord(macho_file);
         const atom = rec.getAtom(macho_file);
-        log.debug("@{x}-{x} : {s} : rec({d}) : {}", .{
+        log.debug("@{x}-{x} : {s} : rec({d}) : object({d}) : {}", .{
             rec.getAtomAddress(macho_file),
             rec.getAtomAddress(macho_file) + rec.length,
             atom.getName(macho_file),
-            rec_index,
+            ref.record,
+            ref.file,
             rec.enc,
         });
     }
@@ -161,8 +163,8 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
         ).init(gpa);
         defer common_encodings_counts.deinit();
 
-        for (info.records.items) |rec_index| {
-            const rec = macho_file.getUnwindRecord(rec_index);
+        for (info.records.items) |ref| {
+            const rec = ref.getUnwindRecord(macho_file);
             if (rec.enc.isDwarf(macho_file)) continue;
             const gop = try common_encodings_counts.getOrPut(rec.enc);
             if (!gop.found_existing) {
@@ -190,7 +192,7 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
     {
         var i: u32 = 0;
         while (i < info.records.items.len) {
-            const rec = macho_file.getUnwindRecord(info.records.items[i]);
+            const rec = info.records.items[i].getUnwindRecord(macho_file);
             const range_start_max: u64 = rec.getAtomAddress(macho_file) + compressed_entry_func_offset_mask;
             var encoding_count: u9 = info.common_encodings_count;
             var space_left: u32 = second_level_page_words -
@@ -202,7 +204,7 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
             };
 
             while (space_left >= 1 and i < info.records.items.len) {
-                const next = macho_file.getUnwindRecord(info.records.items[i]);
+                const next = info.records.items[i].getUnwindRecord(macho_file);
                 const is_dwarf = next.enc.isDwarf(macho_file);
 
                 if (next.getAtomAddress(macho_file) >= range_start_max) {
@@ -244,8 +246,8 @@ pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
     // Save records having an LSDA pointer
     log.debug("LSDA pointers:", .{});
     try info.lsdas_lookup.ensureTotalCapacityPrecise(gpa, info.records.items.len);
-    for (info.records.items, 0..) |index, i| {
-        const rec = macho_file.getUnwindRecord(index);
+    for (info.records.items, 0..) |ref, i| {
+        const rec = ref.getUnwindRecord(macho_file);
         info.lsdas_lookup.appendAssumeCapacity(@intCast(info.lsdas.items.len));
         if (rec.getLsdaAtom(macho_file)) |lsda| {
             log.debug("  @{x} => lsda({d})", .{ rec.getAtomAddress(macho_file), lsda.atom_index });
@@ -301,7 +303,7 @@ pub fn write(info: UnwindInfo, macho_file: *MachO, buffer: []u8) !void {
         (info.lsdas.items.len * @sizeOf(macho.unwind_info_section_header_lsda_index_entry))));
     for (info.pages.items, 0..) |page, i| {
         assert(page.count > 0);
-        const rec = macho_file.getUnwindRecord(info.records.items[page.start]);
+        const rec = info.records.items[page.start].getUnwindRecord(macho_file);
         try writer.writeStruct(macho.unwind_info_section_header_index_entry{
             .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
             .secondLevelPagesSectionOffset = @as(u32, @intCast(pages_base_offset + i * second_level_page_bytes)),
@@ -310,7 +312,7 @@ pub fn write(info: UnwindInfo, macho_file: *MachO, buffer: []u8) !void {
         });
     }
 
-    const last_rec = macho_file.getUnwindRecord(info.records.items[info.records.items.len - 1]);
+    const last_rec = info.records.items[info.records.items.len - 1].getUnwindRecord(macho_file);
     const sentinel_address = @as(u32, @intCast(last_rec.getAtomAddress(macho_file) + last_rec.length - seg.vmaddr));
     try writer.writeStruct(macho.unwind_info_section_header_index_entry{
         .functionOffset = sentinel_address,
@@ -320,7 +322,7 @@ pub fn write(info: UnwindInfo, macho_file: *MachO, buffer: []u8) !void {
     });
 
     for (info.lsdas.items) |index| {
-        const rec = macho_file.getUnwindRecord(info.records.items[index]);
+        const rec = info.records.items[index].getUnwindRecord(macho_file);
         try writer.writeStruct(macho.unwind_info_section_header_lsda_index_entry{
             .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
             .lsdaOffset = @as(u32, @intCast(rec.getLsdaAddress(macho_file) - seg.vmaddr)),
@@ -537,6 +539,15 @@ pub const Record = struct {
     }
 
     pub const Index = u32;
+
+    const Ref = struct {
+        record: Index,
+        file: File.Index,
+
+        pub fn getUnwindRecord(ref: Ref, macho_file: *MachO) *Record {
+            return macho_file.getFile(ref.file).?.object.getUnwindRecord(ref.record);
+        }
+    };
 };
 
 const max_personalities = 3;
@@ -635,8 +646,8 @@ const Page = struct {
                     .entryCount = page.count,
                 });
 
-                for (info.records.items[page.start..][0..page.count]) |index| {
-                    const rec = macho_file.getUnwindRecord(index);
+                for (info.records.items[page.start..][0..page.count]) |ref| {
+                    const rec = ref.getUnwindRecord(macho_file);
                     try writer.writeStruct(macho.unwind_info_regular_second_level_entry{
                         .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
                         .encoding = rec.enc.enc,
@@ -658,9 +669,9 @@ const Page = struct {
                 }
 
                 assert(page.count > 0);
-                const first_rec = macho_file.getUnwindRecord(info.records.items[page.start]);
-                for (info.records.items[page.start..][0..page.count]) |index| {
-                    const rec = macho_file.getUnwindRecord(index);
+                const first_rec = info.records.items[page.start].getUnwindRecord(macho_file);
+                for (info.records.items[page.start..][0..page.count]) |ref| {
+                    const rec = ref.getUnwindRecord(macho_file);
                     const enc_index = blk: {
                         if (info.getCommonEncoding(rec.enc)) |id| break :blk id;
                         const ncommon = info.common_encodings_count;
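
Because records now live per Object, UnwindInfo.records stores Record.Ref pairs and every lookup funnels through the owning file, as in generate above. A minimal usage sketch from inside UnwindInfo.zig, mirroring the Ref definition; rec_index and file_index are assumed placeholders for an UnwindInfo.Record.Index and a File.Index:

const ref: Record.Ref = .{ .record = rec_index, .file = file_index };
const rec = ref.getUnwindRecord(macho_file);
// Equivalent to: macho_file.getFile(ref.file).?.object.getUnwindRecord(ref.record)
_ = rec;
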
src/link/MachO.zig
@@ -67,7 +67,6 @@ entry_index: ?Symbol.Index = null,
 atoms: std.ArrayListUnmanaged(Atom) = .{},
 atoms_extra: std.ArrayListUnmanaged(u32) = .{},
 thunks: std.ArrayListUnmanaged(Thunk) = .{},
-unwind_records: std.ArrayListUnmanaged(UnwindInfo.Record) = .{},
 
 /// String interning table
 strings: StringTable = .{},
@@ -357,7 +356,6 @@ pub fn deinit(self: *MachO) void {
         thunk.deinit(gpa);
     }
     self.thunks.deinit(gpa);
-    self.unwind_records.deinit(gpa);
 }
 
 pub fn flush(self: *MachO, arena: Allocator, tid: Zcu.PerThread.Id, prog_node: std.Progress.Node) link.File.FlushError!void {
@@ -982,12 +980,15 @@ fn parseObject(self: *MachO, path: []const u8) ParseError!void {
         break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
     };
     const index = @as(File.Index, @intCast(try self.files.addOne(gpa)));
-    self.files.set(index, .{ .object = .{
-        .path = try gpa.dupe(u8, path),
-        .file_handle = handle,
-        .mtime = mtime,
-        .index = index,
-    } });
+    self.files.set(index, .{
+        .object = .{
+            .offset = 0, // TODO FAT objects
+            .path = try gpa.dupe(u8, path),
+            .file_handle = handle,
+            .mtime = mtime,
+            .index = index,
+        },
+    });
     try self.objects.append(gpa, index);
 
     const object = self.getFile(index).?.object;
@@ -4058,18 +4059,6 @@ pub fn getGlobalByName(self: *MachO, name: []const u8) ?Symbol.Index {
     return self.globals.get(off);
 }
 
-pub fn addUnwindRecord(self: *MachO) !UnwindInfo.Record.Index {
-    const index = @as(UnwindInfo.Record.Index, @intCast(self.unwind_records.items.len));
-    const rec = try self.unwind_records.addOne(self.base.comp.gpa);
-    rec.* = .{};
-    return index;
-}
-
-pub fn getUnwindRecord(self: *MachO, index: UnwindInfo.Record.Index) *UnwindInfo.Record {
-    assert(index < self.unwind_records.items.len);
-    return &self.unwind_records.items[index];
-}
-
 pub fn addThunk(self: *MachO) !Thunk.Index {
     const index = @as(Thunk.Index, @intCast(self.thunks.items.len));
     const thunk = try self.thunks.addOne(self.base.comp.gpa);