Commit eba280ce20

Jakub Konka <kubkon@jakubkonka.com>
2023-04-05 08:23:46
macho: refactor relocation type in incremental linker
1 parent 1795b8e
Changed files (5)
src/arch/aarch64/Emit.zig
@@ -673,7 +673,7 @@ fn mirCallExtern(emit: *Emit, inst: Mir.Inst.Index) !void {
         const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = relocation.atom_index, .file = null }).?;
         const target = macho_file.getGlobalByIndex(relocation.sym_index);
         try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-            .type = @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_BRANCH26),
+            .type = .branch,
             .target = target,
             .offset = offset,
             .addend = 0,
@@ -883,41 +883,32 @@ fn mirLoadMemoryPie(emit: *Emit, inst: Mir.Inst.Index) !void {
     }
 
     if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
+        const Atom = link.File.MachO.Atom;
+        const Relocation = Atom.Relocation;
         const atom_index = macho_file.getAtomIndexForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
-        // TODO this causes segfault in stage1
-        // try atom.addRelocations(macho_file, 2, .{
-        try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
+        try Atom.addRelocations(macho_file, atom_index, &[_]Relocation{ .{
             .target = .{ .sym_index = data.sym_index, .file = null },
             .offset = offset,
             .addend = 0,
             .pcrel = true,
             .length = 2,
             .type = switch (tag) {
-                .load_memory_got,
-                .load_memory_ptr_got,
-                => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGE21),
-                .load_memory_direct,
-                .load_memory_ptr_direct,
-                => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
+                .load_memory_got, .load_memory_ptr_got => Relocation.Type.got_page,
+                .load_memory_direct, .load_memory_ptr_direct => Relocation.Type.page,
                 else => unreachable,
             },
-        });
-        try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
+        }, .{
             .target = .{ .sym_index = data.sym_index, .file = null },
             .offset = offset + 4,
             .addend = 0,
             .pcrel = false,
             .length = 2,
             .type = switch (tag) {
-                .load_memory_got,
-                .load_memory_ptr_got,
-                => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGEOFF12),
-                .load_memory_direct,
-                .load_memory_ptr_direct,
-                => @enumToInt(std.macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
+                .load_memory_got, .load_memory_ptr_got => Relocation.Type.got_pageoff,
+                .load_memory_direct, .load_memory_ptr_direct => Relocation.Type.pageoff,
                 else => unreachable,
             },
-        });
+        } });
     } else if (emit.bin_file.cast(link.File.Coff)) |coff_file| {
         const atom_index = coff_file.getAtomIndexForSymbol(.{ .sym_index = data.atom_index, .file = null }).?;
         const target = switch (tag) {
src/arch/x86_64/Emit.zig
@@ -42,7 +42,7 @@ pub fn emitMir(emit: *Emit) Error!void {
                 ).?;
                 const target = macho_file.getGlobalByIndex(inst.data.relocation.sym_index);
                 try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-                    .type = @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
+                    .type = .branch,
                     .target = target,
                     .offset = end_offset - 4,
                     .addend = 0,
@@ -68,17 +68,16 @@ pub fn emitMir(emit: *Emit) Error!void {
             .mov_linker, .lea_linker => if (emit.bin_file.cast(link.File.MachO)) |macho_file| {
                 const metadata =
                     emit.lower.mir.extraData(Mir.LeaRegisterReloc, inst.data.payload).data;
-                const reloc_type = switch (inst.ops) {
-                    .got_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_GOT),
-                    .direct_reloc => @enumToInt(std.macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
-                    else => unreachable,
-                };
                 const atom_index = macho_file.getAtomIndexForSymbol(.{
                     .sym_index = metadata.atom_index,
                     .file = null,
                 }).?;
                 try link.File.MachO.Atom.addRelocation(macho_file, atom_index, .{
-                    .type = reloc_type,
+                    .type = switch (inst.ops) {
+                        .got_reloc => .got,
+                        .direct_reloc => .signed,
+                        else => unreachable,
+                    },
                     .target = .{ .sym_index = metadata.sym_index, .file = null },
                     .offset = @intCast(u32, end_offset - 4),
                     .addend = 0,
src/link/MachO/Atom.zig
@@ -14,7 +14,7 @@ const trace = @import("../../tracy.zig").trace;
 const Allocator = mem.Allocator;
 const Arch = std.Target.Cpu.Arch;
 const MachO = @import("../MachO.zig");
-const Relocation = @import("Relocation.zig");
+pub const Relocation = @import("Relocation.zig");
 const SymbolWithLoc = MachO.SymbolWithLoc;
 
 /// Each decl always gets a local symbol with the fully qualified name.
@@ -113,25 +113,19 @@ pub fn freeListEligible(self: Atom, macho_file: *MachO) bool {
 }
 
 pub fn addRelocation(macho_file: *MachO, atom_index: Index, reloc: Relocation) !void {
-    return addRelocations(macho_file, atom_index, 1, .{reloc});
+    return addRelocations(macho_file, atom_index, &[_]Relocation{reloc});
 }
 
-pub fn addRelocations(
-    macho_file: *MachO,
-    atom_index: Index,
-    comptime count: comptime_int,
-    relocs: [count]Relocation,
-) !void {
+pub fn addRelocations(macho_file: *MachO, atom_index: Index, relocs: []Relocation) !void {
     const gpa = macho_file.base.allocator;
-    const target = macho_file.base.options.target;
     const gop = try macho_file.relocs.getOrPut(gpa, atom_index);
     if (!gop.found_existing) {
         gop.value_ptr.* = .{};
     }
-    try gop.value_ptr.ensureUnusedCapacity(gpa, count);
+    try gop.value_ptr.ensureUnusedCapacity(gpa, relocs.len);
     for (relocs) |reloc| {
         log.debug("  (adding reloc of type {s} to target %{d})", .{
-            reloc.fmtType(target),
+            @tagName(reloc.type),
             reloc.target.sym_index,
         });
         gop.value_ptr.appendAssumeCapacity(reloc);
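For context, the new slice-based signature is exercised exactly like the call sites updated elsewhere in this commit. A minimal sketch of a caller (assuming `macho_file: *MachO`, `atom_index: Atom.Index`, and `target: SymbolWithLoc` are already in scope, and `Atom` is `link.File.MachO.Atom` as in the Emit.zig hunks above):

    const Relocation = Atom.Relocation;

    // Add a page/pageoff pair in a single call; the runtime slice replaces the
    // old `comptime count: comptime_int` plus fixed-size array parameters.
    try Atom.addRelocations(macho_file, atom_index, &[_]Relocation{ .{
        .type = .page,
        .target = target,
        .offset = 0,
        .addend = 0,
        .pcrel = true,
        .length = 2,
    }, .{
        .type = .pageoff,
        .target = target,
        .offset = 4,
        .addend = 0,
        .pcrel = false,
        .length = 2,
    } });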
src/link/MachO/Relocation.zig
@@ -1,19 +1,7 @@
-const Relocation = @This();
-
-const std = @import("std");
-const aarch64 = @import("../../arch/aarch64/bits.zig");
-const assert = std.debug.assert;
-const log = std.log.scoped(.link);
-const macho = std.macho;
-const math = std.math;
-const mem = std.mem;
-const meta = std.meta;
-
-const Atom = @import("Atom.zig");
-const MachO = @import("../MachO.zig");
-const SymbolWithLoc = MachO.SymbolWithLoc;
+//! Relocation used by the self-hosted backends to instruct the linker where and how to
+//! fix up the values when flushing the contents to file and/or memory.
 
-type: u4,
+type: Type,
 target: SymbolWithLoc,
 offset: u32,
 addend: i64,
@@ -21,36 +9,46 @@ pcrel: bool,
 length: u2,
 dirty: bool = true,
 
+pub const Type = enum {
+    // x86, x86_64
+    /// RIP-relative displacement to a GOT pointer
+    got,
+    /// RIP-relative displacement
+    signed,
+    /// RIP-relative displacement to a threadlocal variable descriptor
+    tlv,
+
+    // aarch64
+    /// PC-relative distance to target page in GOT section
+    got_page,
+    /// Offset to a GOT pointer relative to the start of a page in GOT section
+    got_pageoff,
+    /// PC-relative distance to target page in a section
+    page,
+    /// Offset to a pointer relative to the start of a page in a section
+    pageoff,
+    /// PC-relative distance to target page in TLV section
+    tlv_page,
+    /// Offset to a pointer relative to the start of a page in TLV section
+    tlv_pageoff,
+
+    // common
+    /// PC/RIP-relative displacement to a B/BL/CALL target
+    branch,
+    /// Absolute pointer value
+    unsigned,
+};
+
 /// Returns true if and only if the reloc is dirty AND the target address is available.
 pub fn isResolvable(self: Relocation, macho_file: *MachO) bool {
     _ = self.getTargetAtomIndex(macho_file) orelse return false;
     return self.dirty;
 }
 
-pub fn fmtType(self: Relocation, target: std.Target) []const u8 {
-    switch (target.cpu.arch) {
-        .aarch64 => return @tagName(@intToEnum(macho.reloc_type_arm64, self.type)),
-        .x86_64 => return @tagName(@intToEnum(macho.reloc_type_x86_64, self.type)),
-        else => unreachable,
-    }
-}
-
 pub fn getTargetAtomIndex(self: Relocation, macho_file: *MachO) ?Atom.Index {
-    switch (macho_file.base.options.target.cpu.arch) {
-        .aarch64 => switch (@intToEnum(macho.reloc_type_arm64, self.type)) {
-            .ARM64_RELOC_GOT_LOAD_PAGE21,
-            .ARM64_RELOC_GOT_LOAD_PAGEOFF12,
-            .ARM64_RELOC_POINTER_TO_GOT,
-            => return macho_file.getGotAtomIndexForSymbol(self.target),
-            else => {},
-        },
-        .x86_64 => switch (@intToEnum(macho.reloc_type_x86_64, self.type)) {
-            .X86_64_RELOC_GOT,
-            .X86_64_RELOC_GOT_LOAD,
-            => return macho_file.getGotAtomIndexForSymbol(self.target),
-            else => {},
-        },
-        else => unreachable,
+    switch (self.type) {
+        .got, .got_page, .got_pageoff => return macho_file.getGotAtomIndexForSymbol(self.target),
+        else => {},
     }
     if (macho_file.getStubsAtomIndexForSymbol(self.target)) |stubs_atom| return stubs_atom;
     return macho_file.getAtomIndexForSymbol(self.target);
@@ -70,7 +68,7 @@ pub fn resolve(self: Relocation, macho_file: *MachO, atom_index: Atom.Index, cod
         source_addr,
         target_addr,
         macho_file.getSymbolName(self.target),
-        self.fmtType(macho_file.base.options.target),
+        @tagName(self.type),
     });
 
     switch (arch) {
@@ -81,18 +79,9 @@ pub fn resolve(self: Relocation, macho_file: *MachO, atom_index: Atom.Index, cod
 }
 
 fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []u8) void {
-    const rel_type = @intToEnum(macho.reloc_type_arm64, self.type);
-    if (rel_type == .ARM64_RELOC_UNSIGNED) {
-        return switch (self.length) {
-            2 => mem.writeIntLittle(u32, code[self.offset..][0..4], @truncate(u32, @bitCast(u64, target_addr))),
-            3 => mem.writeIntLittle(u64, code[self.offset..][0..8], @bitCast(u64, target_addr)),
-            else => unreachable,
-        };
-    }
-
-    var buffer = code[self.offset..][0..4];
-    switch (rel_type) {
-        .ARM64_RELOC_BRANCH26 => {
+    var buffer = code[self.offset..];
+    switch (self.type) {
+        .branch => {
             const displacement = math.cast(
                 i28,
                 @intCast(i64, target_addr) - @intCast(i64, source_addr),
@@ -101,15 +90,12 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
                 .unconditional_branch_immediate = mem.bytesToValue(meta.TagPayload(
                     aarch64.Instruction,
                     aarch64.Instruction.unconditional_branch_immediate,
-                ), buffer),
+                ), buffer[0..4]),
             };
             inst.unconditional_branch_immediate.imm26 = @truncate(u26, @bitCast(u28, displacement >> 2));
-            mem.writeIntLittle(u32, buffer, inst.toU32());
+            mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
         },
-        .ARM64_RELOC_PAGE21,
-        .ARM64_RELOC_GOT_LOAD_PAGE21,
-        .ARM64_RELOC_TLVP_LOAD_PAGE21,
-        => {
+        .page, .got_page, .tlv_page => {
             const source_page = @intCast(i32, source_addr >> 12);
             const target_page = @intCast(i32, target_addr >> 12);
             const pages = @bitCast(u21, @intCast(i21, target_page - source_page));
@@ -117,31 +103,29 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
                 .pc_relative_address = mem.bytesToValue(meta.TagPayload(
                     aarch64.Instruction,
                     aarch64.Instruction.pc_relative_address,
-                ), buffer),
+                ), buffer[0..4]),
             };
             inst.pc_relative_address.immhi = @truncate(u19, pages >> 2);
             inst.pc_relative_address.immlo = @truncate(u2, pages);
-            mem.writeIntLittle(u32, buffer, inst.toU32());
+            mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
         },
-        .ARM64_RELOC_PAGEOFF12,
-        .ARM64_RELOC_GOT_LOAD_PAGEOFF12,
-        => {
+        .pageoff, .got_pageoff => {
             const narrowed = @truncate(u12, @intCast(u64, target_addr));
-            if (isArithmeticOp(buffer)) {
+            if (isArithmeticOp(buffer[0..4])) {
                 var inst = aarch64.Instruction{
                     .add_subtract_immediate = mem.bytesToValue(meta.TagPayload(
                         aarch64.Instruction,
                         aarch64.Instruction.add_subtract_immediate,
-                    ), buffer),
+                    ), buffer[0..4]),
                 };
                 inst.add_subtract_immediate.imm12 = narrowed;
-                mem.writeIntLittle(u32, buffer, inst.toU32());
+                mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
             } else {
                 var inst = aarch64.Instruction{
                     .load_store_register = mem.bytesToValue(meta.TagPayload(
                         aarch64.Instruction,
                         aarch64.Instruction.load_store_register,
-                    ), buffer),
+                    ), buffer[0..4]),
                 };
                 const offset: u12 = blk: {
                     if (inst.load_store_register.size == 0) {
@@ -157,21 +141,21 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
                     }
                 };
                 inst.load_store_register.offset = offset;
-                mem.writeIntLittle(u32, buffer, inst.toU32());
+                mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
             }
         },
-        .ARM64_RELOC_TLVP_LOAD_PAGEOFF12 => {
+        .tlv_pageoff => {
             const RegInfo = struct {
                 rd: u5,
                 rn: u5,
                 size: u2,
             };
             const reg_info: RegInfo = blk: {
-                if (isArithmeticOp(buffer)) {
+                if (isArithmeticOp(buffer[0..4])) {
                     const inst = mem.bytesToValue(meta.TagPayload(
                         aarch64.Instruction,
                         aarch64.Instruction.add_subtract_immediate,
-                    ), buffer);
+                    ), buffer[0..4]);
                     break :blk .{
                         .rd = inst.rd,
                         .rn = inst.rn,
@@ -181,7 +165,7 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
                     const inst = mem.bytesToValue(meta.TagPayload(
                         aarch64.Instruction,
                         aarch64.Instruction.load_store_register,
-                    ), buffer);
+                    ), buffer[0..4]);
                     break :blk .{
                         .rd = inst.rt,
                         .rn = inst.rn,
@@ -201,45 +185,24 @@ fn resolveAarch64(self: Relocation, source_addr: u64, target_addr: i64, code: []
                     .sf = @truncate(u1, reg_info.size),
                 },
             };
-            mem.writeIntLittle(u32, buffer, inst.toU32());
+            mem.writeIntLittle(u32, buffer[0..4], inst.toU32());
         },
-        .ARM64_RELOC_POINTER_TO_GOT => {
-            const result = @intCast(i32, @intCast(i64, target_addr) - @intCast(i64, source_addr));
-            mem.writeIntLittle(i32, buffer, result);
+        .unsigned => switch (self.length) {
+            2 => mem.writeIntLittle(u32, buffer[0..4], @truncate(u32, @bitCast(u64, target_addr))),
+            3 => mem.writeIntLittle(u64, buffer[0..8], @bitCast(u64, target_addr)),
+            else => unreachable,
         },
-        .ARM64_RELOC_SUBTRACTOR => unreachable,
-        .ARM64_RELOC_ADDEND => unreachable,
-        .ARM64_RELOC_UNSIGNED => unreachable,
+        .got, .signed, .tlv => unreachable, // Invalid target architecture.
     }
 }
 
 fn resolveX8664(self: Relocation, source_addr: u64, target_addr: i64, code: []u8) void {
-    const rel_type = @intToEnum(macho.reloc_type_x86_64, self.type);
-    switch (rel_type) {
-        .X86_64_RELOC_BRANCH,
-        .X86_64_RELOC_GOT,
-        .X86_64_RELOC_GOT_LOAD,
-        .X86_64_RELOC_TLV,
-        => {
+    switch (self.type) {
+        .branch, .got, .tlv, .signed => {
             const displacement = @intCast(i32, @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4);
             mem.writeIntLittle(u32, code[self.offset..][0..4], @bitCast(u32, displacement));
         },
-        .X86_64_RELOC_SIGNED,
-        .X86_64_RELOC_SIGNED_1,
-        .X86_64_RELOC_SIGNED_2,
-        .X86_64_RELOC_SIGNED_4,
-        => {
-            const correction: u3 = switch (rel_type) {
-                .X86_64_RELOC_SIGNED => 0,
-                .X86_64_RELOC_SIGNED_1 => 1,
-                .X86_64_RELOC_SIGNED_2 => 2,
-                .X86_64_RELOC_SIGNED_4 => 4,
-                else => unreachable,
-            };
-            const displacement = @intCast(i32, target_addr - @intCast(i64, source_addr + correction + 4));
-            mem.writeIntLittle(u32, code[self.offset..][0..4], @bitCast(u32, displacement));
-        },
-        .X86_64_RELOC_UNSIGNED => {
+        .unsigned => {
             switch (self.length) {
                 2 => {
                     mem.writeIntLittle(u32, code[self.offset..][0..4], @truncate(u32, @bitCast(u64, target_addr)));
@@ -250,7 +213,7 @@ fn resolveX8664(self: Relocation, source_addr: u64, target_addr: i64, code: []u8
                 else => unreachable,
             }
         },
-        .X86_64_RELOC_SUBTRACTOR => unreachable,
+        .got_page, .got_pageoff, .page, .pageoff, .tlv_page, .tlv_pageoff => unreachable, // Invalid target architecture.
     }
 }
 
@@ -258,3 +221,18 @@ inline fn isArithmeticOp(inst: *const [4]u8) bool {
     const group_decode = @truncate(u5, inst[3]);
     return ((group_decode >> 2) == 4);
 }
+
+const Relocation = @This();
+
+const std = @import("std");
+const aarch64 = @import("../../arch/aarch64/bits.zig");
+const assert = std.debug.assert;
+const log = std.log.scoped(.link);
+const macho = std.macho;
+const math = std.math;
+const mem = std.mem;
+const meta = std.meta;
+
+const Atom = @import("Atom.zig");
+const MachO = @import("../MachO.zig");
+const SymbolWithLoc = MachO.SymbolWithLoc;
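The arch-neutral `Relocation.Type` tags replace the raw `std.macho.reloc_type_*` values that the removed lines stored via `@enumToInt`. The correspondence is recoverable entirely from the deleted code in this commit; the following sketch summarizes it (not part of the commit, helper names and the import path are illustrative only):

    const std = @import("std");
    const Relocation = @import("Relocation.zig"); // illustrative: sibling file in src/link/MachO/

    /// Hypothetical helper: maps the new tags back to the aarch64 Mach-O
    /// relocation constants the removed code referenced directly.
    fn toArm64RelocType(t: Relocation.Type) std.macho.reloc_type_arm64 {
        return switch (t) {
            .branch => .ARM64_RELOC_BRANCH26,
            .page => .ARM64_RELOC_PAGE21,
            .pageoff => .ARM64_RELOC_PAGEOFF12,
            .got_page => .ARM64_RELOC_GOT_LOAD_PAGE21,
            .got_pageoff => .ARM64_RELOC_GOT_LOAD_PAGEOFF12,
            .tlv_page => .ARM64_RELOC_TLVP_LOAD_PAGE21,
            .tlv_pageoff => .ARM64_RELOC_TLVP_LOAD_PAGEOFF12,
            .unsigned => .ARM64_RELOC_UNSIGNED,
            .got, .signed, .tlv => unreachable, // x86_64-only tags
        };
    }

    /// Hypothetical helper: the x86_64 side of the same correspondence.
    fn toX8664RelocType(t: Relocation.Type) std.macho.reloc_type_x86_64 {
        return switch (t) {
            .branch => .X86_64_RELOC_BRANCH,
            .got => .X86_64_RELOC_GOT,
            .signed => .X86_64_RELOC_SIGNED,
            .tlv => .X86_64_RELOC_TLV,
            .unsigned => .X86_64_RELOC_UNSIGNED,
            .got_page, .got_pageoff, .page, .pageoff, .tlv_page, .tlv_pageoff => unreachable, // aarch64-only tags
        };
    }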
src/link/MachO.zig
@@ -1250,11 +1250,7 @@ pub fn createGotAtom(self: *MachO, target: SymbolWithLoc) !Atom.Index {
     log.debug("allocated GOT atom at 0x{x}", .{sym.n_value});
 
     try Atom.addRelocation(self, atom_index, .{
-        .type = switch (self.base.options.target.cpu.arch) {
-            .aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
-            .x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
-            else => unreachable,
-        },
+        .type = .unsigned,
         .target = target,
         .offset = 0,
         .addend = 0,
@@ -1336,15 +1332,15 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
             code[9] = 0xff;
             code[10] = 0x25;
 
-            try Atom.addRelocations(self, atom_index, 2, .{ .{
-                .type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_SIGNED),
+            try Atom.addRelocations(self, atom_index, &[_]Relocation{ .{
+                .type = .signed,
                 .target = dyld_private,
                 .offset = 3,
                 .addend = 0,
                 .pcrel = true,
                 .length = 2,
             }, .{
-                .type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_GOT),
+                .type = .got,
                 .target = dyld_stub_binder,
                 .offset = 11,
                 .addend = 0,
@@ -1376,29 +1372,29 @@ fn createStubHelperPreambleAtom(self: *MachO) !void {
             // br x16
             mem.writeIntLittle(u32, code[20..][0..4], aarch64.Instruction.br(.x16).toU32());
 
-            try Atom.addRelocations(self, atom_index, 4, .{ .{
-                .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
+            try Atom.addRelocations(self, atom_index, &[_]Relocation{ .{
+                .type = .page,
                 .target = dyld_private,
                 .offset = 0,
                 .addend = 0,
                 .pcrel = true,
                 .length = 2,
             }, .{
-                .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
+                .type = .pageoff,
                 .target = dyld_private,
                 .offset = 4,
                 .addend = 0,
                 .pcrel = false,
                 .length = 2,
             }, .{
-                .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGE21),
+                .type = .got_page,
                 .target = dyld_stub_binder,
                 .offset = 12,
                 .addend = 0,
                 .pcrel = true,
                 .length = 2,
             }, .{
-                .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_GOT_LOAD_PAGEOFF12),
+                .type = .got_pageoff,
                 .target = dyld_stub_binder,
                 .offset = 16,
                 .addend = 0,
@@ -1456,7 +1452,7 @@ fn createStubHelperAtom(self: *MachO) !Atom.Index {
             code[5] = 0xe9;
 
             try Atom.addRelocation(self, atom_index, .{
-                .type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
+                .type = .branch,
                 .target = .{ .sym_index = stub_helper_preamble_atom_sym_index, .file = null },
                 .offset = 6,
                 .addend = 0,
@@ -1479,7 +1475,7 @@ fn createStubHelperAtom(self: *MachO) !Atom.Index {
             // Next 4 bytes 8..12 are just a placeholder populated in `populateLazyBindOffsetsInStubHelper`.
 
             try Atom.addRelocation(self, atom_index, .{
-                .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_BRANCH26),
+                .type = .branch,
                 .target = .{ .sym_index = stub_helper_preamble_atom_sym_index, .file = null },
                 .offset = 4,
                 .addend = 0,
@@ -1507,11 +1503,7 @@ fn createLazyPointerAtom(self: *MachO, stub_sym_index: u32, target: SymbolWithLo
     sym.n_sect = self.la_symbol_ptr_section_index.? + 1;
 
     try Atom.addRelocation(self, atom_index, .{
-        .type = switch (self.base.options.target.cpu.arch) {
-            .aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
-            .x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
-            else => unreachable,
-        },
+        .type = .unsigned,
         .target = .{ .sym_index = stub_sym_index, .file = null },
         .offset = 0,
         .addend = 0,
@@ -1565,7 +1557,7 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
             code[1] = 0x25;
 
             try Atom.addRelocation(self, atom_index, .{
-                .type = @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_BRANCH),
+                .type = .branch,
                 .target = .{ .sym_index = laptr_sym_index, .file = null },
                 .offset = 2,
                 .addend = 0,
@@ -1585,9 +1577,9 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
             // br x16
             mem.writeIntLittle(u32, code[8..12], aarch64.Instruction.br(.x16).toU32());
 
-            try Atom.addRelocations(self, atom_index, 2, .{
+            try Atom.addRelocations(self, atom_index, &[_]Relocation{
                 .{
-                    .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGE21),
+                    .type = .page,
                     .target = .{ .sym_index = laptr_sym_index, .file = null },
                     .offset = 0,
                     .addend = 0,
@@ -1595,7 +1587,7 @@ fn createStubAtom(self: *MachO, laptr_sym_index: u32) !Atom.Index {
                     .length = 2,
                 },
                 .{
-                    .type = @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_PAGEOFF12),
+                    .type = .pageoff,
                     .target = .{ .sym_index = laptr_sym_index, .file = null },
                     .offset = 4,
                     .addend = 0,
@@ -2663,11 +2655,7 @@ pub fn getDeclVAddr(self: *MachO, decl_index: Module.Decl.Index, reloc_info: Fil
     const sym_index = self.getAtom(this_atom_index).getSymbolIndex().?;
     const atom_index = self.getAtomIndexForSymbol(.{ .sym_index = reloc_info.parent_atom_index, .file = null }).?;
     try Atom.addRelocation(self, atom_index, .{
-        .type = switch (self.base.options.target.cpu.arch) {
-            .aarch64 => @enumToInt(macho.reloc_type_arm64.ARM64_RELOC_UNSIGNED),
-            .x86_64 => @enumToInt(macho.reloc_type_x86_64.X86_64_RELOC_UNSIGNED),
-            else => unreachable,
-        },
+        .type = .unsigned,
         .target = .{ .sym_index = sym_index, .file = null },
         .offset = @intCast(u32, reloc_info.offset),
         .addend = reloc_info.addend,
@@ -3115,28 +3103,6 @@ fn allocateAtom(self: *MachO, atom_index: Atom.Index, new_atom_size: u64, alignm
     return vaddr;
 }
 
-fn getSectionPrecedence(header: macho.section_64) u4 {
-    if (header.isCode()) {
-        if (mem.eql(u8, "__text", header.sectName())) return 0x0;
-        if (header.type() == macho.S_SYMBOL_STUBS) return 0x1;
-        return 0x2;
-    }
-    switch (header.type()) {
-        macho.S_NON_LAZY_SYMBOL_POINTERS,
-        macho.S_LAZY_SYMBOL_POINTERS,
-        => return 0x0,
-        macho.S_MOD_INIT_FUNC_POINTERS => return 0x1,
-        macho.S_MOD_TERM_FUNC_POINTERS => return 0x2,
-        macho.S_ZEROFILL => return 0xf,
-        macho.S_THREAD_LOCAL_REGULAR => return 0xd,
-        macho.S_THREAD_LOCAL_ZEROFILL => return 0xe,
-        else => if (mem.eql(u8, "__eh_frame", header.sectName()))
-            return 0xf
-        else
-            return 0x3,
-    }
-}
-
 pub fn getGlobalSymbol(self: *MachO, name: []const u8, lib_name: ?[]const u8) !u32 {
     _ = lib_name;
     const gpa = self.base.allocator;