Commit f9eb14ddcf
Changed files (3)
src/link/MachO/Object.zig
@@ -1233,6 +1233,82 @@ fn addSection(self: *Object, allocator: Allocator, segname: []const u8, sectname
    return n_sect;
}
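+/// Parses only what is needed to archive the object: the Mach-O header, the
+/// symbol table, and the platform load command. Section contents are not unpacked.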
+pub fn parseAr(self: *Object, macho_file: *MachO) !void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+    const offset = if (self.archive) |ar| ar.offset else 0;
+    const handle = macho_file.getFileHandle(self.file_handle);
+
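+    // Read the Mach-O header at this object's offset (non-zero when the object is an archive member).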
+    var header_buffer: [@sizeOf(macho.mach_header_64)]u8 = undefined;
+    {
+        const amt = try handle.preadAll(&header_buffer, offset);
+        if (amt != @sizeOf(macho.mach_header_64)) return error.InputOutput;
+    }
+    self.header = @as(*align(1) const macho.mach_header_64, @ptrCast(&header_buffer)).*;
+
+    const this_cpu_arch: std.Target.Cpu.Arch = switch (self.header.?.cputype) {
+        macho.CPU_TYPE_ARM64 => .aarch64,
+        macho.CPU_TYPE_X86_64 => .x86_64,
+        else => |x| {
+            try macho_file.reportParseError2(self.index, "unknown cpu architecture: {d}", .{x});
+            return error.InvalidCpuArch;
+        },
+    };
+    if (macho_file.getTarget().cpu.arch != this_cpu_arch) {
+        try macho_file.reportParseError2(self.index, "invalid cpu architecture: {s}", .{@tagName(this_cpu_arch)});
+        return error.InvalidCpuArch;
+    }
+
+    const lc_buffer = try gpa.alloc(u8, self.header.?.sizeofcmds);
+    defer gpa.free(lc_buffer);
+    {
+        const amt = try handle.preadAll(lc_buffer, offset + @sizeOf(macho.mach_header_64));
+        if (amt != self.header.?.sizeofcmds) return error.InputOutput;
+    }
+
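+    // Walk the load commands; only the symbol table and platform/version commands are of interest here.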
+    var it = LoadCommandIterator{
+        .ncmds = self.header.?.ncmds,
+        .buffer = lc_buffer,
+    };
+    while (it.next()) |lc| switch (lc.cmd()) {
+        .SYMTAB => {
+            const cmd = lc.cast(macho.symtab_command).?;
+            try self.strtab.resize(gpa, cmd.strsize);
+            {
+                const amt = try handle.preadAll(self.strtab.items, cmd.stroff + offset);
+                if (amt != self.strtab.items.len) return error.InputOutput;
+            }
+
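+            // Copy the raw nlist_64 entries into this object's symbol table.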
+            const symtab_buffer = try gpa.alloc(u8, cmd.nsyms * @sizeOf(macho.nlist_64));
+            defer gpa.free(symtab_buffer);
+            {
+                const amt = try handle.preadAll(symtab_buffer, cmd.symoff + offset);
+                if (amt != symtab_buffer.len) return error.InputOutput;
+            }
+            const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(symtab_buffer.ptr))[0..cmd.nsyms];
+            try self.symtab.ensureUnusedCapacity(gpa, symtab.len);
+            for (symtab) |nlist| {
+                self.symtab.appendAssumeCapacity(.{
+                    .nlist = nlist,
+                    .atom = 0,
+                    .size = 0,
+                });
+            }
+        },
+        .BUILD_VERSION,
+        .VERSION_MIN_MACOSX,
+        .VERSION_MIN_IPHONEOS,
+        .VERSION_MIN_TVOS,
+        .VERSION_MIN_WATCHOS,
+        => if (self.platform == null) {
+            self.platform = MachO.Platform.fromLoadCommand(lc);
+        },
+        else => {},
+    };
+}
+
pub fn updateArSymtab(self: Object, ar_symtab: *Archive.ArSymtab, macho_file: *MachO) error{OutOfMemory}!void {
    const gpa = macho_file.base.comp.gpa;
    for (self.symtab.items(.nlist)) |nlist| {
src/link/MachO/relocatable.zig
@@ -104,8 +104,19 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
    if (module_obj_path) |path| try positionals.append(.{ .path = path });

    for (positionals.items) |obj| {
-        // TODO: parse for archive meaning don't unpack objects
-        _ = obj;
+        parsePositional(macho_file, obj.path) catch |err| switch (err) {
+            error.MalformedObject,
+            error.MalformedArchive,
+            error.InvalidCpuArch,
+            error.InvalidTarget,
+            => continue, // already reported
+            error.UnknownFileType => try macho_file.reportParseError(obj.path, "unknown file type for an object file", .{}),
+            else => |e| try macho_file.reportParseError(
+                obj.path,
+                "unexpected error: parsing input file failed with error {s}",
+                .{@errorName(e)},
+            ),
+        };
    }

    if (comp.link_errors.items.len > 0) return error.FlushFailure;
@@ -241,6 +252,75 @@ pub fn flushStaticLib(macho_file: *MachO, comp: *Compilation, module_obj_path: ?
    if (comp.link_errors.items.len > 0) return error.FlushFailure;
}
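+/// Determines the input's file type (relocatable object, fat library, or archive)
+/// and dispatches to the matching parser.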
+fn parsePositional(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+    if (try Object.isObject(path)) {
+        try parseObject(macho_file, path);
+    } else if (try fat.isFatLibrary(path)) {
+        const fat_arch = try macho_file.parseFatLibrary(path);
+        if (try Archive.isArchive(path, fat_arch)) {
+            try parseArchive(macho_file, path, fat_arch);
+        } else return error.UnknownFileType;
+    } else if (try Archive.isArchive(path, null)) {
+        try parseArchive(macho_file, path, null);
+    } else return error.UnknownFileType;
+}
+
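+/// Opens the object file, registers it with the linker, and parses its header
+/// and symbol table via Object.parseAr.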
+fn parseObject(macho_file: *MachO, path: []const u8) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+    const file = try std.fs.cwd().openFile(path, .{});
+    errdefer file.close();
+    const handle = try macho_file.addFileHandle(file);
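+    // Record the file's mtime in whole seconds, falling back to 0 if stat fails.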
+    const mtime: u64 = mtime: {
+        const stat = file.stat() catch break :mtime 0;
+        break :mtime @as(u64, @intCast(@divFloor(stat.mtime, 1_000_000_000)));
+    };
+    const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
+    macho_file.files.set(index, .{ .object = .{
+        .path = try gpa.dupe(u8, path),
+        .file_handle = handle,
+        .mtime = mtime,
+        .index = index,
+    } });
+    try macho_file.objects.append(gpa, index);
+
+    const object = macho_file.getFile(index).?.object;
+    try object.parseAr(macho_file);
+}
+
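+/// Parses an archive (or the matching slice of a fat archive) and registers
+/// every member object, parsing each member's header and symbol table.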
+fn parseArchive(macho_file: *MachO, path: []const u8, fat_arch: ?fat.Arch) MachO.ParseError!void {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    const gpa = macho_file.base.comp.gpa;
+
+    const file = try std.fs.cwd().openFile(path, .{});
+    errdefer file.close();
+    const handle = try macho_file.addFileHandle(file);
+
+    var archive = Archive{};
+    defer archive.deinit(gpa);
+    try archive.parse(macho_file, path, handle, fat_arch);
+
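+    // Register every extracted member as a standalone object file and parse it.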
+    var has_parse_error = false;
+    for (archive.objects.items) |extracted| {
+        const index = @as(File.Index, @intCast(try macho_file.files.addOne(gpa)));
+        macho_file.files.set(index, .{ .object = extracted });
+        const object = &macho_file.files.items(.data)[index].object;
+        object.index = index;
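+        // A CPU arch mismatch in one member is reported but does not abort parsing the remaining members.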
+        object.parseAr(macho_file) catch |err| switch (err) {
+            error.InvalidCpuArch => has_parse_error = true,
+            else => |e| return e,
+        };
+        try macho_file.objects.append(gpa, index);
+    }
+    if (has_parse_error) return error.MalformedArchive;
+}
+
fn markExports(macho_file: *MachO) void {
    if (macho_file.getZigObject()) |zo| {
        zo.asFile().markExportsRelocatable(macho_file);
@@ -733,6 +813,7 @@ fn writeHeader(macho_file: *MachO, ncmds: usize, sizeofcmds: usize) !void {
const assert = std.debug.assert;
const build_options = @import("build_options");
const eh_frame = @import("eh_frame.zig");
+const fat = @import("fat.zig");
const link = @import("../../link.zig");
const load_commands = @import("load_commands.zig");
const log = std.log.scoped(.link);
@@ -748,4 +829,5 @@ const Atom = @import("Atom.zig");
const Compilation = @import("../../Compilation.zig");
const File = @import("file.zig").File;
const MachO = @import("../MachO.zig");
+const Object = @import("Object.zig");
const Symbol = @import("Symbol.zig");
src/link/MachO.zig
@@ -926,7 +926,7 @@ pub fn resolveLibSystem(
    });
}
-const ParseError = error{
+pub const ParseError = error{
    MalformedObject,
    MalformedArchive,
    MalformedDylib,
@@ -1003,7 +1003,7 @@ fn parseObject(self: *MachO, path: []const u8) ParseError!void {
    try object.parse(self);
}
-fn parseFatLibrary(self: *MachO, path: []const u8) !fat.Arch {
+pub fn parseFatLibrary(self: *MachO, path: []const u8) !fat.Arch {
    var buffer: [2]fat.Arch = undefined;
    const fat_archs = try fat.parseArchs(path, &buffer);
    const cpu_arch = self.getTarget().cpu.arch;