Commit 148b963a60
2022-01-14 04:09:42
1 parent b713ce0
Changed files (9)
src/link/MachO/Atom.zig
@@ -419,6 +419,7 @@ pub fn parseRelocs(self: *Atom, relocs: []macho.relocation_info, context: RelocC
.X86_64_RELOC_BRANCH => {
// TODO rewrite relocation
try addStub(target, context);
+ addend = mem.readIntLittle(i32, self.code.items[offset..][0..4]);
},
.X86_64_RELOC_GOT, .X86_64_RELOC_GOT_LOAD => {
// TODO rewrite relocation
@@ -1003,7 +1004,7 @@ pub fn resolveRelocs(self: *Atom, macho_file: *MachO) !void {
.X86_64_RELOC_BRANCH => {
const displacement = try math.cast(
i32,
- @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4,
+ @intCast(i64, target_addr) - @intCast(i64, source_addr) - 4 + rel.addend,
);
mem.writeIntLittle(u32, self.code.items[rel.offset..][0..4], @bitCast(u32, displacement));
},
src/link/MachO/Object.zig
@@ -409,7 +409,7 @@ pub fn parseIntoAtoms(self: *Object, allocator: Allocator, macho_file: *MachO) !
} else blk: {
var iundefsym: usize = sorted_all_nlists.items.len;
while (iundefsym > 0) : (iundefsym -= 1) {
- const nlist = sorted_all_nlists.items[iundefsym];
+ const nlist = sorted_all_nlists.items[iundefsym - 1];
if (nlist.nlist.sect()) break;
}
break :blk iundefsym;
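The loop counts iundefsym down from sorted_all_nlists.items.len, so the element under inspection lives at index iundefsym - 1; indexing with iundefsym itself read one past the end of the slice on the first iteration. The idiom in isolation (an illustrative function, not part of Object.zig):

// Find the index of the last negative element by scanning backwards. With
// `while (i > 0) : (i -= 1)`, the valid element index inside the body is `i - 1`.
fn lastNegativeIndex(items: []const i32) ?usize {
    var i: usize = items.len;
    while (i > 0) : (i -= 1) {
        if (items[i - 1] < 0) return i - 1;
    }
    return null;
}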
src/link/Coff.zig
@@ -920,7 +920,10 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
man = comp.cache_parent.obtain();
self.base.releaseLock();
- try man.addListOfFiles(self.base.options.objects);
+ for (self.base.options.objects) |obj| {
+ _ = try man.addFile(obj.path, null);
+ man.hash.add(obj.must_link);
+ }
for (comp.c_object_table.keys()) |key| {
_ = try man.addFile(key.status.success.object_path, null);
}
@@ -984,7 +987,7 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
// build-obj. See also the corresponding TODO in linkAsArchive.
const the_object_path = blk: {
if (self.base.options.objects.len != 0)
- break :blk self.base.options.objects[0];
+ break :blk self.base.options.objects[0].path;
if (comp.c_object_table.count() != 0)
break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1093,7 +1096,10 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
try argv.append(try allocPrint(arena, "-LIBPATH:{s}", .{lib_dir}));
}
- try argv.appendSlice(self.base.options.objects);
+ try argv.ensureUnusedCapacity(self.base.options.objects.len);
+ for (self.base.options.objects) |obj| {
+ argv.appendAssumeCapacity(obj.path);
+ }
for (comp.c_object_table.keys()) |key| {
try argv.append(key.status.success.object_path);
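The same replacement of man.addListOfFiles recurs below in Elf.zig, Wasm.zig, MachO.zig, and link.zig: objects is now a slice of LinkObject rather than of plain paths, so each entry's file is added to the cache manifest individually, and the must_link flag, which has no file backing it, is mixed into the hash so that toggling it invalidates a cached link. The pattern in isolation, sketched against the compiler's Cache.Manifest API as used in these hunks:

const Cache = @import("Cache.zig");
const Compilation = @import("Compilation.zig");

fn hashLinkInputs(man: *Cache.Manifest, objects: []const Compilation.LinkObject) !void {
    for (objects) |obj| {
        // The file's contents reach the hash through the manifest...
        _ = try man.addFile(obj.path, null);
        // ...while the in-memory flag is hashed directly.
        man.hash.add(obj.must_link);
    }
}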
src/link/Elf.zig
@@ -1307,7 +1307,10 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
try man.addOptionalFile(self.base.options.linker_script);
try man.addOptionalFile(self.base.options.version_script);
- try man.addListOfFiles(self.base.options.objects);
+ for (self.base.options.objects) |obj| {
+ _ = try man.addFile(obj.path, null);
+ man.hash.add(obj.must_link);
+ }
for (comp.c_object_table.keys()) |key| {
_ = try man.addFile(key.status.success.object_path, null);
}
@@ -1392,7 +1395,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
// build-obj. See also the corresponding TODO in linkAsArchive.
const the_object_path = blk: {
if (self.base.options.objects.len != 0)
- break :blk self.base.options.objects[0];
+ break :blk self.base.options.objects[0].path;
if (comp.c_object_table.count() != 0)
break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1607,7 +1610,10 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
}
// Positional arguments to the linker such as object files.
- try argv.appendSlice(self.base.options.objects);
+ try argv.ensureUnusedCapacity(self.base.options.objects.len);
+ for (self.base.options.objects) |obj| {
+ argv.appendAssumeCapacity(obj.path);
+ }
for (comp.c_object_table.keys()) |key| {
try argv.append(key.status.success.object_path);
src/link/MachO.zig
@@ -140,6 +140,9 @@ objc_selrefs_section_index: ?u16 = null,
objc_classrefs_section_index: ?u16 = null,
objc_data_section_index: ?u16 = null,
+rustc_section_index: ?u16 = null,
+rustc_section_size: u64 = 0,
+
bss_file_offset: u32 = 0,
tlv_bss_file_offset: u32 = 0,
@@ -466,7 +469,10 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
// We are about to obtain this lock, so here we give other processes a chance first.
self.base.releaseLock();
- try man.addListOfFiles(self.base.options.objects);
+ for (self.base.options.objects) |obj| {
+ _ = try man.addFile(obj.path, null);
+ man.hash.add(obj.must_link);
+ }
for (comp.c_object_table.keys()) |key| {
_ = try man.addFile(key.status.success.object_path, null);
}
@@ -539,8 +545,9 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
// here. TODO: think carefully about how we can avoid this redundant operation when doing
// build-obj. See also the corresponding TODO in linkAsArchive.
const the_object_path = blk: {
- if (self.base.options.objects.len != 0)
- break :blk self.base.options.objects[0];
+ if (self.base.options.objects.len != 0) {
+ break :blk self.base.options.objects[0].path;
+ }
if (comp.c_object_table.count() != 0)
break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -649,8 +656,19 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
// Positional arguments to the linker such as object files and static archives.
var positionals = std.ArrayList([]const u8).init(arena);
+ try positionals.ensureUnusedCapacity(self.base.options.objects.len);
- try positionals.appendSlice(self.base.options.objects);
+ var must_link_archives = std.StringArrayHashMap(void).init(arena);
+ try must_link_archives.ensureUnusedCapacity(self.base.options.objects.len);
+
+ for (self.base.options.objects) |obj| {
+ if (must_link_archives.contains(obj.path)) continue;
+ if (obj.must_link) {
+ _ = must_link_archives.getOrPutAssumeCapacity(obj.path);
+ } else {
+ _ = positionals.appendAssumeCapacity(obj.path);
+ }
+ }
for (comp.c_object_table.keys()) |key| {
try positionals.append(key.status.success.object_path);
@@ -857,12 +875,17 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
try argv.append("dynamic_lookup");
}
+ for (must_link_archives.keys()) |lib| {
+ try argv.append(try std.fmt.allocPrint(arena, "-force_load {s}", .{lib}));
+ }
+
Compilation.dump_argv(argv.items);
}
var dependent_libs = std.fifo.LinearFifo(Dylib.Id, .Dynamic).init(self.base.allocator);
defer dependent_libs.deinit();
try self.parseInputFiles(positionals.items, self.base.options.sysroot, &dependent_libs);
+ try self.parseAndForceLoadStaticArchives(must_link_archives.keys());
try self.parseLibs(libs.items, self.base.options.sysroot, &dependent_libs);
try self.parseDependentLibs(self.base.options.sysroot, &dependent_libs);
}
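For the self-hosted MachO path, flushModule now partitions the inputs: ordinary objects stay positional, while paths marked must_link are deduplicated into must_link_archives, echoed as -force_load entries in the argv assembled for dump_argv, and parsed eagerly through parseAndForceLoadStaticArchives. A standalone sketch of that partition (a hypothetical helper, not code from the commit):

const std = @import("std");
const Allocator = std.mem.Allocator;
const Compilation = @import("../Compilation.zig");

const Partition = struct {
    positionals: [][]const u8,
    must_link_archives: [][]const u8,
};

fn partitionLinkInputs(arena: Allocator, objects: []const Compilation.LinkObject) !Partition {
    var positionals = std.ArrayList([]const u8).init(arena);
    var archives = std.StringArrayHashMap(void).init(arena);
    for (objects) |obj| {
        if (obj.must_link) {
            // Deduplicate: force-loading the same archive twice would define
            // every symbol in it twice.
            _ = try archives.getOrPut(obj.path);
        } else {
            try positionals.append(obj.path);
        }
    }
    return Partition{
        .positionals = positionals.items,
        .must_link_archives = archives.keys(),
    };
}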
@@ -953,6 +976,12 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
try self.writeAtoms();
}
+ if (self.rustc_section_index) |id| {
+ const seg = &self.load_commands.items[self.data_segment_cmd_index.?].segment;
+ const sect = &seg.sections.items[id];
+ sect.size = self.rustc_section_size;
+ }
+
try self.setEntryPoint();
try self.updateSectionOrdinals();
try self.writeLinkeditSegment();
@@ -1142,7 +1171,7 @@ fn parseObject(self: *MachO, path: []const u8) !bool {
return true;
}
-fn parseArchive(self: *MachO, path: []const u8) !bool {
+fn parseArchive(self: *MachO, path: []const u8, force_load: bool) !bool {
const file = fs.cwd().openFile(path, .{}) catch |err| switch (err) {
error.FileNotFound => return false,
else => |e| return e,
@@ -1165,7 +1194,23 @@ fn parseArchive(self: *MachO, path: []const u8) !bool {
else => |e| return e,
};
- try self.archives.append(self.base.allocator, archive);
+ if (force_load) {
+ defer archive.deinit(self.base.allocator);
+ // Get all offsets from the ToC
+ var offsets = std.AutoArrayHashMap(u32, void).init(self.base.allocator);
+ defer offsets.deinit();
+ for (archive.toc.values()) |offs| {
+ for (offs.items) |off| {
+ _ = try offsets.getOrPut(off);
+ }
+ }
+ for (offsets.keys()) |off| {
+ const object = try self.objects.addOne(self.base.allocator);
+ object.* = try archive.parseObject(self.base.allocator, self.base.options.target, off);
+ }
+ } else {
+ try self.archives.append(self.base.allocator, archive);
+ }
return true;
}
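With force_load set, the archive is not retained for lazy symbol resolution at all: every distinct member offset listed in the table of contents is parsed into a full Object up front, mirroring ld64's -force_load behavior. Deduplicating through the offset set matters because the ToC maps each exported symbol to its defining member, so one member typically appears many times. A reduced sketch of the member walk (the archive's toc shape is taken from the hunk above; the function itself is illustrative):

const std = @import("std");
const Allocator = std.mem.Allocator;

// `archive` is assumed to expose the linker Archive's `toc` field: a map from
// symbol name to the list of member offsets that define it.
fn collectMemberOffsets(gpa: Allocator, archive: anytype) !std.AutoArrayHashMap(u32, void) {
    var offsets = std.AutoArrayHashMap(u32, void).init(gpa);
    errdefer offsets.deinit();
    for (archive.toc.values()) |offs| {
        for (offs.items) |off| {
            // Collapsing through a set yields each member exactly once.
            _ = try offsets.getOrPut(off);
        }
    }
    return offsets;
}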
@@ -1250,7 +1295,7 @@ fn parseInputFiles(self: *MachO, files: []const []const u8, syslibroot: ?[]const
log.debug("parsing input file path '{s}'", .{full_path});
if (try self.parseObject(full_path)) continue;
- if (try self.parseArchive(full_path)) continue;
+ if (try self.parseArchive(full_path, false)) continue;
if (try self.parseDylib(full_path, .{
.syslibroot = syslibroot,
.dependent_libs = dependent_libs,
@@ -1260,6 +1305,21 @@ fn parseInputFiles(self: *MachO, files: []const []const u8, syslibroot: ?[]const
}
}
+fn parseAndForceLoadStaticArchives(self: *MachO, files: []const []const u8) !void {
+ for (files) |file_name| {
+ const full_path = full_path: {
+ var buffer: [fs.MAX_PATH_BYTES]u8 = undefined;
+ const path = try fs.realpath(file_name, &buffer);
+ break :full_path try self.base.allocator.dupe(u8, path);
+ };
+ defer self.base.allocator.free(full_path);
+ log.debug("parsing and force loading static archive '{s}'", .{full_path});
+
+ if (try self.parseArchive(full_path, true)) continue;
+ log.warn("unknown filetype: expected static archive: '{s}'", .{file_name});
+ }
+}
+
fn parseLibs(self: *MachO, libs: []const []const u8, syslibroot: ?[]const u8, dependent_libs: anytype) !void {
for (libs) |lib| {
log.debug("parsing lib path '{s}'", .{lib});
@@ -1267,7 +1327,7 @@ fn parseLibs(self: *MachO, libs: []const []const u8, syslibroot: ?[]const u8, de
.syslibroot = syslibroot,
.dependent_libs = dependent_libs,
})) continue;
- if (try self.parseArchive(lib)) continue;
+ if (try self.parseArchive(lib, false)) continue;
log.warn("unknown filetype for a library: '{s}'", .{lib});
}
@@ -1833,6 +1893,24 @@ pub fn getMatchingSection(self: *MachO, sect: macho.section_64) !?MatchingSectio
.seg = self.data_segment_cmd_index.?,
.sect = self.objc_data_section_index.?,
};
+ } else if (mem.eql(u8, sectname, ".rustc")) {
+ if (self.rustc_section_index == null) {
+ self.rustc_section_index = try self.initSection(
+ self.data_segment_cmd_index.?,
+ ".rustc",
+ sect.size,
+ sect.@"align",
+ .{},
+ );
+ // We need to preserve the section size for rustc to properly
+ // decompress the metadata.
+ self.rustc_section_size = sect.size;
+ }
+
+ break :blk .{
+ .seg = self.data_segment_cmd_index.?,
+ .sect = self.rustc_section_index.?,
+ };
} else {
if (self.data_section_index == null) {
self.data_section_index = try self.initSection(
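Rust object files place the crate's compressed metadata in a section literally named .rustc, and, as the in-diff comment notes, rustc relies on the recorded section size to decompress it later. The section therefore gets its own index in __DATA, the input size is remembered here, and the earlier flushModule hunk writes that size back after atom layout, which may otherwise leave a padded size in the header. The invariant, sketched with std.macho's section type (illustrative function):

const std = @import("std");
const macho = std.macho;

// After layout the header must report the exact metadata length again, not
// the possibly padded size the atom allocator arrived at.
fn restoreExactSize(sect: *macho.section_64, input_size: u64) void {
    std.debug.assert(sect.size >= input_size);
    sect.size = input_size;
}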
@@ -5003,6 +5081,7 @@ fn sortSections(self: *MachO) !void {
// __DATA segment
const indices = &[_]*?u16{
+ &self.rustc_section_index,
&self.la_symbol_ptr_section_index,
&self.objc_const_section_index,
&self.objc_selrefs_section_index,
src/link/Wasm.zig
@@ -1012,7 +1012,10 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
// We are about to obtain this lock, so here we give other processes a chance first.
self.base.releaseLock();
- try man.addListOfFiles(self.base.options.objects);
+ for (self.base.options.objects) |obj| {
+ _ = try man.addFile(obj.path, null);
+ man.hash.add(obj.must_link);
+ }
for (comp.c_object_table.keys()) |key| {
_ = try man.addFile(key.status.success.object_path, null);
}
@@ -1065,7 +1068,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
// build-obj. See also the corresponding TODO in linkAsArchive.
const the_object_path = blk: {
if (self.base.options.objects.len != 0)
- break :blk self.base.options.objects[0];
+ break :blk self.base.options.objects[0].path;
if (comp.c_object_table.count() != 0)
break :blk comp.c_object_table.keys()[0].status.success.object_path;
@@ -1225,7 +1228,10 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
}
// Positional arguments to the linker such as object files.
- try argv.appendSlice(self.base.options.objects);
+ try argv.ensureUnusedCapacity(self.base.options.objects.len);
+ for (self.base.options.objects) |obj| {
+ argv.appendAssumeCapacity(obj.path);
+ }
for (comp.c_object_table.keys()) |key| {
try argv.append(key.status.success.object_path);
src/Compilation.zig
@@ -636,6 +636,11 @@ pub const ClangPreprocessorMode = enum {
pub const SystemLib = link.SystemLib;
+pub const LinkObject = struct {
+ path: []const u8,
+ must_link: bool = false,
+};
+
pub const InitOptions = struct {
zig_lib_directory: Directory,
local_cache_directory: Directory,
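LinkObject becomes the common currency for link inputs across the frontend and the linker backends: a path plus a must_link flag defaulting to false. A hypothetical caller filling InitOptions might describe its inputs like this (paths are made up):

const Compilation = @import("Compilation.zig");

// Two ordinary objects and one archive that must be loaded in full.
var link_objects = [_]Compilation.LinkObject{
    .{ .path = "main.o" },
    .{ .path = "util.o" },
    .{ .path = "libvendored.a", .must_link = true },
};
// Passed on as `.link_objects = &link_objects` in Compilation.InitOptions.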
@@ -679,7 +684,7 @@ pub const InitOptions = struct {
lib_dirs: []const []const u8 = &[0][]const u8{},
rpath_list: []const []const u8 = &[0][]const u8{},
c_source_files: []const CSourceFile = &[0]CSourceFile{},
- link_objects: []const []const u8 = &[0][]const u8{},
+ link_objects: []LinkObject = &[0]LinkObject{},
framework_dirs: []const []const u8 = &[0][]const u8{},
frameworks: []const []const u8 = &[0][]const u8{},
system_lib_names: []const []const u8 = &.{},
@@ -1027,7 +1032,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
if (options.system_lib_names.len != 0)
break :x true;
for (options.link_objects) |obj| {
- switch (classifyFileExt(obj)) {
+ switch (classifyFileExt(obj.path)) {
.shared_library => break :x true,
else => continue,
}
@@ -1389,7 +1394,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
if (options.c_source_files.len >= 1) {
hash.addBytes(options.c_source_files[0].src_path);
} else if (options.link_objects.len >= 1) {
- hash.addBytes(options.link_objects[0]);
+ hash.addBytes(options.link_objects[0].path);
}
const digest = hash.final();
src/link.zig
@@ -137,7 +137,7 @@ pub const Options = struct {
soname: ?[]const u8,
llvm_cpu_features: ?[*:0]const u8,
- objects: []const []const u8,
+ objects: []Compilation.LinkObject,
framework_dirs: []const []const u8,
frameworks: []const []const u8,
system_libs: std.StringArrayHashMapUnmanaged(SystemLib),
@@ -683,7 +683,10 @@ pub const File = struct {
// We are about to obtain this lock, so here we give other processes a chance first.
base.releaseLock();
- try man.addListOfFiles(base.options.objects);
+ for (base.options.objects) |obj| {
+ _ = try man.addFile(obj.path, null);
+ man.hash.add(obj.must_link);
+ }
for (comp.c_object_table.keys()) |key| {
_ = try man.addFile(key.status.success.object_path, null);
}
@@ -720,8 +723,8 @@ pub const File = struct {
var object_files = try std.ArrayList([*:0]const u8).initCapacity(base.allocator, num_object_files);
defer object_files.deinit();
- for (base.options.objects) |obj_path| {
- object_files.appendAssumeCapacity(try arena.dupeZ(u8, obj_path));
+ for (base.options.objects) |obj| {
+ object_files.appendAssumeCapacity(try arena.dupeZ(u8, obj.path));
}
for (comp.c_object_table.keys()) |key| {
object_files.appendAssumeCapacity(try arena.dupeZ(u8, key.status.success.object_path));
src/main.zig
@@ -703,7 +703,7 @@ fn buildOutputType(
var c_source_files = std.ArrayList(Compilation.CSourceFile).init(gpa);
defer c_source_files.deinit();
- var link_objects = std.ArrayList([]const u8).init(gpa);
+ var link_objects = std.ArrayList(Compilation.LinkObject).init(gpa);
defer link_objects.deinit();
var framework_dirs = std.ArrayList([]const u8).init(gpa);
@@ -1236,7 +1236,7 @@ fn buildOutputType(
}
} else switch (Compilation.classifyFileExt(arg)) {
.object, .static_library, .shared_library => {
- try link_objects.append(arg);
+ try link_objects.append(.{ .path = arg });
},
.assembly, .c, .cpp, .h, .ll, .bc, .m, .mm => {
try c_source_files.append(.{
@@ -1307,7 +1307,7 @@ fn buildOutputType(
switch (file_ext) {
.assembly, .c, .cpp, .ll, .bc, .h, .m, .mm => try c_source_files.append(.{ .src_path = it.only_arg }),
.unknown, .shared_library, .object, .static_library => {
- try link_objects.append(it.only_arg);
+ try link_objects.append(.{ .path = it.only_arg });
},
.zig => {
if (root_src_file) |other| {
@@ -1751,6 +1751,15 @@ fn buildOutputType(
fatal("expected linker arg after '{s}'", .{arg});
}
install_name = linker_args.items[i];
+ } else if (mem.eql(u8, arg, "-force_load")) {
+ i += 1;
+ if (i >= linker_args.items.len) {
+ fatal("expected linker arg after '{s}'", .{arg});
+ }
+ try link_objects.append(.{
+ .path = linker_args.items[i],
+ .must_link = true,
+ });
} else {
warn("unsupported linker arg: {s}", .{arg});
}
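On the command line, a -force_load that reaches the collected linker args (for example via something like `zig cc foo.o -Wl,-force_load,libvendored.a`) is no longer reported as an unsupported linker arg; the following argument is consumed as a path and recorded as a must-link input. Spelled out as a tiny illustrative helper:

const std = @import("std");
const Compilation = @import("Compilation.zig");

// What the new branch records for a `-force_load <path>` pair found in the
// linker args (the helper itself is hypothetical).
fn appendForceLoad(
    link_objects: *std.ArrayList(Compilation.LinkObject),
    path: []const u8,
) !void {
    try link_objects.append(.{ .path = path, .must_link = true });
}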
@@ -1845,7 +1854,7 @@ fn buildOutputType(
const basename = fs.path.basename(c_source_files.items[0].src_path);
break :blk basename[0 .. basename.len - fs.path.extension(basename).len];
} else if (link_objects.items.len >= 1) {
- const basename = fs.path.basename(link_objects.items[0]);
+ const basename = fs.path.basename(link_objects.items[0].path);
break :blk basename[0 .. basename.len - fs.path.extension(basename).len];
} else if (emit_bin == .yes) {
const basename = fs.path.basename(emit_bin.yes);
@@ -2048,7 +2057,7 @@ fn buildOutputType(
test_path.items, @errorName(e),
}),
};
- try link_objects.append(try arena.dupe(u8, test_path.items));
+ try link_objects.append(.{ .path = try arena.dupe(u8, test_path.items) });
break;
} else {
var search_paths = std.ArrayList(u8).init(arena);