//! Linker-synthesized input object for the self-hosted MachO linker.
/// Index of this file within the linker's file table.
index: File.Index,

/// Synthetic input sections (headers, relocations, flags) owned by this object.
sections: std.MultiArrayList(Section) = .{},
/// Atom storage; index 0 is reserved as the null atom (see `init`).
atoms: std.ArrayList(Atom) = .empty,
/// Indexes of the atoms belonging to this object, in creation order.
atoms_indexes: std.ArrayList(Atom.Index) = .empty,
/// Flattened per-atom extra data; accessed via `getAtomExtra`/`setAtomExtra`.
atoms_extra: std.ArrayList(u32) = .empty,
/// One nlist entry per symbol, kept in lockstep with `symbols`.
symtab: std.ArrayList(macho.nlist_64) = .empty,
/// Local string table; byte 0 is always NUL (see `init`).
strtab: std.ArrayList(u8) = .empty,
/// Symbols synthesized by the linker itself.
symbols: std.ArrayList(Symbol) = .empty,
/// Flattened per-symbol extra data; accessed via `getSymbolExtra`/`setSymbolExtra`.
symbols_extra: std.ArrayList(u32) = .empty,
/// For each symbol, its slot in the global symbol resolver (0 = unset).
globals: std.ArrayList(MachO.SymbolResolver.Index) = .empty,

/// Backing bytes for the synthetic __TEXT,__objc_methname section(s).
objc_methnames: std.ArrayList(u8) = .empty,
/// Backing bytes (one zeroed pointer-sized slot) for __DATA,__objc_selrefs.
objc_selrefs: [@sizeOf(u64)]u8 = [_]u8{0} ** @sizeOf(u64),

/// Symbols created from `comp.force_undefined_symbols` (see `initSymbols`).
force_undefined: std.ArrayList(Symbol.Index) = .empty,
entry_index: ?Symbol.Index = null,
dyld_stub_binder_index: ?Symbol.Index = null,
dyld_private_index: ?Symbol.Index = null,
objc_msg_send_index: ?Symbol.Index = null,
mh_execute_header_index: ?Symbol.Index = null,
mh_dylib_header_index: ?Symbol.Index = null,
dso_handle_index: ?Symbol.Index = null,
/// Synthetic segment$/section$ boundary symbols (see `resolveBoundarySymbols`).
boundary_symbols: std.ArrayList(Symbol.Index) = .empty,

/// This object's contribution to the output symtab (counts and offsets).
output_symtab_ctx: MachO.SymtabCtx = .{},
27
/// Releases all heap memory owned by this object.
pub fn deinit(self: *InternalObject, allocator: Allocator) void {
    // Each section owns a relocation list; free those before the list of
    // sections itself.
    for (self.sections.items(.relocs)) |*section_relocs| {
        section_relocs.deinit(allocator);
    }
    self.sections.deinit(allocator);
    // The remaining containers all share the same allocator; free them in bulk.
    inline for (.{
        &self.atoms,
        &self.atoms_indexes,
        &self.atoms_extra,
        &self.symtab,
        &self.strtab,
        &self.symbols,
        &self.symbols_extra,
        &self.globals,
        &self.objc_methnames,
        &self.force_undefined,
        &self.boundary_symbols,
    }) |list| list.deinit(allocator);
}
45
/// Seeds the reserved sentinel entries of this object.
pub fn init(self: *InternalObject, allocator: Allocator) !void {
    // Atom index 0 is reserved as the null atom, so 0 can mean "no atom".
    const null_atom_extra = try self.addAtomExtra(allocator, .{});
    try self.atoms.append(allocator, .{ .extra = null_atom_extra });
    // The string table starts with a NUL so offset 0 is the empty string.
    try self.strtab.append(allocator, 0);
}
52
/// Creates all linker-synthesized symbols up front: force-undefined symbols,
/// dyld_stub_binder, _objc_msgSend, then — depending on output kind — the
/// entry point and __mh_execute_header (executables) or __mh_dylib_header
/// (dylibs), followed by ___dso_handle and dyld_private. Capacity is
/// reserved precisely so the assume-capacity helpers below cannot fail.
pub fn initSymbols(self: *InternalObject, macho_file: *MachO) !void {
    // Local helper: appends one symbol together with its matching nlist
    // entry. Relies entirely on the capacity reserved below.
    const newSymbolAssumeCapacity = struct {
        fn newSymbolAssumeCapacity(obj: *InternalObject, name: MachO.String, args: struct {
            type: u8 = macho.N_UNDF | macho.N_EXT,
            desc: u16 = 0,
        }) Symbol.Index {
            const index = obj.addSymbolAssumeCapacity();
            const symbol = &obj.symbols.items[index];
            symbol.name = name;
            symbol.extra = obj.addSymbolExtraAssumeCapacity(.{});
            symbol.flags.dyn_ref = args.desc & macho.REFERENCED_DYNAMICALLY != 0;
            // External symbols are global unless also private-external (hidden).
            symbol.visibility = if (args.type & macho.N_EXT != 0) blk: {
                break :blk if (args.type & macho.N_PEXT != 0) .hidden else .global;
            } else .local;

            const nlist_idx: u32 = @intCast(obj.symtab.items.len);
            const nlist = obj.symtab.addOneAssumeCapacity();
            nlist.* = .{
                .n_strx = name.pos,
                .n_type = @bitCast(args.type),
                .n_sect = 0,
                .n_desc = @bitCast(args.desc),
                .n_value = 0,
            };
            symbol.nlist_idx = nlist_idx;
            return index;
        }
    }.newSymbolAssumeCapacity;

    const gpa = macho_file.base.comp.gpa;
    // Tally exactly how many symbols will be created; this count must stay
    // in sync with the newSymbolAssumeCapacity calls further down.
    var nsyms = macho_file.base.comp.force_undefined_symbols.keys().len;
    nsyms += 1; // dyld_stub_binder
    nsyms += 1; // _objc_msgSend
    if (!macho_file.base.isDynLib()) {
        nsyms += 1; // entry
        nsyms += 1; // __mh_execute_header
    } else {
        nsyms += 1; // __mh_dylib_header
    }
    nsyms += 1; // ___dso_handle
    nsyms += 1; // dyld_private

    try self.symbols.ensureTotalCapacityPrecise(gpa, nsyms);
    try self.symbols_extra.ensureTotalCapacityPrecise(gpa, nsyms * @sizeOf(Symbol.Extra));
    try self.symtab.ensureTotalCapacityPrecise(gpa, nsyms);
    try self.globals.ensureTotalCapacityPrecise(gpa, nsyms);
    // Cannot fail: capacity was just reserved. Resolver slots are filled in
    // later; zero means "not yet resolved".
    self.globals.resize(gpa, nsyms) catch unreachable;
    @memset(self.globals.items, 0);

    try self.force_undefined.ensureTotalCapacityPrecise(gpa, macho_file.base.comp.force_undefined_symbols.keys().len);
    for (macho_file.base.comp.force_undefined_symbols.keys()) |name| {
        self.force_undefined.addOneAssumeCapacity().* = newSymbolAssumeCapacity(self, try self.addString(gpa, name), .{});
    }

    self.dyld_stub_binder_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "dyld_stub_binder"), .{});
    self.objc_msg_send_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "_objc_msgSend"), .{});

    if (!macho_file.base.isDynLib()) {
        self.entry_index = newSymbolAssumeCapacity(self, try self.addString(gpa, macho_file.entry_name orelse "_main"), .{});
        // The header symbol is referenced dynamically by dyld.
        self.mh_execute_header_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "__mh_execute_header"), .{
            .type = macho.N_SECT | macho.N_EXT,
            .desc = macho.REFERENCED_DYNAMICALLY,
        });
    } else {
        self.mh_dylib_header_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "__mh_dylib_header"), .{
            .type = macho.N_SECT | macho.N_EXT,
        });
    }

    self.dso_handle_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "___dso_handle"), .{
        .type = macho.N_SECT | macho.N_EXT,
    });
    self.dyld_private_index = newSymbolAssumeCapacity(self, try self.addString(gpa, "dyld_private"), .{
        .type = macho.N_SECT,
    });
}
129
/// Enters every symbol of this object into the global resolver, and claims
/// ownership of the resolver slot for defined symbols whenever this object's
/// definition out-ranks the current owner's.
pub fn resolveSymbols(self: *InternalObject, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    const gpa = macho_file.base.comp.gpa;

    for (self.symtab.items, self.globals.items, 0..) |nlist, *global, i| {
        const gop = try macho_file.resolver.getOrPut(gpa, .{
            .index = @intCast(i),
            .file = self.index,
        }, macho_file);
        if (!gop.found_existing) {
            // Newly seen name: start out unclaimed (file 0 / index 0).
            gop.ref.* = .{ .index = 0, .file = 0 };
        }
        // Record which resolver slot this local symbol maps to.
        global.* = gop.index;

        // Undefined symbols never claim a slot.
        if (nlist.n_type.bits.type == .undf) continue;
        if (gop.ref.getFile(macho_file) == null) {
            // Slot is still unclaimed — take it.
            gop.ref.* = .{ .index = @intCast(i), .file = self.index };
            continue;
        }

        // Otherwise claim only when our definition ranks strictly lower
        // (i.e. wins) against the current owner's.
        if (self.asFile().getSymbolRank(.{
            .archive = false,
            .weak = false,
            .tentative = false,
        }) < gop.ref.getSymbol(macho_file).?.getSymbolRank(macho_file)) {
            gop.ref.* = .{ .index = @intCast(i), .file = self.index };
        }
    }
}
161
/// Scans all input objects for unresolved references to boundary symbols
/// (segment$start$*, segment$end$*, section$start$*, section$end$*) and
/// synthesizes a local definition here for each unique name, re-pointing the
/// corresponding resolver slot at the new symbol.
pub fn resolveBoundarySymbols(self: *InternalObject, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    const gpa = macho_file.base.comp.gpa;
    // Deduplicates boundary names; the value keeps one referencing symbol so
    // its resolver slot can be located in the second loop below.
    var boundary_symbols = std.StringArrayHashMap(MachO.Ref).init(gpa);
    defer boundary_symbols.deinit();

    for (macho_file.objects.items) |index| {
        const object = macho_file.getFile(index).?.object;
        for (object.symbols.items, 0..) |sym, i| {
            const nlist = object.symtab.items(.nlist)[i];
            // Only external undefined symbols can be boundary references.
            if (nlist.n_type.bits.type != .undf or !nlist.n_type.bits.ext) continue;
            const ref = object.getSymbolRef(@intCast(i), macho_file);
            // Skip references that already resolved to some file.
            if (ref.getFile(macho_file) != null) continue;
            const name = sym.getName(macho_file);
            if (mem.startsWith(u8, name, "segment$start$") or
                mem.startsWith(u8, name, "segment$end$") or
                mem.startsWith(u8, name, "section$start$") or
                mem.startsWith(u8, name, "section$end$"))
            {
                const gop = try boundary_symbols.getOrPut(name);
                if (!gop.found_existing) {
                    gop.value_ptr.* = .{ .index = @intCast(i), .file = index };
                }
            }
        }
    }

    // Reserve exact capacity so the assume-capacity calls below cannot fail.
    const nsyms = boundary_symbols.values().len;
    try self.boundary_symbols.ensureTotalCapacityPrecise(gpa, nsyms);
    try self.symbols.ensureUnusedCapacity(gpa, nsyms);
    try self.symtab.ensureUnusedCapacity(gpa, nsyms);
    try self.symbols_extra.ensureUnusedCapacity(gpa, nsyms * @sizeOf(Symbol.Extra));
    try self.globals.ensureUnusedCapacity(gpa, nsyms);

    for (boundary_symbols.keys(), boundary_symbols.values()) |name, ref| {
        // Create a local section symbol carrying the boundary name.
        const name_str = try self.addString(gpa, name);
        const sym_index = self.addSymbolAssumeCapacity();
        self.boundary_symbols.appendAssumeCapacity(sym_index);
        const sym = &self.symbols.items[sym_index];
        sym.name = name_str;
        sym.visibility = .local;
        const nlist_idx: u32 = @intCast(self.symtab.items.len);
        const nlist = self.symtab.addOneAssumeCapacity();
        nlist.* = .{
            .n_strx = name_str.pos,
            .n_type = .{ .bits = .{ .ext = false, .type = .sect, .pext = false, .is_stab = 0 } },
            .n_sect = 0,
            .n_desc = @bitCast(@as(u16, 0)),
            .n_value = 0,
        };
        sym.nlist_idx = nlist_idx;
        sym.extra = self.addSymbolExtraAssumeCapacity(.{});

        // Re-point the referencing object's resolver slot at our new symbol.
        const idx = ref.getFile(macho_file).?.object.globals.items[ref.index];
        self.globals.addOneAssumeCapacity().* = idx;
        macho_file.resolver.values.items[idx - 1] = .{ .index = sym_index, .file = self.index };
    }
}
222
/// Marks as alive every object file that provides the definition for one of
/// this object's external symbols, recursing through that object's own
/// references.
pub fn markLive(self: *InternalObject, macho_file: *MachO) void {
    const tracy = trace(@src());
    defer tracy.end();

    var i: usize = 0;
    while (i < self.symbols.items.len) : (i += 1) {
        // Only external symbols participate in liveness propagation.
        const nlist = self.symtab.items[i];
        if (!nlist.n_type.bits.ext) continue;

        const ref = self.getSymbolRef(@intCast(i), macho_file);
        const file = ref.getFile(macho_file) orelse continue;
        if (file != .object) continue;
        const object = file.object;
        if (object.alive) continue;
        object.alive = true;
        object.markLive(macho_file);
    }
}
239
/// Creates a fake input sections __TEXT,__objc_methname and __DATA,__objc_selrefs.
pub fn addObjcMsgsendSections(self: *InternalObject, sym_name: []const u8, macho_file: *MachO) !Symbol.Index {
    // First synthesize the selector-name string, then a selref slot whose
    // relocation points back at it.
    const methname_index = try self.addObjcMethnameSection(sym_name, macho_file);
    const selrefs_index = try self.addObjcSelrefsSection(methname_index, macho_file);
    return selrefs_index;
}
245
/// Creates a synthetic __TEXT,__objc_methname section holding `methname` as
/// a NUL-terminated C string, plus a local symbol pointing at its atom.
/// Returns the index of that symbol.
fn addObjcMethnameSection(self: *InternalObject, methname: []const u8, macho_file: *MachO) !Symbol.Index {
    const gpa = macho_file.base.comp.gpa;
    const atom_index = try self.addAtom(gpa);
    try self.atoms_indexes.append(gpa, atom_index);
    const atom = self.getAtom(atom_index).?;
    // +1 for the terminating NUL byte.
    atom.size = methname.len + 1;
    atom.alignment = .@"1";

    const n_sect = try self.addSection(gpa, "__TEXT", "__objc_methname");
    const sect = &self.sections.items(.header)[n_sect];
    sect.flags = macho.S_CSTRING_LITERALS;
    sect.size = atom.size;
    sect.@"align" = 0;
    atom.n_sect = n_sect;
    // Marks the backing buffer for getSectionData.
    self.sections.items(.extra)[n_sect].is_objc_methname = true;

    // The section's data lives in `objc_methnames`; `offset` indexes into it.
    sect.offset = @intCast(self.objc_methnames.items.len);
    try self.objc_methnames.ensureUnusedCapacity(gpa, methname.len + 1);
    // Cannot fail: capacity reserved just above.
    self.objc_methnames.print(gpa, "{s}\x00", .{methname}) catch unreachable;

    // Local temporary symbol referring to the atom.
    const name_str = try self.addString(gpa, "ltmp");
    const sym_index = try self.addSymbol(gpa);
    const sym = &self.symbols.items[sym_index];
    sym.name = name_str;
    sym.atom_ref = .{ .index = atom_index, .file = self.index };
    sym.extra = try self.addSymbolExtra(gpa, .{});
    const nlist_idx: u32 = @intCast(self.symtab.items.len);
    const nlist = try self.symtab.addOne(gpa);
    nlist.* = .{
        .n_strx = name_str.pos,
        .n_type = .{ .bits = .{ .ext = false, .type = .sect, .pext = false, .is_stab = 0 } },
        // nlist section ordinals are 1-based.
        .n_sect = @intCast(n_sect + 1),
        .n_desc = @bitCast(@as(u16, 0)),
        .n_value = 0,
    };
    sym.nlist_idx = nlist_idx;
    // Keep `globals` in lockstep with `symbols` (0 = no resolver slot).
    try self.globals.append(gpa, 0);

    return sym_index;
}
286
/// Creates a synthetic __DATA,__objc_selrefs section: one pointer-sized slot
/// with an unsigned extern relocation targeting the method-name symbol.
/// Returns the index of the local literal symbol describing the slot.
fn addObjcSelrefsSection(self: *InternalObject, methname_sym_index: Symbol.Index, macho_file: *MachO) !Symbol.Index {
    const gpa = macho_file.base.comp.gpa;
    const atom_index = try self.addAtom(gpa);
    try self.atoms_indexes.append(gpa, atom_index);
    const atom = self.getAtom(atom_index).?;
    // A single pointer-sized, pointer-aligned slot.
    atom.size = @sizeOf(u64);
    atom.alignment = .@"8";

    const n_sect = try self.addSection(gpa, "__DATA", "__objc_selrefs");
    const sect = &self.sections.items(.header)[n_sect];
    sect.flags = macho.S_LITERAL_POINTERS | macho.S_ATTR_NO_DEAD_STRIP;
    sect.offset = 0;
    sect.size = atom.size;
    sect.@"align" = 3;
    atom.n_sect = n_sect;
    // Marks the backing buffer for getSectionData.
    self.sections.items(.extra)[n_sect].is_objc_selref = true;

    // Single relocation: store the address of the method name in the slot.
    const relocs = &self.sections.items(.relocs)[n_sect];
    try relocs.ensureUnusedCapacity(gpa, 1);
    relocs.appendAssumeCapacity(.{
        .tag = .@"extern",
        .offset = 0,
        .target = methname_sym_index,
        .addend = 0,
        .type = .unsigned,
        .meta = .{
            .pcrel = false,
            .length = 3,
            .symbolnum = 0, // Only used when synthesising unwind records so can be anything
            .has_subtractor = false,
        },
    });
    atom.addExtra(.{ .rel_index = 0, .rel_count = 1 }, macho_file);

    // Unnamed local symbol (n_strx = 0) used as the literal handle.
    const sym_index = try self.addSymbol(gpa);
    const sym = &self.symbols.items[sym_index];
    sym.atom_ref = .{ .index = atom_index, .file = self.index };
    sym.extra = try self.addSymbolExtra(gpa, .{});
    const nlist_idx: u32 = @intCast(self.symtab.items.len);
    const nlist = try self.symtab.addOne(gpa);
    nlist.* = .{
        .n_strx = 0,
        .n_type = .{ .bits = .{ .ext = false, .type = .sect, .pext = false, .is_stab = 0 } },
        // nlist section ordinals are 1-based.
        .n_sect = @intCast(n_sect + 1),
        .n_desc = @bitCast(@as(u16, 0)),
        .n_value = 0,
    };
    sym.nlist_idx = nlist_idx;
    // Keep `globals` in lockstep with `symbols` (0 = no resolver slot).
    try self.globals.append(gpa, 0);
    atom.addExtra(.{ .literal_symbol_index = sym_index }, macho_file);

    return sym_index;
}
340
/// Finds unresolved references of the form `_objc_msgSend$<selector>` across
/// all input objects and synthesizes, for each unique name, the backing
/// methname/selref sections plus a hidden symbol that will be emitted as an
/// ObjC stub. The corresponding resolver slots are re-pointed here.
pub fn resolveObjcMsgSendSymbols(self: *InternalObject, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    const gpa = macho_file.base.comp.gpa;

    // Deduplicates `_objc_msgSend$...` names; the value keeps one referencing
    // symbol so its resolver slot can be located below.
    var objc_msgsend_syms = std.StringArrayHashMap(MachO.Ref).init(gpa);
    defer objc_msgsend_syms.deinit();

    for (macho_file.objects.items) |index| {
        const object = macho_file.getFile(index).?.object;

        for (object.symbols.items, 0..) |sym, i| {
            const nlist = object.symtab.items(.nlist)[i];
            // Only external undefined symbols qualify.
            if (!nlist.n_type.bits.ext) continue;
            if (nlist.n_type.bits.type != .undf) continue;

            const ref = object.getSymbolRef(@intCast(i), macho_file);
            // Skip references that already resolved to some file.
            if (ref.getFile(macho_file) != null) continue;

            const name = sym.getName(macho_file);
            if (mem.startsWith(u8, name, "_objc_msgSend$")) {
                const gop = try objc_msgsend_syms.getOrPut(name);
                if (!gop.found_existing) {
                    gop.value_ptr.* = .{ .index = @intCast(i), .file = index };
                }
            }
        }
    }

    for (objc_msgsend_syms.keys(), objc_msgsend_syms.values()) |sym_name, ref| {
        // Strip the prefix to recover the raw selector name.
        const name = MachO.eatPrefix(sym_name, "_objc_msgSend$").?;
        const selrefs_index = try self.addObjcMsgsendSections(name, macho_file);

        // Hidden (private-external) section symbol for the stub itself.
        const name_str = try self.addString(gpa, sym_name);
        const sym_index = try self.addSymbol(gpa);
        const sym = &self.symbols.items[sym_index];
        sym.name = name_str;
        sym.visibility = .hidden;
        const nlist_idx: u32 = @intCast(self.symtab.items.len);
        const nlist = try self.symtab.addOne(gpa);
        nlist.* = .{
            .n_strx = name_str.pos,
            .n_type = .{ .bits = .{ .ext = true, .type = .sect, .pext = true, .is_stab = 0 } },
            .n_sect = 0,
            .n_desc = @bitCast(@as(u16, 0)),
            .n_value = 0,
        };
        sym.nlist_idx = nlist_idx;
        // Remember which selref slot belongs to this stub symbol.
        sym.extra = try self.addSymbolExtra(gpa, .{ .objc_selrefs = selrefs_index });
        sym.setSectionFlags(.{ .objc_stubs = true });

        // Re-point the referencing object's resolver slot at our new symbol.
        const idx = ref.getFile(macho_file).?.object.globals.items[ref.index];
        try self.globals.append(gpa, idx);
        macho_file.resolver.values.items[idx - 1] = .{ .index = sym_index, .file = self.index };
    }
}
398
/// Inserts every pointer-literal atom of this object into the global literal
/// pool. When an equal literal already exists, the duplicate atom is marked
/// dead and the surviving pool atom inherits the larger alignment.
pub fn resolveLiterals(self: *InternalObject, lp: *MachO.LiteralPool, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    const gpa = macho_file.base.comp.gpa;

    // Scratch buffer reused across iterations for the literal's bytes.
    var buffer = std.array_list.Managed(u8).init(gpa);
    defer buffer.deinit();

    const slice = self.sections.slice();
    for (slice.items(.header), self.getAtoms()) |header, atom_index| {
        if (!Object.isPtrLiteral(header)) continue;
        const atom = self.getAtom(atom_index).?;
        // Pointer-literal atoms synthesized here carry exactly one extern
        // relocation (see addObjcSelrefsSection).
        const relocs = atom.getRelocs(macho_file);
        assert(relocs.len == 1);
        const rel = relocs[0];
        assert(rel.tag == .@"extern");
        // Key the pool on the bytes of the relocation's target atom.
        const target = rel.getTargetSymbol(atom.*, macho_file).getAtom(macho_file).?;
        const target_size = try macho_file.cast(usize, target.size);
        try buffer.ensureUnusedCapacity(target_size);
        // Cannot fail: capacity reserved just above.
        buffer.resize(target_size) catch unreachable;
        const section_data = try self.getSectionData(target.n_sect, macho_file);
        @memcpy(buffer.items, section_data);
        const res = try lp.insert(gpa, header.type(), buffer.items);
        buffer.clearRetainingCapacity();
        if (!res.found_existing) {
            // First occurrence: our literal symbol becomes the pool entry.
            res.ref.* = .{ .index = atom.getExtra(macho_file).literal_symbol_index, .file = self.index };
        } else {
            // Duplicate: keep the pool atom, kill ours, merge alignment.
            const lp_sym = lp.getSymbol(res.index, macho_file);
            const lp_atom = lp_sym.getAtom(macho_file).?;
            lp_atom.alignment = lp_atom.alignment.max(atom.alignment);
            atom.setAlive(false);
        }
        // Remember which pool entry this atom maps to (used by dedupLiterals).
        atom.addExtra(.{ .literal_pool_index = res.index }, macho_file);
    }
}
435
/// Redirects references to deduplicated pointer literals at the canonical
/// pool atoms chosen in `resolveLiterals`: first relocation targets inside
/// this object's atoms, then the selref symbols of ObjC stub symbols.
pub fn dedupLiterals(self: *InternalObject, lp: MachO.LiteralPool, macho_file: *MachO) void {
    const tracy = trace(@src());
    defer tracy.end();

    for (self.getAtoms()) |atom_index| {
        const atom = self.getAtom(atom_index) orelse continue;
        if (!atom.isAlive()) continue;

        const relocs = blk: {
            const extra = atom.getExtra(macho_file);
            const relocs = self.sections.items(.relocs)[atom.n_sect].items;
            break :blk relocs[extra.rel_index..][0..extra.rel_count];
        };
        for (relocs) |*rel| {
            if (rel.tag != .@"extern") continue;
            const target_sym_ref = rel.getTargetSymbolRef(atom.*, macho_file);
            const file = target_sym_ref.getFile(macho_file) orelse continue;
            // Only rewrite targets that live in this object.
            if (file.getIndex() != self.index) continue;
            const target_sym = target_sym_ref.getSymbol(macho_file).?;
            const target_atom = target_sym.getAtom(macho_file) orelse continue;
            if (!Object.isPtrLiteral(target_atom.getInputSection(macho_file))) continue;
            const lp_index = target_atom.getExtra(macho_file).literal_pool_index;
            const lp_sym = lp.getSymbol(lp_index, macho_file);
            const lp_atom_ref = lp_sym.atom_ref;
            // If the pool chose a different atom, point our symbol at it.
            if (target_atom.atom_index != lp_atom_ref.index or target_atom.file != lp_atom_ref.file) {
                target_sym.atom_ref = lp_atom_ref;
            }
        }
    }

    // Same rewrite for the selref symbols referenced by ObjC stub symbols.
    for (self.symbols.items) |*sym| {
        if (!sym.getSectionFlags().objc_stubs) continue;
        const extra = sym.getExtra(macho_file);
        const file = sym.getFile(macho_file).?;
        if (file.getIndex() != self.index) continue;
        const tsym = switch (file) {
            .dylib => unreachable,
            inline else => |x| &x.symbols.items[extra.objc_selrefs],
        };
        const atom = tsym.getAtom(macho_file) orelse continue;
        if (!Object.isPtrLiteral(atom.getInputSection(macho_file))) continue;
        const lp_index = atom.getExtra(macho_file).literal_pool_index;
        const lp_sym = lp.getSymbol(lp_index, macho_file);
        const lp_atom_ref = lp_sym.atom_ref;
        if (atom.atom_index != lp_atom_ref.index or atom.file != lp_atom_ref.file) {
            tsym.atom_ref = lp_atom_ref;
        }
    }
}
485
/// Flags the synthetic entry/dyld symbols for stub and GOT generation when
/// they resolved to an actual file.
pub fn scanRelocs(self: *InternalObject, macho_file: *MachO) void {
    const tracy = trace(@src());
    defer tracy.end();

    // The entry point is reached through a stub when it is an import.
    if (self.getEntryRef(macho_file)) |ref| {
        if (ref.getFile(macho_file) != null) {
            const sym = ref.getSymbol(macho_file).?;
            if (sym.flags.import) sym.setSectionFlags(.{ .stubs = true });
        }
    }

    // dyld_stub_binder and _objc_msgSend both go through the GOT.
    // TODO for _objc_msgSend: is the GOT always needed, or only when we are
    // synthesising fast stubs?
    const got_refs = [_]?MachO.Ref{
        self.getDyldStubBinderRef(macho_file),
        self.getObjcMsgSendRef(macho_file),
    };
    for (got_refs) |maybe_ref| {
        const ref = maybe_ref orelse continue;
        if (ref.getFile(macho_file) == null) continue;
        ref.getSymbol(macho_file).?.setSectionFlags(.{ .needs_got = true });
    }
}
510
/// Records in `macho_file.undefs` any linker-synthesized symbol that is
/// still unresolved: force-undefined symbols, the entry point,
/// dyld_stub_binder (only when stubs exist), and _objc_msgSend (only when
/// ObjC stub sections were synthesized).
pub fn checkUndefs(self: InternalObject, macho_file: *MachO) !void {
    // Helper: records one undefined symbol under the undefs mutex, keeping
    // the first tag registered for a given resolver slot.
    const addUndef = struct {
        fn addUndef(mf: *MachO, index: MachO.SymbolResolver.Index, tag: anytype) !void {
            const gpa = mf.base.comp.gpa;
            mf.undefs_mutex.lock();
            defer mf.undefs_mutex.unlock();
            const gop = try mf.undefs.getOrPut(gpa, index);
            if (!gop.found_existing) {
                gop.value_ptr.* = tag;
            }
        }
    }.addUndef;
    for (self.force_undefined.items) |index| {
        const ref = self.getSymbolRef(index, macho_file);
        if (ref.getFile(macho_file) == null) {
            try addUndef(macho_file, self.globals.items[index], .force_undefined);
        }
    }
    if (self.getEntryRef(macho_file)) |ref| {
        if (ref.getFile(macho_file) == null) {
            try addUndef(macho_file, self.globals.items[self.entry_index.?], .entry);
        }
    }
    if (self.getDyldStubBinderRef(macho_file)) |ref| {
        // Only an error when we actually emitted stubs that need binding.
        if (ref.getFile(macho_file) == null and macho_file.stubs.symbols.items.len > 0) {
            try addUndef(macho_file, self.globals.items[self.dyld_stub_binder_index.?], .dyld_stub_binder);
        }
    }
    if (self.getObjcMsgSendRef(macho_file)) |ref| {
        // Only an error when we synthesized ObjC methname/selref sections.
        if (ref.getFile(macho_file) == null and self.needsObjcMsgsendSymbol()) {
            try addUndef(macho_file, self.globals.items[self.objc_msg_send_index.?], .objc_msgsend);
        }
    }
}
545
/// Assigns final addresses to the synthetic header/handle symbols that this
/// object won in symbol resolution.
pub fn allocateSyntheticSymbols(self: *InternalObject, macho_file: *MachO) void {
    const text_seg = macho_file.getTextSegment();

    // __mh_execute_header sits at the very start of the __TEXT segment.
    if (self.mh_execute_header_index) |index| {
        const ref = self.getSymbolRef(index, macho_file);
        if (ref.getFile(macho_file)) |file| {
            if (file.getIndex() == self.index) {
                self.symbols.items[index].value = text_seg.vmaddr;
            }
        }
    }

    // The remaining markers are pinned to the start of the data section,
    // when one exists.
    const data_idx = macho_file.data_sect_index orelse return;
    const sect = macho_file.sections.items(.header)[data_idx];
    const candidates = [_]?Symbol.Index{
        self.dso_handle_index,
        self.mh_dylib_header_index,
        self.dyld_private_index,
    };
    for (candidates) |maybe_index| {
        const index = maybe_index orelse continue;
        const ref = self.getSymbolRef(index, macho_file);
        const file = ref.getFile(macho_file) orelse continue;
        if (file.getIndex() != self.index) continue;
        const sym = &self.symbols.items[index];
        sym.value = sect.addr;
        sym.out_n_sect = data_idx;
    }
}
579
/// Computes this object's contribution to the output symtab: marks each
/// owned, named symbol for output, assigns it a position within its group
/// (locals / exports / imports) and accumulates the string-table size.
pub fn calcSymtabSize(self: *InternalObject, macho_file: *MachO) void {
    for (self.symbols.items, 0..) |*sym, i| {
        const ref = self.getSymbolRef(@intCast(i), macho_file);
        const file = ref.getFile(macho_file) orelse continue;
        // Skip symbols whose resolution was won by another file.
        if (file.getIndex() != self.index) continue;
        // Unnamed symbols (e.g. the selref literal handle) are not emitted.
        if (sym.getName(macho_file).len == 0) continue;
        if (macho_file.discard_local_symbols and sym.isLocal()) continue;
        sym.flags.output_symtab = true;
        // The symtab position is the current group counter; counters are
        // bumped after the assignment.
        if (sym.isLocal()) {
            sym.addExtra(.{ .symtab = self.output_symtab_ctx.nlocals }, macho_file);
            self.output_symtab_ctx.nlocals += 1;
        } else if (sym.flags.@"export") {
            sym.addExtra(.{ .symtab = self.output_symtab_ctx.nexports }, macho_file);
            self.output_symtab_ctx.nexports += 1;
        } else {
            assert(sym.flags.import);
            sym.addExtra(.{ .symtab = self.output_symtab_ctx.nimports }, macho_file);
            self.output_symtab_ctx.nimports += 1;
        }
        // +1 for the NUL terminator in the output string table.
        self.output_symtab_ctx.strsize += @as(u32, @intCast(sym.getName(macho_file).len + 1));
    }
}
602
/// Copies each live atom's input bytes into its output section and applies
/// relocations in place.
pub fn writeAtoms(self: *InternalObject, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    for (self.getAtoms()) |atom_index| {
        const atom = self.getAtom(atom_index) orelse continue;
        if (!atom.isAlive()) continue;
        // Zerofill sections carry no file data.
        if (atom.getInputSection(macho_file).isZerofill()) continue;

        const offset = try macho_file.cast(usize, atom.value);
        const len = try macho_file.cast(usize, atom.size);
        const out = macho_file.sections.items(.out)[atom.out_n_sect].items[offset..][0..len];
        @memcpy(out, try self.getSectionData(atom.n_sect, macho_file));
        try atom.resolveRelocs(macho_file, out);
    }
}
620
/// Writes this object's symbols into the preallocated output symtab and
/// string table, starting at the offsets computed in `calcSymtabSize`.
/// `ctx` must expose `symtab` and `strtab` item lists.
pub fn writeSymtab(self: InternalObject, macho_file: *MachO, ctx: anytype) void {
    // Running cursor into the output string table.
    var n_strx = self.output_symtab_ctx.stroff;
    for (self.symbols.items, 0..) |sym, i| {
        const ref = self.getSymbolRef(@intCast(i), macho_file);
        const file = ref.getFile(macho_file) orelse continue;
        // Only emit symbols this file owns.
        if (file.getIndex() != self.index) continue;
        const idx = sym.getOutputSymtabIndex(macho_file) orelse continue;
        const out_sym = &ctx.symtab.items[idx];
        out_sym.n_strx = n_strx;
        sym.setOutputSym(macho_file, out_sym);
        // Append the NUL-terminated name and advance the cursor.
        const name = sym.getName(macho_file);
        @memcpy(ctx.strtab.items[n_strx..][0..name.len], name);
        n_strx += @intCast(name.len);
        ctx.strtab.items[n_strx] = 0;
        n_strx += 1;
    }
}
638
/// Appends a new section carrying only its segment/section names; the caller
/// fills in flags, size, offset and alignment. Returns the section index.
fn addSection(self: *InternalObject, allocator: Allocator, segname: []const u8, sectname: []const u8) !u32 {
    const index: u32 = @intCast(try self.sections.addOne(allocator));
    self.sections.set(index, .{ .header = .{
        .sectname = MachO.makeStaticString(sectname),
        .segname = MachO.makeStaticString(segname),
    } });
    return index;
}
649
/// Returns the raw bytes backing a synthetic section. Only the two ObjC
/// sections carry data in this object; anything else is a programming error.
fn getSectionData(self: *const InternalObject, index: u32, macho_file: *MachO) error{LinkFailure}![]const u8 {
    const slice = self.sections.slice();
    assert(index < slice.items(.header).len);
    const header = slice.items(.header)[index];
    const extra = slice.items(.extra)[index];
    if (extra.is_objc_methname) {
        // Method names live in objc_methnames at the recorded offset.
        const size = try macho_file.cast(usize, header.size);
        return self.objc_methnames.items[header.offset..][0..size];
    }
    if (extra.is_objc_selref) return &self.objc_selrefs;
    @panic("ref to non-existent section");
}
663
/// Interns `string` into the local string table, NUL-terminated. The
/// returned handle's length includes the terminator.
pub fn addString(self: *InternalObject, allocator: Allocator, string: []const u8) !MachO.String {
    const pos: u32 = @intCast(self.strtab.items.len);
    const total = string.len + 1;
    try self.strtab.ensureUnusedCapacity(allocator, total);
    self.strtab.appendSliceAssumeCapacity(string);
    self.strtab.appendAssumeCapacity(0);
    return .{ .pos = pos, .len = @intCast(total) };
}
671
/// Looks up a previously interned string. A zero-length handle denotes the
/// empty string.
pub fn getString(self: InternalObject, string: MachO.String) [:0]const u8 {
    assert(string.pos < self.strtab.items.len and string.pos + string.len <= self.strtab.items.len);
    if (string.len == 0) return "";
    // `len` counts the NUL terminator, hence the -1 with a sentinel check.
    const bytes = self.strtab.items[string.pos..];
    return bytes[0 .. string.len - 1 :0];
}
677
/// Wraps this object in the generic `File` union.
pub fn asFile(self: *InternalObject) File {
    return File{ .internal = self };
}
681
/// Returns the relocations belonging to `atom`: the window
/// [rel_index, rel_index + rel_count) of its section's relocation list.
pub fn getAtomRelocs(self: *const InternalObject, atom: Atom, macho_file: *MachO) []const Relocation {
    const extra = atom.getExtra(macho_file);
    const section_relocs = self.sections.items(.relocs)[atom.n_sect].items;
    return section_relocs[extra.rel_index..][0..extra.rel_count];
}
687
/// Appends a fresh atom (with an empty extra record) and returns its index.
fn addAtom(self: *InternalObject, allocator: Allocator) !Atom.Index {
    const new_index: Atom.Index = @intCast(self.atoms.items.len);
    const extra = try self.addAtomExtra(allocator, .{});
    const slot = try self.atoms.addOne(allocator);
    slot.* = .{
        .file = self.index,
        .atom_index = new_index,
        .extra = extra,
    };
    return new_index;
}
698
/// Returns a pointer to the atom at `atom_index`, or null for the reserved
/// null atom at index 0.
pub fn getAtom(self: *InternalObject, atom_index: Atom.Index) ?*Atom {
    return switch (atom_index) {
        0 => null,
        else => blk: {
            assert(atom_index < self.atoms.items.len);
            break :blk &self.atoms.items[atom_index];
        },
    };
}
704
/// Returns the indexes of all atoms owned by this object, in creation order.
pub fn getAtoms(self: InternalObject) []const Atom.Index {
    return self.atoms_indexes.items;
}
708
/// Serializes `extra` into `atoms_extra`, growing the backing storage first
/// so the assume-capacity path below cannot fail.
fn addAtomExtra(self: *InternalObject, allocator: Allocator, extra: Atom.Extra) !u32 {
    const field_count = @typeInfo(Atom.Extra).@"struct".fields.len;
    try self.atoms_extra.ensureUnusedCapacity(allocator, field_count);
    return self.addAtomExtraAssumeCapacity(extra);
}
714
/// Serializes `extra` field-by-field (one u32 per field, in declaration
/// order) into `atoms_extra` and returns the starting index. The field
/// order must match `getAtomExtra`/`setAtomExtra`.
fn addAtomExtraAssumeCapacity(self: *InternalObject, extra: Atom.Extra) u32 {
    const index = @as(u32, @intCast(self.atoms_extra.items.len));
    const fields = @typeInfo(Atom.Extra).@"struct".fields;
    inline for (fields) |field| {
        self.atoms_extra.appendAssumeCapacity(switch (field.type) {
            u32 => @field(extra, field.name),
            else => @compileError("bad field type"),
        });
    }
    return index;
}
726
/// Deserializes the `Atom.Extra` record stored at `index` by
/// `addAtomExtra*`, reading one u32 per field in declaration order.
pub fn getAtomExtra(self: InternalObject, index: u32) Atom.Extra {
    const fields = @typeInfo(Atom.Extra).@"struct".fields;
    var i: usize = index;
    var result: Atom.Extra = undefined;
    inline for (fields) |field| {
        @field(result, field.name) = switch (field.type) {
            u32 => self.atoms_extra.items[i],
            else => @compileError("bad field type"),
        };
        i += 1;
    }
    return result;
}
740
/// Overwrites the `Atom.Extra` record stored at `index` in place.
/// Index 0 belongs to the reserved null atom (see `init`), hence the assert.
pub fn setAtomExtra(self: *InternalObject, index: u32, extra: Atom.Extra) void {
    assert(index > 0);
    const fields = @typeInfo(Atom.Extra).@"struct".fields;
    inline for (fields, 0..) |field, i| {
        self.atoms_extra.items[index + i] = switch (field.type) {
            u32 => @field(extra, field.name),
            else => @compileError("bad field type"),
        };
    }
}
751
/// Resolver reference for the entry symbol; null when none was created.
pub fn getEntryRef(self: InternalObject, macho_file: *MachO) ?MachO.Ref {
    if (self.entry_index) |index| return self.getSymbolRef(index, macho_file);
    return null;
}
756
/// Resolver reference for dyld_stub_binder; null when none was created.
pub fn getDyldStubBinderRef(self: InternalObject, macho_file: *MachO) ?MachO.Ref {
    if (self.dyld_stub_binder_index) |index| return self.getSymbolRef(index, macho_file);
    return null;
}
761
/// Resolver reference for dyld_private; null when none was created.
pub fn getDyldPrivateRef(self: InternalObject, macho_file: *MachO) ?MachO.Ref {
    if (self.dyld_private_index) |index| return self.getSymbolRef(index, macho_file);
    return null;
}
766
/// Resolver reference for _objc_msgSend; null when none was created.
pub fn getObjcMsgSendRef(self: InternalObject, macho_file: *MachO) ?MachO.Ref {
    if (self.objc_msg_send_index) |index| return self.getSymbolRef(index, macho_file);
    return null;
}
771
/// Appends a fresh symbol, growing storage first, and returns its index.
pub fn addSymbol(self: *InternalObject, allocator: Allocator) !Symbol.Index {
    // Reserve room for exactly one entry, then take the infallible path.
    try self.symbols.ensureUnusedCapacity(allocator, 1);
    return self.addSymbolAssumeCapacity();
}
776
/// Appends a default-initialized symbol owned by this file. Capacity must
/// have been reserved by the caller.
pub fn addSymbolAssumeCapacity(self: *InternalObject) Symbol.Index {
    const new_index: Symbol.Index = @intCast(self.symbols.items.len);
    self.symbols.appendAssumeCapacity(.{ .file = self.index });
    return new_index;
}
783
/// Returns the globally resolved reference for the symbol at `index`,
/// falling back to the local definition when the resolver has no entry.
pub fn getSymbolRef(self: InternalObject, index: Symbol.Index, macho_file: *MachO) MachO.Ref {
    const global = self.globals.items[index];
    return macho_file.resolver.get(global) orelse .{ .index = index, .file = self.index };
}
789
/// Serializes `extra` into `symbols_extra`, growing the backing storage
/// first so the assume-capacity path below cannot fail.
pub fn addSymbolExtra(self: *InternalObject, allocator: Allocator, extra: Symbol.Extra) !u32 {
    const field_count = @typeInfo(Symbol.Extra).@"struct".fields.len;
    try self.symbols_extra.ensureUnusedCapacity(allocator, field_count);
    return self.addSymbolExtraAssumeCapacity(extra);
}
795
/// Serializes `extra` field-by-field (one u32 per field, in declaration
/// order) into `symbols_extra` and returns the starting index. The field
/// order must match `getSymbolExtra`/`setSymbolExtra`.
fn addSymbolExtraAssumeCapacity(self: *InternalObject, extra: Symbol.Extra) u32 {
    const index = @as(u32, @intCast(self.symbols_extra.items.len));
    const fields = @typeInfo(Symbol.Extra).@"struct".fields;
    inline for (fields) |field| {
        self.symbols_extra.appendAssumeCapacity(switch (field.type) {
            u32 => @field(extra, field.name),
            else => @compileError("bad field type"),
        });
    }
    return index;
}
807
/// Deserializes the `Symbol.Extra` record stored at `index` by
/// `addSymbolExtra*`, reading one u32 per field in declaration order.
pub fn getSymbolExtra(self: InternalObject, index: u32) Symbol.Extra {
    const fields = @typeInfo(Symbol.Extra).@"struct".fields;
    var i: usize = index;
    var result: Symbol.Extra = undefined;
    inline for (fields) |field| {
        @field(result, field.name) = switch (field.type) {
            u32 => self.symbols_extra.items[i],
            else => @compileError("bad field type"),
        };
        i += 1;
    }
    return result;
}
821
/// Overwrites the `Symbol.Extra` record stored at `index` in place.
pub fn setSymbolExtra(self: *InternalObject, index: u32, extra: Symbol.Extra) void {
    const fields = @typeInfo(Symbol.Extra).@"struct".fields;
    inline for (fields, 0..) |field, i| {
        self.symbols_extra.items[index + i] = switch (field.type) {
            u32 => @field(extra, field.name),
            else => @compileError("bad field type"),
        };
    }
}
831
/// True when any synthetic ObjC methname/selref section exists, i.e. when
/// _objc_msgSend must actually resolve.
fn needsObjcMsgsendSymbol(self: InternalObject) bool {
    for (self.sections.items(.extra)) |sect_extra| {
        if (sect_extra.is_objc_methname) return true;
        if (sect_extra.is_objc_selref) return true;
    }
    return false;
}
838
/// Formatting adapters used by `fmtAtoms`/`fmtSymtab`.
const Format = struct {
    self: *InternalObject,
    macho_file: *MachO,

    /// Prints one line per atom owned by this object.
    fn atoms(f: Format, w: *Writer) Writer.Error!void {
        try w.writeAll("  atoms\n");
        for (f.self.getAtoms()) |atom_index| {
            const atom = f.self.getAtom(atom_index) orelse continue;
            try w.print("    {f}\n", .{atom.fmt(f.macho_file)});
        }
    }

    /// Prints one line per symbol, marking symbols whose resolver slot is
    /// not claimed by any file as "unclaimed".
    fn symtab(f: Format, w: *Writer) Writer.Error!void {
        const macho_file = f.macho_file;
        const self = f.self;
        try w.writeAll("  symbols\n");
        for (self.symbols.items, 0..) |sym, i| {
            const ref = self.getSymbolRef(@intCast(i), macho_file);
            if (ref.getFile(macho_file) == null) {
                // TODO any better way of handling this?
                try w.print("    {s} : unclaimed\n", .{sym.getName(macho_file)});
            } else {
                try w.print("    {f}\n", .{ref.getSymbol(macho_file).?.fmt(macho_file)});
            }
        }
    }
};
866
/// Formatter printing this object's atoms (see `Format.atoms`).
pub fn fmtAtoms(self: *InternalObject, macho_file: *MachO) std.fmt.Alt(Format, Format.atoms) {
    const data: Format = .{ .self = self, .macho_file = macho_file };
    return .{ .data = data };
}
873
/// Formatter printing this object's symbols (see `Format.symtab`).
pub fn fmtSymtab(self: *InternalObject, macho_file: *MachO) std.fmt.Alt(Format, Format.symtab) {
    const data: Format = .{ .self = self, .macho_file = macho_file };
    return .{ .data = data };
}
880
/// A synthetic input section: its Mach-O header, its relocations, and flags
/// identifying which backing buffer holds its data (see `getSectionData`).
const Section = struct {
    header: macho.section_64,
    relocs: std.ArrayList(Relocation) = .empty,
    extra: Extra = .{},

    const Extra = packed struct {
        /// Data lives in `objc_methnames`.
        is_objc_methname: bool = false,
        /// Data lives in `objc_selrefs`.
        is_objc_selref: bool = false,
    };
};
891
892const assert = std.debug.assert;
893const macho = std.macho;
894const mem = std.mem;
895const std = @import("std");
896const trace = @import("../../tracy.zig").trace;
897const Writer = std.Io.Writer;
898
899const Allocator = std.mem.Allocator;
900const Atom = @import("Atom.zig");
901const File = @import("file.zig").File;
902const InternalObject = @This();
903const MachO = @import("../MachO.zig");
904const Object = @import("Object.zig");
905const Relocation = @import("Relocation.zig");
906const Symbol = @import("Symbol.zig");