//! Generation of the dyld bind, weak-bind, and lazy-bind opcode streams
//! referenced by the `LC_DYLD_INFO` load command.
pub const Entry = struct {
    target: MachO.Ref,
    offset: u64,
    segment_id: u8,
    addend: i64,

    /// Strict-weak ordering used to sort bind entries before encoding:
    /// by segment, then by target symbol, then by offset within the segment.
    /// The `ctx` parameter exists only to satisfy `std.mem.sort`'s context API.
    pub fn lessThan(ctx: *MachO, entry: Entry, other: Entry) bool {
        _ = ctx;
        if (entry.segment_id != other.segment_id)
            return entry.segment_id < other.segment_id;
        if (!entry.target.eql(other.target))
            return entry.target.lessThan(other.target);
        return entry.offset < other.offset;
    }
};
18
/// Collects and encodes the regular (non-lazy, non-weak) dyld bind opcodes.
pub const Bind = struct {
    /// Bind locations gathered by `updateSize`; sorted by (segment, target,
    /// offset) in `finalize` before encoding.
    entries: std.ArrayList(Entry) = .empty,
    /// The encoded BIND_OPCODE_* byte stream.
    buffer: std.ArrayList(u8) = .empty,

    const Self = @This();

    /// Frees all heap storage owned by this container.
    pub fn deinit(self: *Self, gpa: Allocator) void {
        self.entries.deinit(gpa);
        self.buffer.deinit(gpa);
    }

    /// Gathers every location requiring an eager dyld bind — extern 8-byte
    /// unsigned relocations, GOT slots, weakly-imported lazy pointers, and
    /// TLV pointer slots — encodes the opcode stream, and records its
    /// u64-aligned size in `dyld_info_cmd.bind_size`.
    pub fn updateSize(self: *Self, macho_file: *MachO) !void {
        const tracy = trace(@src());
        defer tracy.end();

        const gpa = macho_file.base.comp.gpa;
        const cpu_arch = macho_file.getTarget().cpu.arch;

        // All input objects plus the optional Zig object and internal object
        // (hence the +2 capacity).
        var objects = try std.array_list.Managed(File.Index).initCapacity(gpa, macho_file.objects.items.len + 2);
        defer objects.deinit();
        objects.appendSliceAssumeCapacity(macho_file.objects.items);
        if (macho_file.getZigObject()) |obj| objects.appendAssumeCapacity(obj.index);
        if (macho_file.getInternalObject()) |obj| objects.appendAssumeCapacity(obj.index);

        for (objects.items) |index| {
            const file = macho_file.getFile(index).?;
            for (file.getAtoms()) |atom_index| {
                const atom = file.getAtom(atom_index) orelse continue;
                if (!atom.isAlive()) continue;
                // Zerofill sections occupy no file space, so nothing to bind.
                if (atom.getInputSection(macho_file).isZerofill()) continue;
                const atom_addr = atom.getAddress(macho_file);
                const relocs = atom.getRelocs(macho_file);
                const seg_id = macho_file.sections.items(.segment_id)[atom.out_n_sect];
                const seg = macho_file.segments.items[seg_id];
                for (relocs) |rel| {
                    // Only pointer-sized (length == 3, i.e. 8 bytes) extern
                    // UNSIGNED relocations are bind candidates.
                    if (rel.type != .unsigned or rel.meta.length != 3 or rel.tag != .@"extern") continue;
                    const rel_offset = rel.offset - atom.off;
                    const addend = rel.addend + rel.getRelocAddend(cpu_arch);
                    const sym = rel.getTargetSymbol(atom.*, macho_file);
                    // TLV initializers are resolved through the TLV machinery.
                    if (sym.isTlvInit(macho_file)) continue;
                    const entry = Entry{
                        .target = rel.getTargetSymbolRef(atom.*, macho_file),
                        // Offsets in the opcode stream are segment-relative.
                        .offset = atom_addr + rel_offset - seg.vmaddr,
                        .segment_id = seg_id,
                        .addend = addend,
                    };
                    // Bind imports, and interposable symbols that are not
                    // weak exports (those go to the weak-bind stream).
                    if (sym.flags.import or (!(sym.flags.@"export" and sym.flags.weak) and sym.flags.interposable)) {
                        try self.entries.append(gpa, entry);
                    }
                }
            }
        }

        // GOT slots for imported or interposable (non-weak) symbols.
        if (macho_file.got_sect_index) |sid| {
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];
            for (macho_file.got.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                const addr = macho_file.got.getAddress(@intCast(idx), macho_file);
                const entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.import or (sym.flags.@"export" and sym.flags.interposable and !sym.flags.weak)) {
                    try self.entries.append(gpa, entry);
                }
            }
        }

        // Weakly-imported lazy pointers cannot be lazily bound; bind them
        // eagerly here instead.
        if (macho_file.la_symbol_ptr_sect_index) |sid| {
            const sect = macho_file.sections.items(.header)[sid];
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];
            for (macho_file.stubs.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                // Each lazy pointer slot is one pointer wide.
                const addr = sect.addr + idx * @sizeOf(u64);
                const bind_entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.import and sym.flags.weak) {
                    try self.entries.append(gpa, bind_entry);
                }
            }
        }

        // TLV pointer slots, same selection rule as GOT slots.
        if (macho_file.tlv_ptr_sect_index) |sid| {
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];

            for (macho_file.tlv_ptr.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                const addr = macho_file.tlv_ptr.getAddress(@intCast(idx), macho_file);
                const entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.import or (sym.flags.@"export" and sym.flags.interposable and !sym.flags.weak)) {
                    try self.entries.append(gpa, entry);
                }
            }
        }

        try self.finalize(gpa, macho_file);
        macho_file.dyld_info_cmd.bind_size = mem.alignForward(u32, @intCast(self.buffer.items.len), @alignOf(u64));
    }

    /// Sorts the entries, encodes them one segment-run at a time, and
    /// terminates the stream with BIND_OPCODE_DONE.
    fn finalize(self: *Self, gpa: Allocator, ctx: *MachO) !void {
        if (self.entries.items.len == 0) return;

        log.debug("bind opcodes", .{});

        std.mem.sort(Entry, self.entries.items, ctx, Entry.lessThan);

        // Temporarily move `self.buffer` into an allocating writer; the defer
        // hands the (possibly reallocated) bytes back on every exit path.
        var allocating: std.Io.Writer.Allocating = .fromArrayList(gpa, &self.buffer);
        defer self.buffer = allocating.toArrayList();
        const writer = &allocating.writer;

        // Split the sorted entries into maximal runs sharing a segment id.
        // The very first call sees an empty slice (seg_id == null), which
        // finalizeSegment ignores.
        var start: usize = 0;
        var seg_id: ?u8 = null;
        for (self.entries.items, 0..) |entry, i| {
            if (seg_id != null and seg_id.? == entry.segment_id) continue;
            try finalizeSegment(self.entries.items[start..i], ctx, writer);
            seg_id = entry.segment_id;
            start = i;
        }

        try finalizeSegment(self.entries.items[start..], ctx, writer);
        try done(writer);
    }

    /// Encodes one segment's worth of entries. A small state machine merges
    /// consecutive binds for the same target into compact opcodes:
    ///   .start           — no bind pending; seek to the entry and stage one.
    ///   .bind_single     — one bind staged at `offset`.
    ///   .bind_times_skip — a run of `count` binds spaced `ptr_size + skip` apart.
    /// `offset` mirrors dyld's location counter, including the automatic
    /// pointer-size advance after each bind. The `i -= 1` / `i -= 2`
    /// adjustments rewind the loop so entries are reprocessed after a staged
    /// run is flushed — the exact order of these flushes is load-bearing.
    fn finalizeSegment(entries: []const Entry, ctx: *MachO, writer: *std.Io.Writer) !void {
        if (entries.len == 0) return;

        const seg_id = entries[0].segment_id;
        try setSegmentOffset(seg_id, 0, writer);

        var offset: u64 = 0;
        var addend: i64 = 0;
        var count: usize = 0;
        var skip: u64 = 0;
        var target: ?MachO.Ref = null;

        var state: enum {
            start,
            bind_single,
            bind_times_skip,
        } = .start;

        var i: usize = 0;
        while (i < entries.len) : (i += 1) {
            const current = entries[i];
            if (target == null or !target.?.eql(current.target)) {
                // Target changed: flush whatever run is staged, then emit the
                // symbol / type / ordinal prologue for the new target.
                switch (state) {
                    .start => {},
                    .bind_single => try doBind(writer),
                    .bind_times_skip => try doBindTimesSkip(count, skip, writer),
                }
                state = .start;
                target = current.target;

                const sym = current.target.getSymbol(ctx).?;
                const name = sym.getName(ctx);
                const flags: u8 = if (sym.weakRef(ctx)) macho.BIND_SYMBOL_FLAGS_WEAK_IMPORT else 0;
                const ordinal: i16 = ord: {
                    if (sym.flags.interposable) break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                    if (sym.flags.import) {
                        // TODO: if (ctx.options.namespace == .flat) break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                        if (sym.getDylibOrdinal(ctx)) |ord| break :ord @bitCast(ord);
                    }
                    if (ctx.undefined_treatment == .dynamic_lookup)
                        break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                    break :ord macho.BIND_SPECIAL_DYLIB_SELF;
                };

                try setSymbol(name, flags, writer);
                try setTypePointer(writer);
                try setDylibOrdinal(ordinal, writer);

                // SET_ADDEND is sticky in the opcode stream; only re-emit on change.
                if (current.addend != addend) {
                    addend = current.addend;
                    try setAddend(addend, writer);
                }
            }

            log.debug("{x}, {d}, {x}, {x}, {s}", .{ offset, count, skip, addend, @tagName(state) });
            log.debug("  => {x}", .{current.offset});
            switch (state) {
                .start => {
                    // Seek to the entry's offset (backwards seeks encode the
                    // two's-complement delta as a ULEB), then stage one bind.
                    if (current.offset < offset) {
                        try addAddr(@bitCast(@as(i64, @intCast(current.offset)) - @as(i64, @intCast(offset))), writer);
                        offset = offset - (offset - current.offset);
                    } else if (current.offset > offset) {
                        const delta = current.offset - offset;
                        try addAddr(delta, writer);
                        offset += delta;
                    }
                    state = .bind_single;
                    offset += @sizeOf(u64);
                    count = 1;
                },
                .bind_single => {
                    if (current.offset == offset) {
                        // Adjacent pointer: the staged bind stands alone.
                        try doBind(writer);
                        state = .start;
                    } else if (current.offset > offset) {
                        // Gap after the staged bind: try a times/skip run.
                        const delta = current.offset - offset;
                        state = .bind_times_skip;
                        skip = @as(u64, @intCast(delta));
                        offset += skip;
                    } else unreachable;
                    // Reprocess this entry under the new state.
                    i -= 1;
                },
                .bind_times_skip => {
                    if (current.offset < offset) {
                        // The tentatively-extended run overshot this entry:
                        // drop the last staged slot, flush the shortened run,
                        // rewind the location counter, and reprocess the
                        // previous entry and this one from scratch.
                        count -= 1;
                        if (count == 1) {
                            try doBindAddAddr(skip, writer);
                        } else {
                            try doBindTimesSkip(count, skip, writer);
                        }
                        state = .start;
                        offset = offset - (@sizeOf(u64) + skip);
                        i -= 2;
                    } else if (current.offset == offset) {
                        // Entry continues the run at the expected stride.
                        count += 1;
                        offset += @sizeOf(u64) + skip;
                    } else {
                        // Stride broken: flush the run and reprocess.
                        try doBindTimesSkip(count, skip, writer);
                        state = .start;
                        i -= 1;
                    }
                },
            }
        }

        // Flush the final staged run. `.start` is impossible here because the
        // slice is non-empty and every iteration leaves a run staged.
        switch (state) {
            .start => unreachable,
            .bind_single => try doBind(writer),
            .bind_times_skip => try doBindTimesSkip(count, skip, writer),
        }
    }

    /// Copies the encoded opcode stream to `writer`.
    pub fn write(self: Self, writer: *std.Io.Writer) !void {
        try writer.writeAll(self.buffer.items);
    }
};
272
/// Collects and encodes the dyld weak-bind opcode stream. Mirrors `Bind`,
/// but selects weak symbols and emits no dylib ordinal (weak binding is
/// resolved by name across all loaded images).
pub const WeakBind = struct {
    /// Weak-bind locations gathered by `updateSize`; sorted in `finalize`.
    entries: std.ArrayList(Entry) = .empty,
    /// The encoded BIND_OPCODE_* byte stream.
    buffer: std.ArrayList(u8) = .empty,

    const Self = @This();

    /// Frees all heap storage owned by this container.
    pub fn deinit(self: *Self, gpa: Allocator) void {
        self.entries.deinit(gpa);
        self.buffer.deinit(gpa);
    }

    /// Gathers every location bound to a non-local weak symbol — extern
    /// 8-byte unsigned relocations, GOT slots, lazy pointers, and TLV
    /// pointer slots — encodes the opcode stream, and records its
    /// u64-aligned size in `dyld_info_cmd.weak_bind_size`.
    pub fn updateSize(self: *Self, macho_file: *MachO) !void {
        const tracy = trace(@src());
        defer tracy.end();

        const gpa = macho_file.base.comp.gpa;
        const cpu_arch = macho_file.getTarget().cpu.arch;

        // All input objects plus the optional Zig object and internal object.
        var objects = try std.array_list.Managed(File.Index).initCapacity(gpa, macho_file.objects.items.len + 2);
        defer objects.deinit();
        objects.appendSliceAssumeCapacity(macho_file.objects.items);
        if (macho_file.getZigObject()) |obj| objects.appendAssumeCapacity(obj.index);
        if (macho_file.getInternalObject()) |obj| objects.appendAssumeCapacity(obj.index);

        for (objects.items) |index| {
            const file = macho_file.getFile(index).?;
            for (file.getAtoms()) |atom_index| {
                const atom = file.getAtom(atom_index) orelse continue;
                if (!atom.isAlive()) continue;
                // Zerofill sections occupy no file space, so nothing to bind.
                if (atom.getInputSection(macho_file).isZerofill()) continue;
                const atom_addr = atom.getAddress(macho_file);
                const relocs = atom.getRelocs(macho_file);
                const seg_id = macho_file.sections.items(.segment_id)[atom.out_n_sect];
                const seg = macho_file.segments.items[seg_id];
                for (relocs) |rel| {
                    // Only pointer-sized extern UNSIGNED relocations qualify.
                    if (rel.type != .unsigned or rel.meta.length != 3 or rel.tag != .@"extern") continue;
                    const rel_offset = rel.offset - atom.off;
                    const addend = rel.addend + rel.getRelocAddend(cpu_arch);
                    const sym = rel.getTargetSymbol(atom.*, macho_file);
                    // TLV initializers are resolved through the TLV machinery.
                    if (sym.isTlvInit(macho_file)) continue;
                    const entry = Entry{
                        .target = rel.getTargetSymbolRef(atom.*, macho_file),
                        // Offsets in the opcode stream are segment-relative.
                        .offset = atom_addr + rel_offset - seg.vmaddr,
                        .segment_id = seg_id,
                        .addend = addend,
                    };
                    // Weak binding applies to non-local weak symbols only.
                    if (!sym.isLocal() and sym.flags.weak) {
                        try self.entries.append(gpa, entry);
                    }
                }
            }
        }

        // GOT slots targeting weak symbols.
        if (macho_file.got_sect_index) |sid| {
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];
            for (macho_file.got.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                const addr = macho_file.got.getAddress(@intCast(idx), macho_file);
                const entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.weak) {
                    try self.entries.append(gpa, entry);
                }
            }
        }

        // Lazy pointer slots targeting weak symbols.
        if (macho_file.la_symbol_ptr_sect_index) |sid| {
            const sect = macho_file.sections.items(.header)[sid];
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];

            for (macho_file.stubs.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                // Each lazy pointer slot is one pointer wide.
                const addr = sect.addr + idx * @sizeOf(u64);
                const bind_entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.weak) {
                    try self.entries.append(gpa, bind_entry);
                }
            }
        }

        // TLV pointer slots targeting weak symbols.
        if (macho_file.tlv_ptr_sect_index) |sid| {
            const seg_id = macho_file.sections.items(.segment_id)[sid];
            const seg = macho_file.segments.items[seg_id];

            for (macho_file.tlv_ptr.symbols.items, 0..) |ref, idx| {
                const sym = ref.getSymbol(macho_file).?;
                const addr = macho_file.tlv_ptr.getAddress(@intCast(idx), macho_file);
                const entry = Entry{
                    .target = ref,
                    .offset = addr - seg.vmaddr,
                    .segment_id = seg_id,
                    .addend = 0,
                };
                if (sym.flags.weak) {
                    try self.entries.append(gpa, entry);
                }
            }
        }

        try self.finalize(gpa, macho_file);
        macho_file.dyld_info_cmd.weak_bind_size = mem.alignForward(u32, @intCast(self.buffer.items.len), @alignOf(u64));
    }

    /// Sorts the entries, encodes them one segment-run at a time, and
    /// terminates the stream with BIND_OPCODE_DONE.
    fn finalize(self: *Self, gpa: Allocator, ctx: *MachO) !void {
        if (self.entries.items.len == 0) return;

        log.debug("weak bind opcodes", .{});

        std.mem.sort(Entry, self.entries.items, ctx, Entry.lessThan);

        // Temporarily move `self.buffer` into an allocating writer; the defer
        // hands the (possibly reallocated) bytes back on every exit path.
        var allocating: std.Io.Writer.Allocating = .fromArrayList(gpa, &self.buffer);
        defer self.buffer = allocating.toArrayList();
        const writer = &allocating.writer;

        // Split the sorted entries into maximal runs sharing a segment id.
        // The first call sees an empty slice, which finalizeSegment ignores.
        var start: usize = 0;
        var seg_id: ?u8 = null;
        for (self.entries.items, 0..) |entry, i| {
            if (seg_id != null and seg_id.? == entry.segment_id) continue;
            try finalizeSegment(self.entries.items[start..i], ctx, writer);
            seg_id = entry.segment_id;
            start = i;
        }

        try finalizeSegment(self.entries.items[start..], ctx, writer);
        try done(writer);
    }

    /// Encodes one segment's worth of entries with the same three-state run
    /// merger as `Bind.finalizeSegment` (.start / .bind_single /
    /// .bind_times_skip). `offset` mirrors dyld's location counter including
    /// the pointer-size advance after each bind, and the `i -= 1` / `i -= 2`
    /// rewinds reprocess entries after a staged run is flushed. Unlike the
    /// regular bind stream, no dylib ordinal is emitted.
    fn finalizeSegment(entries: []const Entry, ctx: *MachO, writer: *std.Io.Writer) !void {
        if (entries.len == 0) return;

        const seg_id = entries[0].segment_id;
        try setSegmentOffset(seg_id, 0, writer);

        var offset: u64 = 0;
        var addend: i64 = 0;
        var count: usize = 0;
        var skip: u64 = 0;
        var target: ?MachO.Ref = null;

        var state: enum {
            start,
            bind_single,
            bind_times_skip,
        } = .start;

        var i: usize = 0;
        while (i < entries.len) : (i += 1) {
            const current = entries[i];
            if (target == null or !target.?.eql(current.target)) {
                // Target changed: flush the staged run, then emit the
                // symbol/type prologue for the new target.
                switch (state) {
                    .start => {},
                    .bind_single => try doBind(writer),
                    .bind_times_skip => try doBindTimesSkip(count, skip, writer),
                }
                state = .start;
                target = current.target;

                const sym = current.target.getSymbol(ctx).?;
                const name = sym.getName(ctx);
                const flags: u8 = 0; // TODO NON_WEAK_DEFINITION

                try setSymbol(name, flags, writer);
                try setTypePointer(writer);

                // SET_ADDEND is sticky; only re-emit when it changes.
                if (current.addend != addend) {
                    addend = current.addend;
                    try setAddend(addend, writer);
                }
            }

            log.debug("{x}, {d}, {x}, {x}, {s}", .{ offset, count, skip, addend, @tagName(state) });
            log.debug("  => {x}", .{current.offset});
            switch (state) {
                .start => {
                    // Seek to the entry's offset (backwards seeks encode the
                    // two's-complement delta as a ULEB), then stage one bind.
                    if (current.offset < offset) {
                        try addAddr(@as(u64, @bitCast(@as(i64, @intCast(current.offset)) - @as(i64, @intCast(offset)))), writer);
                        offset = offset - (offset - current.offset);
                    } else if (current.offset > offset) {
                        const delta = current.offset - offset;
                        try addAddr(delta, writer);
                        offset += delta;
                    }
                    state = .bind_single;
                    offset += @sizeOf(u64);
                    count = 1;
                },
                .bind_single => {
                    if (current.offset == offset) {
                        // Adjacent pointer: the staged bind stands alone.
                        try doBind(writer);
                        state = .start;
                    } else if (current.offset > offset) {
                        // Gap after the staged bind: try a times/skip run.
                        const delta = current.offset - offset;
                        state = .bind_times_skip;
                        skip = @intCast(delta);
                        offset += skip;
                    } else unreachable;
                    // Reprocess this entry under the new state.
                    i -= 1;
                },
                .bind_times_skip => {
                    if (current.offset < offset) {
                        // The tentatively-extended run overshot this entry:
                        // drop the last staged slot, flush the shortened run,
                        // rewind the location counter, and reprocess the
                        // previous entry and this one from scratch.
                        count -= 1;
                        if (count == 1) {
                            try doBindAddAddr(skip, writer);
                        } else {
                            try doBindTimesSkip(count, skip, writer);
                        }
                        state = .start;
                        offset = offset - (@sizeOf(u64) + skip);
                        i -= 2;
                    } else if (current.offset == offset) {
                        // Entry continues the run at the expected stride.
                        count += 1;
                        offset += @sizeOf(u64) + skip;
                    } else {
                        // Stride broken: flush the run and reprocess.
                        try doBindTimesSkip(count, skip, writer);
                        state = .start;
                        i -= 1;
                    }
                },
            }
        }

        // Flush the final staged run. `.start` is impossible here because the
        // slice is non-empty and every iteration leaves a run staged.
        switch (state) {
            .start => unreachable,
            .bind_single => try doBind(writer),
            .bind_times_skip => try doBindTimesSkip(count, skip, writer),
        }
    }

    /// Copies the encoded opcode stream to `writer`.
    pub fn write(self: Self, writer: *std.Io.Writer) !void {
        try writer.writeAll(self.buffer.items);
    }
};
516
/// Collects and encodes the dyld lazy-bind opcode stream. Each lazy pointer
/// gets its own self-contained opcode sequence; `offsets` records where each
/// sequence starts so stub helpers can jump straight to it.
pub const LazyBind = struct {
    entries: std.ArrayList(Entry) = .empty,
    buffer: std.ArrayList(u8) = .empty,
    /// Byte offset into `buffer` of each entry's opcode sequence.
    offsets: std.ArrayList(u32) = .empty,

    const Self = @This();

    /// Frees all heap storage owned by this container.
    pub fn deinit(self: *Self, gpa: Allocator) void {
        self.entries.deinit(gpa);
        self.buffer.deinit(gpa);
        self.offsets.deinit(gpa);
    }

    /// Collects one entry per non-weak imported/interposable lazy pointer,
    /// encodes the opcode stream, and records its u64-aligned size in
    /// `dyld_info_cmd.lazy_bind_size`.
    pub fn updateSize(self: *Self, macho_file: *MachO) !void {
        const tracy = trace(@src());
        defer tracy.end();

        const gpa = macho_file.base.comp.gpa;

        const sect_index = macho_file.la_symbol_ptr_sect_index.?;
        const header = macho_file.sections.items(.header)[sect_index];
        const segment_id = macho_file.sections.items(.segment_id)[sect_index];
        const segment = macho_file.segments.items[segment_id];

        for (macho_file.stubs.symbols.items, 0..) |ref, nth| {
            const sym = ref.getSymbol(macho_file).?;
            // Weak symbols are bound eagerly via the regular/weak streams.
            if (sym.flags.weak) continue;
            if (!(sym.flags.import or sym.flags.interposable)) continue;
            // Lazy pointer slots are one pointer wide each.
            const slot_addr = header.addr + nth * @sizeOf(u64);
            try self.entries.append(gpa, .{
                .target = ref,
                .offset = slot_addr - segment.vmaddr,
                .segment_id = segment_id,
                .addend = 0,
            });
        }

        try self.finalize(gpa, macho_file);
        macho_file.dyld_info_cmd.lazy_bind_size = mem.alignForward(u32, @intCast(self.buffer.items.len), @alignOf(u64));
    }

    /// Emits a standalone opcode sequence per entry, recording each sequence's
    /// start offset. Unlike the eager streams, each sequence carries its own
    /// segment/offset and ends with its own DONE.
    fn finalize(self: *Self, gpa: Allocator, ctx: *MachO) !void {
        try self.offsets.ensureTotalCapacityPrecise(gpa, self.entries.items.len);

        log.debug("lazy bind opcodes", .{});

        var last_addend: i64 = 0;

        for (self.entries.items) |entry| {
            // Must be read before the buffer is moved into the writer below.
            self.offsets.appendAssumeCapacity(@intCast(self.buffer.items.len));

            const sym = entry.target.getSymbol(ctx).?;
            const name = sym.getName(ctx);
            const flags: u8 = if (sym.weakRef(ctx)) macho.BIND_SYMBOL_FLAGS_WEAK_IMPORT else 0;
            const ordinal: i16 = ord: {
                if (sym.flags.interposable) break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                if (sym.flags.import) {
                    // TODO: if (ctx.options.namespace == .flat) break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                    if (sym.getDylibOrdinal(ctx)) |ord| break :ord @bitCast(ord);
                }
                if (ctx.undefined_treatment == .dynamic_lookup)
                    break :ord macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP;
                break :ord macho.BIND_SPECIAL_DYLIB_SELF;
            };

            // Move the buffer into an allocating writer for this iteration;
            // the defer hands ownership back at the end of the block.
            var aw: std.Io.Writer.Allocating = .fromArrayList(gpa, &self.buffer);
            defer self.buffer = aw.toArrayList();
            const w = &aw.writer;
            try setSegmentOffset(entry.segment_id, entry.offset, w);
            try setSymbol(name, flags, w);
            try setDylibOrdinal(ordinal, w);

            if (entry.addend != last_addend) {
                try setAddend(entry.addend, w);
                last_addend = entry.addend;
            }

            try doBind(w);
            try done(w);
        }
    }

    /// Copies the encoded opcode stream to `writer`.
    pub fn write(self: Self, writer: *std.Io.Writer) !void {
        try writer.writeAll(self.buffer.items);
    }
};
604
/// Emits BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB: selects `segment_id` and
/// seeks to `offset` within it.
fn setSegmentOffset(segment_id: u8, offset: u64, writer: *std.Io.Writer) !void {
    log.debug(">>> set segment: {d} and offset: {x}", .{ segment_id, offset });
    // The opcode immediate is only 4 bits wide; a segment index above 15 is
    // not representable and silently truncating it would corrupt the bind
    // stream, so assert instead.
    assert(segment_id <= 0xf);
    try writer.writeByte(macho.BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB | @as(u4, @truncate(segment_id)));
    try writer.writeUleb128(offset);
}
610
/// Emits BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM followed by the
/// NUL-terminated symbol name. Only the low 4 bits of `flags` fit in the
/// opcode immediate.
fn setSymbol(name: []const u8, flags: u8, writer: *std.Io.Writer) !void {
    log.debug(">>> set symbol: {s} with flags: {x}", .{ name, flags });
    const imm: u4 = @truncate(flags);
    try writer.writeByte(macho.BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM | imm);
    try writer.writeAll(name);
    try writer.writeByte(0);
}
617
/// Emits BIND_OPCODE_SET_TYPE_IMM with BIND_TYPE_POINTER — the only bind
/// type this linker produces.
fn setTypePointer(writer: *std.Io.Writer) !void {
    log.debug(">>> set type: {d}", .{macho.BIND_TYPE_POINTER});
    const opcode = macho.BIND_OPCODE_SET_TYPE_IMM | @as(u4, @truncate(macho.BIND_TYPE_POINTER));
    try writer.writeByte(opcode);
}
622
/// Emits the dylib-ordinal opcode for `ordinal`: positive ordinals use the
/// IMM form when they fit in 4 bits and the ULEB form otherwise; zero and
/// negative values must be one of the BIND_SPECIAL_DYLIB_* constants and use
/// the SPECIAL_IMM form (low 4 bits of the two's-complement value).
fn setDylibOrdinal(ordinal: i16, writer: *std.Io.Writer) !void {
    if (ordinal > 0) {
        log.debug(">>> set dylib ordinal: {d}", .{ordinal});
        const value: u16 = @bitCast(ordinal);
        if (value > 0xf) {
            try writer.writeByte(macho.BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB);
            try writer.writeUleb128(value);
        } else {
            try writer.writeByte(macho.BIND_OPCODE_SET_DYLIB_ORDINAL_IMM | @as(u4, @truncate(value)));
        }
        return;
    }
    switch (ordinal) {
        macho.BIND_SPECIAL_DYLIB_SELF,
        macho.BIND_SPECIAL_DYLIB_MAIN_EXECUTABLE,
        macho.BIND_SPECIAL_DYLIB_FLAT_LOOKUP,
        => {},
        else => unreachable, // Invalid dylib special binding
    }
    log.debug(">>> set dylib special: {d}", .{ordinal});
    const value: u16 = @bitCast(ordinal);
    try writer.writeByte(macho.BIND_OPCODE_SET_DYLIB_SPECIAL_IMM | @as(u4, @truncate(value)));
}
646
/// Emits BIND_OPCODE_SET_ADDEND_SLEB followed by the SLEB128-encoded addend.
/// The addend is sticky: it applies to every subsequent bind until reset.
fn setAddend(addend: i64, writer: *std.Io.Writer) !void {
    log.debug(">>> set addend: {x}", .{addend});
    try writer.writeByte(macho.BIND_OPCODE_SET_ADDEND_SLEB);
    try writer.writeSleb128(addend);
}
652
/// Emits BIND_OPCODE_DO_BIND: bind at the current location, after which dyld
/// advances the location by one pointer size.
fn doBind(writer: *std.Io.Writer) !void {
    log.debug(">>> bind", .{});
    try writer.writeByte(macho.BIND_OPCODE_DO_BIND);
}
657
/// Emits a bind followed by an `addr`-byte advance. Uses the compact
/// DO_BIND_ADD_ADDR_IMM_SCALED form when `addr` is pointer-aligned and the
/// scaled value fits the 4-bit immediate; falls back to
/// DO_BIND_ADD_ADDR_ULEB otherwise.
fn doBindAddAddr(addr: u64, writer: *std.Io.Writer) !void {
    log.debug(">>> bind with add: {x}", .{addr});
    const fits_imm = std.mem.isAlignedGeneric(u64, addr, @sizeOf(u64)) and
        @divExact(addr, @sizeOf(u64)) <= 0xf;
    if (fits_imm) {
        const scaled = @divExact(addr, @sizeOf(u64));
        try writer.writeByte(
            macho.BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED | @as(u4, @truncate(scaled)),
        );
    } else {
        try writer.writeByte(macho.BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB);
        try writer.writeUleb128(addr);
    }
}
672
/// Emits BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB: `count` binds, each
/// followed by an advance of pointer size plus `skip` bytes.
fn doBindTimesSkip(count: usize, skip: u64, writer: *std.Io.Writer) !void {
    log.debug(">>> bind with count: {d} and skip: {x}", .{ count, skip });
    try writer.writeByte(macho.BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB);
    try writer.writeUleb128(count);
    try writer.writeUleb128(skip);
}
679
/// Emits BIND_OPCODE_ADD_ADDR_ULEB: advance the current location by `addr`
/// bytes without binding (ULEB wrap-around encodes backwards seeks).
fn addAddr(addr: u64, writer: *std.Io.Writer) !void {
    log.debug(">>> add: {x}", .{addr});
    try writer.writeByte(macho.BIND_OPCODE_ADD_ADDR_ULEB);
    try writer.writeUleb128(addr);
}
685
/// Emits BIND_OPCODE_DONE, terminating an opcode sequence.
fn done(writer: *std.Io.Writer) !void {
    log.debug(">>> done", .{});
    try writer.writeByte(macho.BIND_OPCODE_DONE);
}
690
691const assert = std.debug.assert;
692const log = std.log.scoped(.link_dyld_info);
693const macho = std.macho;
694const mem = std.mem;
695const testing = std.testing;
696const trace = @import("../../../tracy.zig").trace;
697const std = @import("std");
698
699const Allocator = mem.Allocator;
700const File = @import("../file.zig").File;
701const MachO = @import("../../MachO.zig");
702const Symbol = @import("../Symbol.zig");