/// List of all unwind records gathered from all objects and sorted
/// by allocated relative function address within the section.
records: std.ArrayList(Record.Ref) = .empty,

/// List of all personalities referenced by either unwind info entries
/// or __eh_frame entries.
personalities: [max_personalities]MachO.Ref = undefined,
personalities_count: u2 = 0,

/// List of common encodings sorted by use count in descending order (most common first).
common_encodings: [max_common_encodings]Encoding = undefined,
common_encodings_count: u7 = 0,

/// List of record indexes containing an LSDA pointer.
lsdas: std.ArrayList(u32) = .empty,
/// Maps each record index to its starting offset into `lsdas`.
lsdas_lookup: std.ArrayList(u32) = .empty,

/// List of second level pages.
pages: std.ArrayList(Page) = .empty,

pub fn deinit(info: *UnwindInfo, allocator: Allocator) void {
    info.records.deinit(allocator);
    info.pages.deinit(allocator);
    info.lsdas.deinit(allocator);
    info.lsdas_lookup.deinit(allocator);
}

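/// Two adjacent records can be folded into one when they have the same
/// encoding, personality and FDE, and neither has an LSDA. x86_64 STACK_IND
/// encodings are never folded.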
fn canFold(macho_file: *MachO, lhs_ref: Record.Ref, rhs_ref: Record.Ref) bool {
    const cpu_arch = macho_file.getTarget().cpu.arch;
    const lhs = lhs_ref.getUnwindRecord(macho_file);
    const rhs = rhs_ref.getUnwindRecord(macho_file);
    if (cpu_arch == .x86_64) {
        if (lhs.enc.getMode() == @intFromEnum(macho.UNWIND_X86_64_MODE.STACK_IND) or
            rhs.enc.getMode() == @intFromEnum(macho.UNWIND_X86_64_MODE.STACK_IND)) return false;
    }
    const lhs_per = lhs.personality orelse 0;
    const rhs_per = rhs.personality orelse 0;
    return lhs.enc.eql(rhs.enc) and
        lhs_per == rhs_per and
        lhs.fde == rhs.fde and
        lhs.getLsdaAtom(macho_file) == null and rhs.getLsdaAtom(macho_file) == null;
}

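/// Gathers live unwind records from all input objects, encodes DWARF, LSDA and
/// personality references, sorts and folds the records, picks the common
/// encodings, and partitions the result into second level pages.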
pub fn generate(info: *UnwindInfo, macho_file: *MachO) !void {
    const tracy = trace(@src());
    defer tracy.end();

    const gpa = macho_file.base.comp.gpa;

    log.debug("generating unwind info", .{});

    // Collect all unwind records
    for (macho_file.sections.items(.atoms)) |atoms| {
        for (atoms.items) |ref| {
            const atom = ref.getAtom(macho_file) orelse continue;
            if (!atom.isAlive()) continue;
            const recs = atom.getUnwindRecords(macho_file);
            const file = atom.getFile(macho_file);
            try info.records.ensureUnusedCapacity(gpa, recs.len);
            for (recs) |rec| {
                if (!file.object.getUnwindRecord(rec).alive) continue;
                info.records.appendAssumeCapacity(.{ .record = rec, .file = file.getIndex() });
            }
        }
    }

    // Encode records
    for (info.records.items) |ref| {
        const rec = ref.getUnwindRecord(macho_file);
        if (rec.getFde(macho_file)) |fde| {
            // The unwinder will look for the DWARF entry starting at the hint,
            // assuming the hint points to a valid CFI record start. If it
            // fails to find the record, it proceeds in a linear search through
            // the contiguous CFI records from the hint until the end of the
            // section. Ideally, in the case where the offset is too large to
            // be encoded, we would instead encode the largest possible offset
            // to a valid CFI record, but since we don't keep track of that,
            // just encode zero -- the start of the section is always the start
            // of a CFI record.
            const hint = std.math.cast(u24, fde.out_offset) orelse 0;
            rec.enc.setDwarfSectionOffset(hint);

            if (fde.getLsdaAtom(macho_file)) |lsda| {
                rec.lsda = lsda.atom_index;
                rec.lsda_offset = fde.lsda_offset;
                rec.enc.setHasLsda(true);
            }
            const cie = fde.getCie(macho_file);
            if (cie.getPersonality(macho_file)) |_| {
                const object = cie.getObject(macho_file);
                const sym_ref = object.getSymbolRef(cie.personality.?.index, macho_file);
                const personality_index = try info.getOrPutPersonalityFunction(sym_ref); // TODO handle error
                rec.enc.setPersonalityIndex(personality_index + 1);
            }
        } else if (rec.getPersonality(macho_file)) |_| {
            const object = rec.getObject(macho_file);
            const sym_ref = object.getSymbolRef(rec.personality.?, macho_file);
            const personality_index = try info.getOrPutPersonalityFunction(sym_ref); // TODO handle error
            rec.enc.setPersonalityIndex(personality_index + 1);
        }
    }

    // Sort by assigned relative address within each output section
    const sortFn = struct {
        fn sortFn(ctx: *MachO, lhs_ref: Record.Ref, rhs_ref: Record.Ref) bool {
            const lhs = lhs_ref.getUnwindRecord(ctx);
            const rhs = rhs_ref.getUnwindRecord(ctx);
            const lhsa = lhs.getAtom(ctx);
            const rhsa = rhs.getAtom(ctx);
            if (lhsa.out_n_sect == rhsa.out_n_sect) return lhs.getAtomAddress(ctx) < rhs.getAtomAddress(ctx);
            return lhsa.out_n_sect < rhsa.out_n_sect;
        }
    }.sortFn;
    mem.sort(Record.Ref, info.records.items, macho_file, sortFn);

    // Fold the records.
    // Any two adjacent records that share an encoding can be folded into one.
    {
        var i: usize = 0;
        var j: usize = 1;
        while (j < info.records.items.len) : (j += 1) {
            if (canFold(macho_file, info.records.items[i], info.records.items[j])) {
                const rec = info.records.items[i].getUnwindRecord(macho_file);
                rec.length += info.records.items[j].getUnwindRecord(macho_file).length + 1;
            } else {
                i += 1;
                info.records.items[i] = info.records.items[j];
            }
        }
        info.records.shrinkAndFree(gpa, i + 1);
    }

    for (info.records.items) |ref| {
        const rec = ref.getUnwindRecord(macho_file);
        const atom = rec.getAtom(macho_file);
        log.debug("@{x}-{x} : {s} : rec({d}) : object({d}) : {f}", .{
            rec.getAtomAddress(macho_file),
            rec.getAtomAddress(macho_file) + rec.length,
            atom.getName(macho_file),
            ref.record,
            ref.file,
            rec.enc,
        });
    }

    // Calculate common encodings
    {
        const CommonEncWithCount = struct {
            enc: Encoding,
            count: u32,

            fn greaterThan(ctx: void, lhs: @This(), rhs: @This()) bool {
                _ = ctx;
                return lhs.count > rhs.count;
            }
        };

        const Context = struct {
            pub fn hash(ctx: @This(), key: Encoding) u32 {
                _ = ctx;
                return key.enc;
            }

            pub fn eql(
                ctx: @This(),
                key1: Encoding,
                key2: Encoding,
                b_index: usize,
            ) bool {
                _ = ctx;
                _ = b_index;
                return key1.eql(key2);
            }
        };

        var common_encodings_counts = std.ArrayHashMap(
            Encoding,
            CommonEncWithCount,
            Context,
            false,
        ).init(gpa);
        defer common_encodings_counts.deinit();

        for (info.records.items) |ref| {
            const rec = ref.getUnwindRecord(macho_file);
            if (rec.enc.isDwarf(macho_file)) continue;
            const gop = try common_encodings_counts.getOrPut(rec.enc);
            if (!gop.found_existing) {
                gop.value_ptr.* = .{
                    .enc = rec.enc,
                    .count = 0,
                };
            }
            gop.value_ptr.count += 1;
        }

        const slice = common_encodings_counts.values();
        mem.sort(CommonEncWithCount, slice, {}, CommonEncWithCount.greaterThan);

        var i: u7 = 0;
        while (i < slice.len) : (i += 1) {
            if (i >= max_common_encodings) break;
            if (slice[i].count < 2) continue;
            info.appendCommonEncoding(slice[i].enc);
            log.debug("adding common encoding: {d} => {f}", .{ i, slice[i].enc });
        }
    }

    // Compute page allocations
    {
        var i: u32 = 0;
        while (i < info.records.items.len) {
            const rec = info.records.items[i].getUnwindRecord(macho_file);
            const range_start_max: u64 = rec.getAtomAddress(macho_file) + compressed_entry_func_offset_mask;
            var encoding_count: u9 = info.common_encodings_count;
            var space_left: u32 = second_level_page_words -
                @sizeOf(macho.unwind_info_compressed_second_level_page_header) / @sizeOf(u32);
            var page = Page{
                .kind = undefined,
                .start = i,
                .count = 0,
            };

            while (space_left >= 1 and i < info.records.items.len) {
                const next = info.records.items[i].getUnwindRecord(macho_file);
                const is_dwarf = next.enc.isDwarf(macho_file);

                if (next.getAtomAddress(macho_file) >= range_start_max) {
                    break;
                } else if (info.getCommonEncoding(next.enc) != null or
                    page.getPageEncoding(next.enc) != null and !is_dwarf)
                {
                    i += 1;
                    space_left -= 1;
                } else if (space_left >= 2 and encoding_count < max_compact_encodings) {
                    page.appendPageEncoding(next.enc);
                    i += 1;
                    space_left -= 2;
                    encoding_count += 1;
                } else {
                    break;
                }
            }

            page.count = @as(u16, @intCast(i - page.start));

            if (i < info.records.items.len and page.count < max_regular_second_level_entries) {
                page.kind = .regular;
                page.count = @as(u16, @intCast(@min(
                    max_regular_second_level_entries,
                    info.records.items.len - page.start,
                )));
                i = page.start + page.count;
            } else {
                page.kind = .compressed;
            }

            log.debug("{f}", .{page.fmt(info.*)});

            try info.pages.append(gpa, page);
        }
    }

    // Save records having an LSDA pointer
    log.debug("LSDA pointers:", .{});
    try info.lsdas_lookup.ensureTotalCapacityPrecise(gpa, info.records.items.len);
    for (info.records.items, 0..) |ref, i| {
        const rec = ref.getUnwindRecord(macho_file);
        info.lsdas_lookup.appendAssumeCapacity(@intCast(info.lsdas.items.len));
        if (rec.getLsdaAtom(macho_file)) |lsda| {
            log.debug("  @{x} => lsda({d})", .{ rec.getAtomAddress(macho_file), lsda.atom_index });
            try info.lsdas.append(gpa, @intCast(i));
        }
    }
}

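/// Returns the total number of bytes needed for the __unwind_info section:
/// header, common encodings, personalities, first level index entries (plus
/// sentinel), LSDA index entries, and one fixed-size block per second level page.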
pub fn calcSize(info: UnwindInfo) usize {
    const tracy = trace(@src());
    defer tracy.end();

    var total_size: usize = 0;
    total_size += @sizeOf(macho.unwind_info_section_header);
    total_size +=
        @as(usize, @intCast(info.common_encodings_count)) * @sizeOf(macho.compact_unwind_encoding_t);
    total_size += @as(usize, @intCast(info.personalities_count)) * @sizeOf(u32);
    total_size += (info.pages.items.len + 1) * @sizeOf(macho.unwind_info_section_header_index_entry);
    total_size += info.lsdas.items.len * @sizeOf(macho.unwind_info_section_header_lsda_index_entry);
    total_size += info.pages.items.len * second_level_page_bytes;
    return total_size;
}

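/// Serializes the section into `buffer`: header, common encodings, personality
/// GOT offsets, first level index (terminated by a sentinel entry), LSDA index,
/// and the second level pages, zero-padding the remainder of the buffer.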
pub fn write(info: UnwindInfo, macho_file: *MachO, buffer: []u8) !void {
    const seg = macho_file.getTextSegment();
    const header = macho_file.sections.items(.header)[macho_file.unwind_info_sect_index.?];

    var writer: Writer = .fixed(buffer);

    const common_encodings_offset: u32 = @sizeOf(macho.unwind_info_section_header);
    const common_encodings_count: u32 = info.common_encodings_count;
    const personalities_offset: u32 = common_encodings_offset + common_encodings_count * @sizeOf(u32);
    const personalities_count: u32 = info.personalities_count;
    const indexes_offset: u32 = personalities_offset + personalities_count * @sizeOf(u32);
    const indexes_count: u32 = @as(u32, @intCast(info.pages.items.len + 1));

    try writer.writeStruct(@as(macho.unwind_info_section_header, .{
        .commonEncodingsArraySectionOffset = common_encodings_offset,
        .commonEncodingsArrayCount = common_encodings_count,
        .personalityArraySectionOffset = personalities_offset,
        .personalityArrayCount = personalities_count,
        .indexSectionOffset = indexes_offset,
        .indexCount = indexes_count,
    }), .little);

    try writer.writeSliceEndian(Encoding, info.common_encodings[0..info.common_encodings_count], .little);

    for (info.personalities[0..info.personalities_count]) |ref| {
        const sym = ref.getSymbol(macho_file).?;
        try writer.writeInt(u32, @intCast(sym.getGotAddress(macho_file) - seg.vmaddr), .little);
    }

    const pages_base_offset = @as(u32, @intCast(header.size - (info.pages.items.len * second_level_page_bytes)));
    const lsda_base_offset = @as(u32, @intCast(pages_base_offset -
        (info.lsdas.items.len * @sizeOf(macho.unwind_info_section_header_lsda_index_entry))));
    for (info.pages.items, 0..) |page, i| {
        assert(page.count > 0);
        const rec = info.records.items[page.start].getUnwindRecord(macho_file);
        try writer.writeStruct(@as(macho.unwind_info_section_header_index_entry, .{
            .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
            .secondLevelPagesSectionOffset = @as(u32, @intCast(pages_base_offset + i * second_level_page_bytes)),
            .lsdaIndexArraySectionOffset = lsda_base_offset +
                info.lsdas_lookup.items[page.start] * @sizeOf(macho.unwind_info_section_header_lsda_index_entry),
        }), .little);
    }

    const last_rec = info.records.items[info.records.items.len - 1].getUnwindRecord(macho_file);
    const sentinel_address = @as(u32, @intCast(last_rec.getAtomAddress(macho_file) + last_rec.length - seg.vmaddr));
    try writer.writeStruct(@as(macho.unwind_info_section_header_index_entry, .{
        .functionOffset = sentinel_address,
        .secondLevelPagesSectionOffset = 0,
        .lsdaIndexArraySectionOffset = lsda_base_offset +
            @as(u32, @intCast(info.lsdas.items.len)) * @sizeOf(macho.unwind_info_section_header_lsda_index_entry),
    }), .little);

    for (info.lsdas.items) |index| {
        const rec = info.records.items[index].getUnwindRecord(macho_file);
        try writer.writeStruct(@as(macho.unwind_info_section_header_lsda_index_entry, .{
            .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
            .lsdaOffset = @as(u32, @intCast(rec.getLsdaAddress(macho_file) - seg.vmaddr)),
        }), .little);
    }

    for (info.pages.items) |page| {
        const start = writer.end;
        try page.write(info, macho_file, &writer);
        const nwritten = writer.end - start;
        if (nwritten < second_level_page_bytes) {
            const padding = math.cast(usize, second_level_page_bytes - nwritten) orelse return error.Overflow;
            try writer.splatByteAll(0, padding);
        }
    }

    @memset(buffer[writer.end..], 0);
}

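/// Returns the index of `ref` in the personalities array, appending it if it
/// has not been seen before. Returns error.TooManyPersonalities once more than
/// `max_personalities` distinct personality functions are referenced.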
fn getOrPutPersonalityFunction(info: *UnwindInfo, ref: MachO.Ref) error{TooManyPersonalities}!u2 {
    comptime var index: u2 = 0;
    inline while (index < max_personalities) : (index += 1) {
        if (info.personalities[index].eql(ref)) {
            return index;
        } else if (index == info.personalities_count) {
            info.personalities[index] = ref;
            info.personalities_count += 1;
            return index;
        }
    }
    return error.TooManyPersonalities;
}

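/// Appends `enc` to the table of common encodings shared by all second level pages.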
fn appendCommonEncoding(info: *UnwindInfo, enc: Encoding) void {
    assert(info.common_encodings_count <= max_common_encodings);
    info.common_encodings[info.common_encodings_count] = enc;
    info.common_encodings_count += 1;
}

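/// Returns the index of `enc` in the common encodings table, or null if it is not a common encoding.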
fn getCommonEncoding(info: UnwindInfo, enc: Encoding) ?u7 {
    comptime var index: u7 = 0;
    inline while (index < max_common_encodings) : (index += 1) {
        if (index >= info.common_encodings_count) return null;
        if (info.common_encodings[index].eql(enc)) {
            return index;
        }
    }
    return null;
}

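/// A 32-bit compact unwind encoding (`macho.compact_unwind_encoding_t`) with
/// helpers for reading and writing its mode, LSDA flag, personality index and
/// DWARF section offset bit fields.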
pub const Encoding = extern struct {
    enc: macho.compact_unwind_encoding_t,

    pub fn getMode(enc: Encoding) u4 {
        comptime assert(macho.UNWIND_ARM64_MODE_MASK == macho.UNWIND_X86_64_MODE_MASK);
        const shift = comptime @ctz(macho.UNWIND_ARM64_MODE_MASK);
        return @as(u4, @truncate((enc.enc & macho.UNWIND_ARM64_MODE_MASK) >> shift));
    }

    pub fn isDwarf(enc: Encoding, macho_file: *MachO) bool {
        const mode = enc.getMode();
        return switch (macho_file.getTarget().cpu.arch) {
            .aarch64 => @as(macho.UNWIND_ARM64_MODE, @enumFromInt(mode)) == .DWARF,
            .x86_64 => @as(macho.UNWIND_X86_64_MODE, @enumFromInt(mode)) == .DWARF,
            else => unreachable,
        };
    }

    pub fn setMode(enc: *Encoding, mode: anytype) void {
        comptime assert(macho.UNWIND_ARM64_MODE_MASK == macho.UNWIND_X86_64_MODE_MASK);
        const shift = comptime @ctz(macho.UNWIND_ARM64_MODE_MASK);
        enc.enc |= @as(u32, @intCast(@intFromEnum(mode))) << shift;
    }

    pub fn hasLsda(enc: Encoding) bool {
        const shift = comptime @ctz(macho.UNWIND_HAS_LSDA);
        const has_lsda = @as(u1, @truncate((enc.enc & macho.UNWIND_HAS_LSDA) >> shift));
        return has_lsda == 1;
    }

    pub fn setHasLsda(enc: *Encoding, has_lsda: bool) void {
        const shift = comptime @ctz(macho.UNWIND_HAS_LSDA);
        const mask = @as(u32, @intCast(@intFromBool(has_lsda))) << shift;
        enc.enc |= mask;
    }

    pub fn getPersonalityIndex(enc: Encoding) u2 {
        const shift = comptime @ctz(macho.UNWIND_PERSONALITY_MASK);
        const index = @as(u2, @truncate((enc.enc & macho.UNWIND_PERSONALITY_MASK) >> shift));
        return index;
    }

    pub fn setPersonalityIndex(enc: *Encoding, index: u2) void {
        const shift = comptime @ctz(macho.UNWIND_PERSONALITY_MASK);
        const mask = @as(u32, @intCast(index)) << shift;
        enc.enc |= mask;
    }

    pub fn getDwarfSectionOffset(enc: Encoding) u24 {
        const offset = @as(u24, @truncate(enc.enc));
        return offset;
    }

    pub fn setDwarfSectionOffset(enc: *Encoding, offset: u24) void {
        enc.enc |= offset;
    }

    pub fn eql(enc: Encoding, other: Encoding) bool {
        return enc.enc == other.enc;
    }

    pub fn format(enc: Encoding, w: *Writer) Writer.Error!void {
        try w.print("0x{x:0>8}", .{enc.enc});
    }
};

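/// A single compact unwind record: the function atom and range it covers, its
/// encoding, and optional LSDA, personality and FDE references.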
pub const Record = struct {
    length: u32 = 0,
    enc: Encoding = .{ .enc = 0 },
    atom: Atom.Index = 0,
    atom_offset: u32 = 0,
    lsda: Atom.Index = 0,
    lsda_offset: u32 = 0,
    personality: ?Symbol.Index = null, // TODO make this zero-is-null
    fde: Fde.Index = 0, // TODO actually make FDE at 0 an invalid FDE
    file: File.Index = 0,
    alive: bool = true,

    pub fn getObject(rec: Record, macho_file: *MachO) *Object {
        return macho_file.getFile(rec.file).?.object;
    }

    pub fn getAtom(rec: Record, macho_file: *MachO) *Atom {
        return rec.getObject(macho_file).getAtom(rec.atom).?;
    }

    pub fn getLsdaAtom(rec: Record, macho_file: *MachO) ?*Atom {
        return rec.getObject(macho_file).getAtom(rec.lsda);
    }

    pub fn getPersonality(rec: Record, macho_file: *MachO) ?*Symbol {
        const personality = rec.personality orelse return null;
        const object = rec.getObject(macho_file);
        return object.getSymbolRef(personality, macho_file).getSymbol(macho_file);
    }

    pub fn getFde(rec: Record, macho_file: *MachO) ?Fde {
        if (!rec.enc.isDwarf(macho_file)) return null;
        return rec.getObject(macho_file).fdes.items[rec.fde];
    }

    pub fn getFdePtr(rec: Record, macho_file: *MachO) ?*Fde {
        if (!rec.enc.isDwarf(macho_file)) return null;
        return &rec.getObject(macho_file).fdes.items[rec.fde];
    }

    pub fn getAtomAddress(rec: Record, macho_file: *MachO) u64 {
        const atom = rec.getAtom(macho_file);
        return atom.getAddress(macho_file) + rec.atom_offset;
    }

    pub fn getLsdaAddress(rec: Record, macho_file: *MachO) u64 {
        const lsda = rec.getLsdaAtom(macho_file) orelse return 0;
        return lsda.getAddress(macho_file) + rec.lsda_offset;
    }

    pub fn fmt(rec: Record, macho_file: *MachO) std.fmt.Alt(Format, Format.default) {
        return .{ .data = .{
            .rec = rec,
            .macho_file = macho_file,
        } };
    }

    const Format = struct {
        rec: Record,
        macho_file: *MachO,

        fn default(f: Format, w: *Writer) Writer.Error!void {
            const rec = f.rec;
            const macho_file = f.macho_file;
            try w.print("{x} : len({x})", .{
                rec.enc.enc, rec.length,
            });
            if (rec.enc.isDwarf(macho_file)) try w.print(" : fde({d})", .{rec.fde});
            try w.print(" : {s}", .{rec.getAtom(macho_file).getName(macho_file)});
            if (!rec.alive) try w.writeAll(" : [*]");
        }
    };

    pub const Index = u32;

    const Ref = struct {
        record: Index,
        file: File.Index,

        pub fn getUnwindRecord(ref: Ref, macho_file: *MachO) *Record {
            return macho_file.getFile(ref.file).?.object.getUnwindRecord(ref.record);
        }
    };
};

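// The personality index is a 2-bit field in the compact encoding where 0 means
// "no personality", hence at most 3 distinct personality functions.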
const max_personalities = 3;
const max_common_encodings = 127;
const max_compact_encodings = 256;

const second_level_page_bytes = 0x1000;
const second_level_page_words = second_level_page_bytes / @sizeOf(u32);

const max_regular_second_level_entries =
    (second_level_page_bytes - @sizeOf(macho.unwind_info_regular_second_level_page_header)) /
    @sizeOf(macho.unwind_info_regular_second_level_entry);

const max_compressed_second_level_entries =
    (second_level_page_bytes - @sizeOf(macho.unwind_info_compressed_second_level_page_header)) /
    @sizeOf(u32);

const compressed_entry_func_offset_mask = ~@as(u24, 0);

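/// A second level page covering a contiguous run of records. A compressed page
/// stores its own page-local encodings in addition to the common ones; runs
/// that do not fit the compressed format are emitted as regular pages.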
const Page = struct {
    kind: enum { regular, compressed },
    start: u32,
    count: u16,
    page_encodings: [max_compact_encodings]Encoding = undefined,
    page_encodings_count: u9 = 0,

    fn appendPageEncoding(page: *Page, enc: Encoding) void {
        assert(page.page_encodings_count <= max_compact_encodings);
        page.page_encodings[page.page_encodings_count] = enc;
        page.page_encodings_count += 1;
    }

    fn getPageEncoding(page: Page, enc: Encoding) ?u8 {
        comptime var index: u9 = 0;
        inline while (index < max_compact_encodings) : (index += 1) {
            if (index >= page.page_encodings_count) return null;
            if (page.page_encodings[index].eql(enc)) {
                return @as(u8, @intCast(index));
            }
        }
        return null;
    }

    const Format = struct {
        page: Page,
        info: UnwindInfo,

        fn default(f: Format, w: *Writer) Writer.Error!void {
            try w.writeAll("Page:\n");
            try w.print("  kind: {s}\n", .{@tagName(f.page.kind)});
            try w.print("  entries: {d} - {d}\n", .{
                f.page.start,
                f.page.start + f.page.count,
            });
            try w.print("  encodings (count = {d})\n", .{f.page.page_encodings_count});
            for (f.page.page_encodings[0..f.page.page_encodings_count], 0..) |enc, i| {
                try w.print("    {d}: {f}\n", .{ f.info.common_encodings_count + i, enc });
            }
        }
    };

    fn fmt(page: Page, info: UnwindInfo) std.fmt.Alt(Format, Format.default) {
        return .{ .data = .{
            .page = page,
            .info = info,
        } };
    }

    fn write(page: Page, info: UnwindInfo, macho_file: *MachO, writer: *Writer) !void {
        const seg = macho_file.getTextSegment();

        switch (page.kind) {
            .regular => {
                try writer.writeStruct(@as(macho.unwind_info_regular_second_level_page_header, .{
                    .entryPageOffset = @sizeOf(macho.unwind_info_regular_second_level_page_header),
                    .entryCount = page.count,
                }), .little);

                for (info.records.items[page.start..][0..page.count]) |ref| {
                    const rec = ref.getUnwindRecord(macho_file);
                    try writer.writeStruct(@as(macho.unwind_info_regular_second_level_entry, .{
                        .functionOffset = @as(u32, @intCast(rec.getAtomAddress(macho_file) - seg.vmaddr)),
                        .encoding = rec.enc.enc,
                    }), .little);
                }
            },
            .compressed => {
                const entry_offset = @sizeOf(macho.unwind_info_compressed_second_level_page_header) +
                    @as(u16, @intCast(page.page_encodings_count)) * @sizeOf(u32);
                try writer.writeStruct(@as(macho.unwind_info_compressed_second_level_page_header, .{
                    .entryPageOffset = entry_offset,
                    .entryCount = page.count,
                    .encodingsPageOffset = @sizeOf(macho.unwind_info_compressed_second_level_page_header),
                    .encodingsCount = page.page_encodings_count,
                }), .little);

                for (page.page_encodings[0..page.page_encodings_count]) |enc| {
                    try writer.writeInt(u32, enc.enc, .little);
                }

                assert(page.count > 0);
                const first_rec = info.records.items[page.start].getUnwindRecord(macho_file);
                for (info.records.items[page.start..][0..page.count]) |ref| {
                    const rec = ref.getUnwindRecord(macho_file);
                    const enc_index = blk: {
                        if (info.getCommonEncoding(rec.enc)) |id| break :blk id;
                        const ncommon = info.common_encodings_count;
                        break :blk ncommon + page.getPageEncoding(rec.enc).?;
                    };
                    const compressed = macho.UnwindInfoCompressedEntry{
                        .funcOffset = @as(u24, @intCast(rec.getAtomAddress(macho_file) - first_rec.getAtomAddress(macho_file))),
                        .encodingIndex = @as(u8, @intCast(enc_index)),
                    };
                    try writer.writeStruct(compressed, .little);
                }
            },
        }
    }
};

const std = @import("std");
const assert = std.debug.assert;
const eh_frame = @import("eh_frame.zig");
const fs = std.fs;
const leb = std.leb;
const log = std.log.scoped(.link);
const macho = std.macho;
const math = std.math;
const mem = std.mem;
const trace = @import("../../tracy.zig").trace;
const Writer = std.Io.Writer;

const Allocator = mem.Allocator;
const Atom = @import("Atom.zig");
const Fde = eh_frame.Fde;
const File = @import("file.zig").File;
const MachO = @import("../MachO.zig");
const Object = @import("Object.zig");
const Symbol = @import("Symbol.zig");
const UnwindInfo = @This();