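/// Synthesizes the contents of the `.dynamic` section: collects DT_NEEDED, DT_SONAME
/// and DT_RUNPATH string-table offsets and serializes the final array of `Elf64_Dyn` tags.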
pub const DynamicSection = struct {
    soname: ?u32 = null,
    needed: std.ArrayList(u32) = .empty,
    rpath: u32 = 0,

    pub fn deinit(dt: *DynamicSection, allocator: Allocator) void {
        dt.needed.deinit(allocator);
    }

    pub fn addNeeded(dt: *DynamicSection, shared: *SharedObject, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const off = try elf_file.insertDynString(shared.soname());
        try dt.needed.append(gpa, off);
    }

    pub fn setRpath(dt: *DynamicSection, rpath_list: []const []const u8, elf_file: *Elf) !void {
        if (rpath_list.len == 0) return;
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        var rpath = std.array_list.Managed(u8).init(gpa);
        defer rpath.deinit();
        for (rpath_list, 0..) |path, i| {
            if (i > 0) try rpath.append(':');
            try rpath.appendSlice(path);
        }
        dt.rpath = try elf_file.insertDynString(rpath.items);
    }

    pub fn setSoname(dt: *DynamicSection, soname: []const u8, elf_file: *Elf) !void {
        dt.soname = try elf_file.insertDynString(soname);
    }

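    /// Computes the DT_FLAGS value for the output file, or null if no flag applies.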
    fn getFlags(dt: DynamicSection, elf_file: *Elf) ?u64 {
        _ = dt;
        var flags: u64 = 0;
        if (elf_file.z_now) {
            flags |= elf.DF_BIND_NOW;
        }
        for (elf_file.got.entries.items) |entry| switch (entry.tag) {
            .gottp => {
                flags |= elf.DF_STATIC_TLS;
                break;
            },
            else => {},
        };
        if (elf_file.has_text_reloc) {
            flags |= elf.DF_TEXTREL;
        }
        return if (flags > 0) flags else null;
    }

    fn getFlags1(dt: DynamicSection, elf_file: *Elf) ?u64 {
        const comp = elf_file.base.comp;
        _ = dt;
        var flags_1: u64 = 0;
        if (elf_file.z_now) {
            flags_1 |= elf.DF_1_NOW;
        }
        if (elf_file.base.isExe() and comp.config.pie) {
            flags_1 |= elf.DF_1_PIE;
        }
        // if (elf_file.z_nodlopen) {
        //     flags_1 |= elf.DF_1_NOOPEN;
        // }
        return if (flags_1 > 0) flags_1 else null;
    }

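    /// Counts the entries that `write` emits and returns the section size in bytes.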
    pub fn size(dt: DynamicSection, elf_file: *Elf) usize {
        var nentries: usize = 0;
        nentries += dt.needed.items.len; // NEEDED
        if (dt.soname != null) nentries += 1; // SONAME
        if (dt.rpath > 0) nentries += 1; // RUNPATH
        if (elf_file.sectionByName(".init") != null) nentries += 1; // INIT
        if (elf_file.sectionByName(".fini") != null) nentries += 1; // FINI
        if (elf_file.sectionByName(".init_array") != null) nentries += 2; // INIT_ARRAY
        if (elf_file.sectionByName(".fini_array") != null) nentries += 2; // FINI_ARRAY
        if (elf_file.section_indexes.rela_dyn != null) nentries += 3; // RELA
        if (elf_file.section_indexes.rela_plt != null) nentries += 3; // JMPREL
        if (elf_file.section_indexes.got_plt != null) nentries += 1; // PLTGOT
        nentries += 1; // HASH
        if (elf_file.section_indexes.gnu_hash != null) nentries += 1; // GNU_HASH
        if (elf_file.has_text_reloc) nentries += 1; // TEXTREL
        nentries += 1; // SYMTAB
        nentries += 1; // SYMENT
        nentries += 1; // STRTAB
        nentries += 1; // STRSZ
        if (elf_file.section_indexes.versym != null) nentries += 1; // VERSYM
        if (elf_file.section_indexes.verneed != null) nentries += 2; // VERNEED
        if (dt.getFlags(elf_file) != null) nentries += 1; // FLAGS
        if (dt.getFlags1(elf_file) != null) nentries += 1; // FLAGS_1
        if (!elf_file.isEffectivelyDynLib()) nentries += 1; // DEBUG
        nentries += 1; // NULL
        return nentries * @sizeOf(elf.Elf64_Dyn);
    }

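    /// Serializes every dynamic tag, terminated by DT_NULL, in the order assumed by `size`.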
    pub fn write(dt: DynamicSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const shdrs = elf_file.sections.items(.shdr);

        // NEEDED
        for (dt.needed.items) |off| {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_NEEDED, .d_val = off }), .little);
        }

        if (dt.soname) |off| {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_SONAME, .d_val = off }), .little);
        }

        // RUNPATH
        // TODO add option in Options to revert to old RPATH tag
        if (dt.rpath > 0) {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_RUNPATH, .d_val = dt.rpath }), .little);
        }

        // INIT
        if (elf_file.sectionByName(".init")) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_INIT, .d_val = addr }), .little);
        }

        // FINI
        if (elf_file.sectionByName(".fini")) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_FINI, .d_val = addr }), .little);
        }

        // INIT_ARRAY
        if (elf_file.sectionByName(".init_array")) |shndx| {
            const shdr = shdrs[shndx];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_INIT_ARRAY, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_INIT_ARRAYSZ, .d_val = shdr.sh_size }), .little);
        }

        // FINI_ARRAY
        if (elf_file.sectionByName(".fini_array")) |shndx| {
            const shdr = shdrs[shndx];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_FINI_ARRAY, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_FINI_ARRAYSZ, .d_val = shdr.sh_size }), .little);
        }

        // RELA
        if (elf_file.section_indexes.rela_dyn) |shndx| {
            const shdr = shdrs[shndx];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_RELA, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_RELASZ, .d_val = shdr.sh_size }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_RELAENT, .d_val = shdr.sh_entsize }), .little);
        }

        // JMPREL
        if (elf_file.section_indexes.rela_plt) |shndx| {
            const shdr = shdrs[shndx];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_JMPREL, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_PLTRELSZ, .d_val = shdr.sh_size }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_PLTREL, .d_val = elf.DT_RELA }), .little);
        }

        // PLTGOT
        if (elf_file.section_indexes.got_plt) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_PLTGOT, .d_val = addr }), .little);
        }

        {
            assert(elf_file.section_indexes.hash != null);
            const addr = shdrs[elf_file.section_indexes.hash.?].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_HASH, .d_val = addr }), .little);
        }

        if (elf_file.section_indexes.gnu_hash) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_GNU_HASH, .d_val = addr }), .little);
        }

        // TEXTREL
        if (elf_file.has_text_reloc) {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_TEXTREL, .d_val = 0 }), .little);
        }

        // SYMTAB + SYMENT
        {
            assert(elf_file.section_indexes.dynsymtab != null);
            const shdr = shdrs[elf_file.section_indexes.dynsymtab.?];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_SYMTAB, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_SYMENT, .d_val = shdr.sh_entsize }), .little);
        }

        // STRTAB + STRSZ
        {
            assert(elf_file.section_indexes.dynstrtab != null);
            const shdr = shdrs[elf_file.section_indexes.dynstrtab.?];
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_STRTAB, .d_val = shdr.sh_addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_STRSZ, .d_val = shdr.sh_size }), .little);
        }

        // VERSYM
        if (elf_file.section_indexes.versym) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_VERSYM, .d_val = addr }), .little);
        }

        // VERNEED + VERNEEDNUM
        if (elf_file.section_indexes.verneed) |shndx| {
            const addr = shdrs[shndx].sh_addr;
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_VERNEED, .d_val = addr }), .little);
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{
                .d_tag = elf.DT_VERNEEDNUM,
                .d_val = elf_file.verneed.verneed.items.len,
            }), .little);
        }

        // FLAGS
        if (dt.getFlags(elf_file)) |flags| {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_FLAGS, .d_val = flags }), .little);
        }
        // FLAGS_1
        if (dt.getFlags1(elf_file)) |flags_1| {
            try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_FLAGS_1, .d_val = flags_1 }), .little);
        }

        // DEBUG
        if (!elf_file.isEffectivelyDynLib()) try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_DEBUG, .d_val = 0 }), .little);

        // NULL
        try writer.writeStruct(@as(elf.Elf64_Dyn, .{ .d_tag = elf.DT_NULL, .d_val = 0 }), .little);
    }
};

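/// Synthesizes the `.got` section: tracks plain GOT, TLSLD, TLSGD, GOTTP and TLSDESC
/// entries and emits both their cells and any dynamic relocations they require.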
pub const GotSection = struct {
    entries: std.ArrayList(Entry) = .empty,
    output_symtab_ctx: Elf.SymtabCtx = .{},
    tlsld_index: ?u32 = null,
    flags: Flags = .{},

    pub const Index = u32;

    const Flags = packed struct {
        needs_rela: bool = false,
        needs_tlsld: bool = false,
    };

    const Tag = enum {
        got,
        tlsld,
        tlsgd,
        gottp,
        tlsdesc,
    };

    const Entry = struct {
        tag: Tag,
        ref: Elf.Ref,
        cell_index: Index,

        /// Returns how many indexes in the GOT this entry uses.
        pub inline fn len(entry: Entry) usize {
            return switch (entry.tag) {
                .got, .gottp => 1,
                .tlsld, .tlsgd, .tlsdesc => 2,
            };
        }

        pub fn address(entry: Entry, elf_file: *Elf) i64 {
            const ptr_bytes = elf_file.archPtrWidthBytes();
            const shdr = &elf_file.sections.items(.shdr)[elf_file.section_indexes.got.?];
            return @as(i64, @intCast(shdr.sh_addr)) + entry.cell_index * ptr_bytes;
        }
    };

    pub fn deinit(got: *GotSection, allocator: Allocator) void {
        got.entries.deinit(allocator);
    }

    fn allocateEntry(got: *GotSection, allocator: Allocator) !Index {
        try got.entries.ensureUnusedCapacity(allocator, 1);
        // TODO add free list
        const index = @as(Index, @intCast(got.entries.items.len));
        const entry = got.entries.addOneAssumeCapacity();
        const cell_index: Index = if (index > 0) blk: {
            const last = got.entries.items[index - 1];
            break :blk last.cell_index + @as(Index, @intCast(last.len()));
        } else 0;
        entry.* = .{ .tag = undefined, .ref = undefined, .cell_index = cell_index };
        return index;
    }

    pub fn addGotSymbol(got: *GotSection, ref: Elf.Ref, elf_file: *Elf) !Index {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = try got.allocateEntry(gpa);
        const entry = &got.entries.items[index];
        entry.tag = .got;
        entry.ref = ref;
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_got = true;
        if (symbol.flags.import or symbol.isIFunc(elf_file) or
            ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and !symbol.isAbs(elf_file)))
        {
            got.flags.needs_rela = true;
        }
        symbol.addExtra(.{ .got = index }, elf_file);
        return index;
    }

    pub fn addTlsLdSymbol(got: *GotSection, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        assert(got.flags.needs_tlsld);
        const index = try got.allocateEntry(gpa);
        const entry = &got.entries.items[index];
        entry.tag = .tlsld;
        entry.ref = .{ .index = 0, .file = 0 }; // unused
        got.flags.needs_rela = true;
        got.tlsld_index = index;
    }

    pub fn addTlsGdSymbol(got: *GotSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = try got.allocateEntry(gpa);
        const entry = &got.entries.items[index];
        entry.tag = .tlsgd;
        entry.ref = ref;
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_tlsgd = true;
        if (symbol.flags.import or elf_file.isEffectivelyDynLib()) got.flags.needs_rela = true;
        symbol.addExtra(.{ .tlsgd = index }, elf_file);
    }

    pub fn addGotTpSymbol(got: *GotSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = try got.allocateEntry(gpa);
        const entry = &got.entries.items[index];
        entry.tag = .gottp;
        entry.ref = ref;
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_gottp = true;
        if (symbol.flags.import or elf_file.isEffectivelyDynLib()) got.flags.needs_rela = true;
        symbol.addExtra(.{ .gottp = index }, elf_file);
    }

    pub fn addTlsDescSymbol(got: *GotSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = try got.allocateEntry(gpa);
        const entry = &got.entries.items[index];
        entry.tag = .tlsdesc;
        entry.ref = ref;
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_tlsdesc = true;
        got.flags.needs_rela = true;
        symbol.addExtra(.{ .tlsdesc = index }, elf_file);
    }

    pub fn size(got: GotSection, elf_file: *Elf) usize {
        var s: usize = 0;
        for (got.entries.items) |entry| {
            s += elf_file.archPtrWidthBytes() * entry.len();
        }
        return s;
    }

    pub fn write(got: GotSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const comp = elf_file.base.comp;
        const is_dyn_lib = elf_file.isEffectivelyDynLib();
        const apply_relocs = true; // TODO add user option for this

        for (got.entries.items) |entry| {
            const symbol = elf_file.symbol(entry.ref);
            switch (entry.tag) {
                .got => {
                    const value = blk: {
                        const value = symbol.?.address(.{ .plt = false }, elf_file);
                        if (symbol.?.flags.import) break :blk 0;
                        if (symbol.?.isIFunc(elf_file))
                            break :blk if (apply_relocs) value else 0;
                        if ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
                            !symbol.?.isAbs(elf_file))
                        {
                            break :blk if (apply_relocs) value else 0;
                        }
                        break :blk value;
                    };
                    try writeInt(value, elf_file, writer);
                },
                .tlsld => {
                    try writeInt(if (is_dyn_lib) @as(i64, 0) else 1, elf_file, writer);
                    try writeInt(0, elf_file, writer);
                },
                .tlsgd => {
                    if (symbol.?.flags.import) {
                        try writeInt(0, elf_file, writer);
                        try writeInt(0, elf_file, writer);
                    } else {
                        try writeInt(if (is_dyn_lib) @as(i64, 0) else 1, elf_file, writer);
                        const offset = symbol.?.address(.{}, elf_file) - elf_file.dtpAddress();
                        try writeInt(offset, elf_file, writer);
                    }
                },
                .gottp => {
                    if (symbol.?.flags.import) {
                        try writeInt(0, elf_file, writer);
                    } else if (is_dyn_lib) {
                        const offset = if (apply_relocs)
                            symbol.?.address(.{}, elf_file) - elf_file.tlsAddress()
                        else
                            0;
                        try writeInt(offset, elf_file, writer);
                    } else {
                        const offset = symbol.?.address(.{}, elf_file) - elf_file.tpAddress();
                        try writeInt(offset, elf_file, writer);
                    }
                },
                .tlsdesc => {
                    try writeInt(0, elf_file, writer);
                    const offset: i64 = if (apply_relocs and !symbol.?.flags.import)
                        symbol.?.address(.{}, elf_file) - elf_file.tlsAddress()
                    else
                        0;
                    try writeInt(offset, elf_file, writer);
                },
            }
        }
    }

    pub fn addRela(got: GotSection, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const is_dyn_lib = elf_file.isEffectivelyDynLib();
        const cpu_arch = elf_file.getTarget().cpu.arch;
        try elf_file.rela_dyn.ensureUnusedCapacity(gpa, got.numRela(elf_file));

        relocs_log.debug(".got", .{});

        for (got.entries.items) |entry| {
            const symbol = elf_file.symbol(entry.ref);
            const extra = if (symbol) |s| s.extra(elf_file) else null;

            switch (entry.tag) {
                .got => {
                    const offset: u64 = @intCast(symbol.?.gotAddress(elf_file));
                    if (symbol.?.flags.import) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .sym = extra.?.dynamic,
                            .type = relocation.encode(.glob_dat, cpu_arch),
                            .target = symbol,
                        });
                        continue;
                    }
                    if (symbol.?.isIFunc(elf_file)) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .type = relocation.encode(.irel, cpu_arch),
                            .addend = symbol.?.address(.{ .plt = false }, elf_file),
                            .target = symbol,
                        });
                        continue;
                    }
                    if ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
                        !symbol.?.isAbs(elf_file))
                    {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .type = relocation.encode(.rel, cpu_arch),
                            .addend = symbol.?.address(.{ .plt = false }, elf_file),
                            .target = symbol,
                        });
                    }
                },

                .tlsld => {
                    if (is_dyn_lib) {
                        const offset: u64 = @intCast(entry.address(elf_file));
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .type = relocation.encode(.dtpmod, cpu_arch),
                        });
                    }
                },

                .tlsgd => {
                    const offset: u64 = @intCast(symbol.?.tlsGdAddress(elf_file));
                    if (symbol.?.flags.import) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .sym = extra.?.dynamic,
                            .type = relocation.encode(.dtpmod, cpu_arch),
                            .target = symbol,
                        });
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset + 8,
                            .sym = extra.?.dynamic,
                            .type = relocation.encode(.dtpoff, cpu_arch),
                            .target = symbol,
                        });
                    } else if (is_dyn_lib) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .sym = extra.?.dynamic,
                            .type = relocation.encode(.dtpmod, cpu_arch),
                            .target = symbol,
                        });
                    }
                },

                .gottp => {
                    const offset: u64 = @intCast(symbol.?.gotTpAddress(elf_file));
                    if (symbol.?.flags.import) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .sym = extra.?.dynamic,
                            .type = relocation.encode(.tpoff, cpu_arch),
                            .target = symbol,
                        });
                    } else if (is_dyn_lib) {
                        elf_file.addRelaDynAssumeCapacity(.{
                            .offset = offset,
                            .type = relocation.encode(.tpoff, cpu_arch),
                            .addend = symbol.?.address(.{}, elf_file) - elf_file.tlsAddress(),
                            .target = symbol,
                        });
                    }
                },

                .tlsdesc => {
                    const offset: u64 = @intCast(symbol.?.tlsDescAddress(elf_file));
                    elf_file.addRelaDynAssumeCapacity(.{
                        .offset = offset,
                        .sym = if (symbol.?.flags.import) extra.?.dynamic else 0,
                        .type = relocation.encode(.tlsdesc, cpu_arch),
                        .addend = if (symbol.?.flags.import) 0 else symbol.?.address(.{}, elf_file) - elf_file.tlsAddress(),
                        .target = symbol,
                    });
                },
            }
        }
    }

    pub fn numRela(got: GotSection, elf_file: *Elf) usize {
        const comp = elf_file.base.comp;
        const is_dyn_lib = elf_file.isEffectivelyDynLib();
        var num: usize = 0;
        for (got.entries.items) |entry| {
            const symbol = elf_file.symbol(entry.ref);
            switch (entry.tag) {
                .got => if (symbol.?.flags.import or symbol.?.isIFunc(elf_file) or
                    ((elf_file.isEffectivelyDynLib() or (elf_file.base.isExe() and comp.config.pie)) and
                        !symbol.?.isAbs(elf_file)))
                {
                    num += 1;
                },

                .tlsld => if (is_dyn_lib) {
                    num += 1;
                },

                .tlsgd => if (symbol.?.flags.import) {
                    num += 2;
                } else if (is_dyn_lib) {
                    num += 1;
                },

                .gottp => if (symbol.?.flags.import or is_dyn_lib) {
                    num += 1;
                },

                .tlsdesc => num += 1,
            }
        }
        return num;
    }

    pub fn updateSymtabSize(got: *GotSection, elf_file: *Elf) void {
        got.output_symtab_ctx.nlocals = @as(u32, @intCast(got.entries.items.len));
        for (got.entries.items) |entry| {
            const symbol_name = if (elf_file.symbol(entry.ref)) |sym| sym.name(elf_file) else "";
            got.output_symtab_ctx.strsize += @as(u32, @intCast(symbol_name.len + @tagName(entry.tag).len)) + 1 + 1;
        }
    }

    pub fn writeSymtab(got: GotSection, elf_file: *Elf) void {
        for (got.entries.items, got.output_symtab_ctx.ilocal..) |entry, ilocal| {
            const symbol = elf_file.symbol(entry.ref);
            const symbol_name = if (symbol) |s| s.name(elf_file) else "";
            const st_name = @as(u32, @intCast(elf_file.strtab.items.len));
            elf_file.strtab.appendSliceAssumeCapacity(symbol_name);
            elf_file.strtab.appendAssumeCapacity('$');
            elf_file.strtab.appendSliceAssumeCapacity(@tagName(entry.tag));
            elf_file.strtab.appendAssumeCapacity(0);
            const st_value = entry.address(elf_file);
            const st_size: u64 = entry.len() * elf_file.archPtrWidthBytes();
            elf_file.symtab.items[ilocal] = .{
                .st_name = st_name,
                .st_info = elf.STT_OBJECT,
                .st_other = 0,
                .st_shndx = @intCast(elf_file.section_indexes.got.?),
                .st_value = @intCast(st_value),
                .st_size = st_size,
            };
        }
    }

    const Format = struct {
        got: GotSection,
        elf_file: *Elf,

        pub fn default(f: Format, writer: *std.Io.Writer) std.Io.Writer.Error!void {
            const got = f.got;
            const elf_file = f.elf_file;
            try writer.writeAll("GOT\n");
            for (got.entries.items) |entry| {
                const symbol = elf_file.symbol(entry.ref).?;
                try writer.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{
                    entry.cell_index,
                    entry.address(elf_file),
                    entry.ref,
                    symbol.address(.{}, elf_file),
                    symbol.name(elf_file),
                });
            }
        }
    };

    pub fn fmt(got: GotSection, elf_file: *Elf) std.fmt.Alt(Format, Format.default) {
        return .{ .data = .{ .got = got, .elf_file = elf_file } };
    }
};

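/// Synthesizes the `.plt` section: a per-arch preamble followed by one trampoline per
/// imported symbol, with matching jump-slot relocations emitted into `.rela.plt`.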
pub const PltSection = struct {
    symbols: std.ArrayList(Elf.Ref) = .empty,
    output_symtab_ctx: Elf.SymtabCtx = .{},

    pub fn deinit(plt: *PltSection, allocator: Allocator) void {
        plt.symbols.deinit(allocator);
    }

    pub fn addSymbol(plt: *PltSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = @as(u32, @intCast(plt.symbols.items.len));
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_plt = true;
        symbol.addExtra(.{ .plt = index }, elf_file);
        try plt.symbols.append(gpa, ref);
    }

    pub fn size(plt: PltSection, elf_file: *Elf) usize {
        const cpu_arch = elf_file.getTarget().cpu.arch;
        return preambleSize(cpu_arch) + plt.symbols.items.len * entrySize(cpu_arch);
    }

    pub fn preambleSize(cpu_arch: std.Target.Cpu.Arch) usize {
        return switch (cpu_arch) {
            .x86_64 => 32,
            .aarch64 => 8 * @sizeOf(u32),
            else => @panic("TODO implement preambleSize for this cpu arch"),
        };
    }

    pub fn entrySize(cpu_arch: std.Target.Cpu.Arch) usize {
        return switch (cpu_arch) {
            .x86_64 => 16,
            .aarch64 => 4 * @sizeOf(u32),
            else => @panic("TODO implement entrySize for this cpu arch"),
        };
    }

    pub fn write(plt: PltSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const cpu_arch = elf_file.getTarget().cpu.arch;
        switch (cpu_arch) {
            .x86_64 => try x86_64.write(plt, elf_file, writer),
            .aarch64 => try aarch64.write(plt, elf_file, writer),
            else => return error.UnsupportedCpuArch,
        }
    }

    pub fn addRela(plt: PltSection, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const cpu_arch = elf_file.getTarget().cpu.arch;
        try elf_file.rela_plt.ensureUnusedCapacity(gpa, plt.numRela());

        relocs_log.debug(".plt", .{});

        for (plt.symbols.items) |ref| {
            const sym = elf_file.symbol(ref).?;
            assert(sym.flags.import);
            const extra = sym.extra(elf_file);
            const r_offset: u64 = @intCast(sym.gotPltAddress(elf_file));
            const r_sym: u64 = extra.dynamic;
            const r_type = relocation.encode(.jump_slot, cpu_arch);

            relocs_log.debug(" {f}: [{x} => {d}({s})] + 0", .{
                relocation.fmtRelocType(r_type, cpu_arch),
                r_offset,
                r_sym,
                sym.name(elf_file),
            });

            elf_file.rela_plt.appendAssumeCapacity(.{
                .r_offset = r_offset,
                .r_info = (r_sym << 32) | r_type,
                .r_addend = 0,
            });
        }
    }

    pub fn numRela(plt: PltSection) usize {
        return plt.symbols.items.len;
    }

    pub fn updateSymtabSize(plt: *PltSection, elf_file: *Elf) void {
        plt.output_symtab_ctx.nlocals = @as(u32, @intCast(plt.symbols.items.len));
        for (plt.symbols.items) |ref| {
            const name = elf_file.symbol(ref).?.name(elf_file);
            plt.output_symtab_ctx.strsize += @as(u32, @intCast(name.len + "$plt".len)) + 1;
        }
    }

    pub fn writeSymtab(plt: PltSection, elf_file: *Elf) void {
        const cpu_arch = elf_file.getTarget().cpu.arch;
        for (plt.symbols.items, plt.output_symtab_ctx.ilocal..) |ref, ilocal| {
            const sym = elf_file.symbol(ref).?;
            const st_name = @as(u32, @intCast(elf_file.strtab.items.len));
            elf_file.strtab.appendSliceAssumeCapacity(sym.name(elf_file));
            elf_file.strtab.appendSliceAssumeCapacity("$plt");
            elf_file.strtab.appendAssumeCapacity(0);
            elf_file.symtab.items[ilocal] = .{
                .st_name = st_name,
                .st_info = elf.STT_FUNC,
                .st_other = 0,
                .st_shndx = @intCast(elf_file.section_indexes.plt.?),
                .st_value = @intCast(sym.pltAddress(elf_file)),
                .st_size = entrySize(cpu_arch),
            };
        }
    }

    const Format = struct {
        plt: PltSection,
        elf_file: *Elf,

        pub fn default(f: Format, writer: *std.Io.Writer) std.Io.Writer.Error!void {
            const plt = f.plt;
            const elf_file = f.elf_file;
            try writer.writeAll("PLT\n");
            for (plt.symbols.items, 0..) |ref, i| {
                const symbol = elf_file.symbol(ref).?;
                try writer.print(" {d}@0x{x} => {f}@0x{x} ({s})\n", .{
                    i,
                    symbol.pltAddress(elf_file),
                    ref,
                    symbol.address(.{}, elf_file),
                    symbol.name(elf_file),
                });
            }
        }
    };

    pub fn fmt(plt: PltSection, elf_file: *Elf) std.fmt.Alt(Format, Format.default) {
        return .{ .data = .{ .plt = plt, .elf_file = elf_file } };
    }

    const x86_64 = struct {
        fn write(plt: PltSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
            const shdrs = elf_file.sections.items(.shdr);
            const plt_addr = shdrs[elf_file.section_indexes.plt.?].sh_addr;
            const got_plt_addr = shdrs[elf_file.section_indexes.got_plt.?].sh_addr;
            var preamble = [_]u8{
                0xf3, 0x0f, 0x1e, 0xfa, // endbr64
                0x41, 0x53, // push r11
                0xff, 0x35, 0x00, 0x00, 0x00, 0x00, // push qword ptr [rip] -> .got.plt[1]
                0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp qword ptr [rip] -> .got.plt[2]
            };
            var disp = @as(i64, @intCast(got_plt_addr + 8)) - @as(i64, @intCast(plt_addr + 8)) - 4;
            mem.writeInt(i32, preamble[8..][0..4], @as(i32, @intCast(disp)), .little);
            disp = @as(i64, @intCast(got_plt_addr + 16)) - @as(i64, @intCast(plt_addr + 14)) - 4;
            mem.writeInt(i32, preamble[14..][0..4], @as(i32, @intCast(disp)), .little);
            try writer.writeAll(&preamble);
            try writer.splatByteAll(0xcc, preambleSize(.x86_64) - preamble.len);

            for (plt.symbols.items, 0..) |ref, i| {
                const sym = elf_file.symbol(ref).?;
                const target_addr = sym.gotPltAddress(elf_file);
                const source_addr = sym.pltAddress(elf_file);
                disp = @as(i64, @intCast(target_addr)) - @as(i64, @intCast(source_addr + 12)) - 4;
                var entry = [_]u8{
                    0xf3, 0x0f, 0x1e, 0xfa, // endbr64
                    0x41, 0xbb, 0x00, 0x00, 0x00, 0x00, // mov r11d, N
                    0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp qword ptr [rip] -> .got.plt[N]
                };
                mem.writeInt(i32, entry[6..][0..4], @as(i32, @intCast(i)), .little);
                mem.writeInt(i32, entry[12..][0..4], @as(i32, @intCast(disp)), .little);
                try writer.writeAll(&entry);
            }
        }
    };

    const aarch64 = struct {
        fn write(plt: PltSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
            {
                const shdrs = elf_file.sections.items(.shdr);
                const plt_addr: i64 = @intCast(shdrs[elf_file.section_indexes.plt.?].sh_addr);
                const got_plt_addr: i64 = @intCast(shdrs[elf_file.section_indexes.got_plt.?].sh_addr);
                // TODO: relax if possible
                // .got.plt[2]
                const pages = try util.calcNumberOfPages(plt_addr + 4, got_plt_addr + 16);
                const ldr_off: u12 = @truncate(@as(u64, @bitCast(got_plt_addr + 16)));
                const add_off: u12 = @truncate(@as(u64, @bitCast(got_plt_addr + 16)));

                const preamble = [_]util.encoding.Instruction{
                    .stp(.x16, .x30, .{ .pre_index = .{ .base = .sp, .index = -16 } }),
                    .adrp(.x16, pages << 12),
                    .ldr(.x17, .{ .unsigned_offset = .{ .base = .x16, .offset = ldr_off } }),
                    .add(.x16, .x16, .{ .immediate = add_off }),
                    .br(.x17),
                    .nop(),
                    .nop(),
                    .nop(),
                };
                comptime assert(preamble.len == 8);
                for (preamble) |inst| try writer.writeInt(util.encoding.Instruction.Backing, @bitCast(inst), .little);
            }

            for (plt.symbols.items) |ref| {
                const sym = elf_file.symbol(ref).?;
                const target_addr = sym.gotPltAddress(elf_file);
                const source_addr = sym.pltAddress(elf_file);
                const pages = try util.calcNumberOfPages(source_addr, target_addr);
                const ldr_off: u12 = @truncate(@as(u64, @bitCast(target_addr)));
                const add_off: u12 = @truncate(@as(u64, @bitCast(target_addr)));
                const insts = [_]util.encoding.Instruction{
                    .adrp(.x16, pages << 12),
                    .ldr(.x17, .{ .unsigned_offset = .{ .base = .x16, .offset = ldr_off } }),
                    .add(.x16, .x16, .{ .immediate = add_off }),
                    .br(.x17),
                };
                comptime assert(insts.len == 4);
                for (insts) |inst| try writer.writeInt(util.encoding.Instruction.Backing, @bitCast(inst), .little);
            }
        }

        const util = @import("../aarch64.zig");
    };
};

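/// Synthesizes the `.got.plt` section: the address of `_DYNAMIC`, two slots reserved for
/// the dynamic loader, and one slot per PLT entry initially pointing back at `.plt`.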
pub const GotPltSection = struct {
    pub const preamble_size = 24;

    pub fn size(got_plt: GotPltSection, elf_file: *Elf) usize {
        _ = got_plt;
        return preamble_size + elf_file.plt.symbols.items.len * 8;
    }

    pub fn write(got_plt: GotPltSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        _ = got_plt;
        {
            // [0]: _DYNAMIC
            const symbol = elf_file.linkerDefinedPtr().?.dynamicSymbol(elf_file).?;
            try writer.writeInt(u64, @intCast(symbol.address(.{}, elf_file)), .little);
        }
        // [1]: 0x0
        // [2]: 0x0
        try writer.writeInt(u64, 0x0, .little);
        try writer.writeInt(u64, 0x0, .little);
        if (elf_file.section_indexes.plt) |shndx| {
            const plt_addr = elf_file.sections.items(.shdr)[shndx].sh_addr;
            for (0..elf_file.plt.symbols.items.len) |_| {
                // [N]: .plt
                try writer.writeInt(u64, plt_addr, .little);
            }
        }
    }
};

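/// Synthesizes the `.plt.got` section: trampolines for symbols that are called through
/// the PLT but whose addresses are resolved via their regular `.got` entries.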
pub const PltGotSection = struct {
    symbols: std.ArrayList(Elf.Ref) = .empty,
    output_symtab_ctx: Elf.SymtabCtx = .{},

    pub fn deinit(plt_got: *PltGotSection, allocator: Allocator) void {
        plt_got.symbols.deinit(allocator);
    }

    pub fn addSymbol(plt_got: *PltGotSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = @as(u32, @intCast(plt_got.symbols.items.len));
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.has_pltgot = true;
        symbol.addExtra(.{ .plt_got = index }, elf_file);
        try plt_got.symbols.append(gpa, ref);
    }

    pub fn size(plt_got: PltGotSection, elf_file: *Elf) usize {
        return plt_got.symbols.items.len * entrySize(elf_file.getTarget().cpu.arch);
    }

    pub fn entrySize(cpu_arch: std.Target.Cpu.Arch) usize {
        return switch (cpu_arch) {
            .x86_64 => 16,
            .aarch64 => 4 * @sizeOf(u32),
            else => @panic("TODO implement PltGotSection.entrySize for this arch"),
        };
    }

    pub fn write(plt_got: PltGotSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const cpu_arch = elf_file.getTarget().cpu.arch;
        switch (cpu_arch) {
            .x86_64 => try x86_64.write(plt_got, elf_file, writer),
            .aarch64 => try aarch64.write(plt_got, elf_file, writer),
            else => return error.UnsupportedCpuArch,
        }
    }

    pub fn updateSymtabSize(plt_got: *PltGotSection, elf_file: *Elf) void {
        plt_got.output_symtab_ctx.nlocals = @as(u32, @intCast(plt_got.symbols.items.len));
        for (plt_got.symbols.items) |ref| {
            const name = elf_file.symbol(ref).?.name(elf_file);
            plt_got.output_symtab_ctx.strsize += @as(u32, @intCast(name.len + "$pltgot".len)) + 1;
        }
    }

    pub fn writeSymtab(plt_got: PltGotSection, elf_file: *Elf) void {
        for (plt_got.symbols.items, plt_got.output_symtab_ctx.ilocal..) |ref, ilocal| {
            const sym = elf_file.symbol(ref).?;
            const st_name = @as(u32, @intCast(elf_file.strtab.items.len));
            elf_file.strtab.appendSliceAssumeCapacity(sym.name(elf_file));
            elf_file.strtab.appendSliceAssumeCapacity("$pltgot");
            elf_file.strtab.appendAssumeCapacity(0);
            elf_file.symtab.items[ilocal] = .{
                .st_name = st_name,
                .st_info = elf.STT_FUNC,
                .st_other = 0,
                .st_shndx = @intCast(elf_file.section_indexes.plt_got.?),
                .st_value = @intCast(sym.pltGotAddress(elf_file)),
                .st_size = 16,
            };
        }
    }

    const x86_64 = struct {
        pub fn write(plt_got: PltGotSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
            for (plt_got.symbols.items) |ref| {
                const sym = elf_file.symbol(ref).?;
                const target_addr = sym.gotAddress(elf_file);
                const source_addr = sym.pltGotAddress(elf_file);
                const disp = @as(i64, @intCast(target_addr)) - @as(i64, @intCast(source_addr + 6)) - 4;
                var entry = [_]u8{
                    0xf3, 0x0f, 0x1e, 0xfa, // endbr64
                    0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp qword ptr [rip] -> .got[N]
                    0xcc, 0xcc, 0xcc, 0xcc, 0xcc, 0xcc,
                };
                mem.writeInt(i32, entry[6..][0..4], @as(i32, @intCast(disp)), .little);
                try writer.writeAll(&entry);
            }
        }
    };

    const aarch64 = struct {
        fn write(plt_got: PltGotSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
            for (plt_got.symbols.items) |ref| {
                const sym = elf_file.symbol(ref).?;
                const target_addr = sym.gotAddress(elf_file);
                const source_addr = sym.pltGotAddress(elf_file);
                const pages = try util.calcNumberOfPages(source_addr, target_addr);
                const off: u12 = @truncate(@as(u64, @bitCast(target_addr)));
                const insts = [_]util.encoding.Instruction{
                    .adrp(.x16, pages << 12),
                    .ldr(.x17, .{ .unsigned_offset = .{ .base = .x16, .offset = off } }),
                    .br(.x17),
                    .nop(),
                };
                comptime assert(insts.len == 4);
                for (insts) |inst| try writer.writeInt(util.encoding.Instruction.Backing, @bitCast(inst), .little);
            }
        }

        const util = @import("../aarch64.zig");
    };
};

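/// Collects symbols that require copy relocations: reserves space for each copied object
/// in the output section and emits the corresponding COPY dynamic relocations.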
pub const CopyRelSection = struct {
    symbols: std.ArrayList(Elf.Ref) = .empty,

    pub fn deinit(copy_rel: *CopyRelSection, allocator: Allocator) void {
        copy_rel.symbols.deinit(allocator);
    }

    pub fn addSymbol(copy_rel: *CopyRelSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = @as(u32, @intCast(copy_rel.symbols.items.len));
        const symbol = elf_file.symbol(ref).?;
        symbol.flags.import = true;
        symbol.flags.@"export" = true;
        symbol.flags.has_copy_rel = true;
        symbol.flags.weak = false;
        symbol.addExtra(.{ .copy_rel = index }, elf_file);
        try copy_rel.symbols.append(gpa, ref);

        const shared_object = symbol.file(elf_file).?.shared_object;
        if (shared_object.aliases == null) {
            try shared_object.initSymbolAliases(elf_file);
        }

        const aliases = shared_object.symbolAliases(ref.index, elf_file);
        for (aliases) |alias| {
            if (alias == ref.index) continue;
            const alias_sym = &shared_object.symbols.items[alias];
            alias_sym.flags.import = true;
            alias_sym.flags.@"export" = true;
            alias_sym.flags.has_copy_rel = true;
            alias_sym.flags.needs_copy_rel = true;
            alias_sym.flags.weak = false;
            try elf_file.dynsym.addSymbol(.{ .index = alias, .file = shared_object.index }, elf_file);
        }
    }

    pub fn updateSectionSize(copy_rel: CopyRelSection, shndx: u32, elf_file: *Elf) !void {
        const shdr = &elf_file.sections.items(.shdr)[shndx];
        for (copy_rel.symbols.items) |ref| {
            const symbol = elf_file.symbol(ref).?;
            const shared_object = symbol.file(elf_file).?.shared_object;
            const alignment = try symbol.dsoAlignment(elf_file);
            symbol.value = @intCast(mem.alignForward(u64, shdr.sh_size, alignment));
            shdr.sh_addralign = @max(shdr.sh_addralign, alignment);
            shdr.sh_size = @as(u64, @intCast(symbol.value)) + symbol.elfSym(elf_file).st_size;

            const aliases = shared_object.symbolAliases(ref.index, elf_file);
            for (aliases) |alias| {
                if (alias == ref.index) continue;
                const alias_sym = &shared_object.symbols.items[alias];
                alias_sym.value = symbol.value;
            }
        }
    }

    pub fn addRela(copy_rel: CopyRelSection, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const cpu_arch = elf_file.getTarget().cpu.arch;
        try elf_file.rela_dyn.ensureUnusedCapacity(gpa, copy_rel.numRela());

        relocs_log.debug(".copy.rel", .{});

        for (copy_rel.symbols.items) |ref| {
            const sym = elf_file.symbol(ref).?;
            assert(sym.flags.import and sym.flags.has_copy_rel);
            const extra = sym.extra(elf_file);
            elf_file.addRelaDynAssumeCapacity(.{
                .offset = @intCast(sym.address(.{}, elf_file)),
                .sym = extra.dynamic,
                .type = relocation.encode(.copy, cpu_arch),
            });
        }
    }

    pub fn numRela(copy_rel: CopyRelSection) usize {
        return copy_rel.symbols.items.len;
    }
};

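/// Synthesizes `.dynsym`: tracks which symbols receive dynamic symbol table entries and
/// sorts exported symbols by GNU hash bucket so `.gnu.hash` can reference them directly.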
pub const DynsymSection = struct {
    entries: std.ArrayList(Entry) = .empty,

    pub const Entry = struct {
        /// Ref of the symbol that is granted dynamic treatment (an entry in `.dynsym`)
        ref: Elf.Ref,
        /// Offset into .dynstrtab
        off: u32,
    };

    pub fn deinit(dynsym: *DynsymSection, allocator: Allocator) void {
        dynsym.entries.deinit(allocator);
    }

    pub fn addSymbol(dynsym: *DynsymSection, ref: Elf.Ref, elf_file: *Elf) !void {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const index = @as(u32, @intCast(dynsym.entries.items.len + 1));
        const sym = elf_file.symbol(ref).?;
        sym.flags.has_dynamic = true;
        sym.addExtra(.{ .dynamic = index }, elf_file);
        const off = try elf_file.insertDynString(sym.name(elf_file));
        try dynsym.entries.append(gpa, .{ .ref = ref, .off = off });
    }

    pub fn sort(dynsym: *DynsymSection, elf_file: *Elf) void {
        const Sort = struct {
            pub fn lessThan(ctx: *Elf, lhs: Entry, rhs: Entry) bool {
                const lhs_sym = ctx.symbol(lhs.ref).?;
                const rhs_sym = ctx.symbol(rhs.ref).?;

                if (lhs_sym.flags.@"export" != rhs_sym.flags.@"export") {
                    return rhs_sym.flags.@"export";
                }

                // TODO cache hash values
                const nbuckets = ctx.gnu_hash.num_buckets;
                const lhs_hash = GnuHashSection.hasher(lhs_sym.name(ctx)) % nbuckets;
                const rhs_hash = GnuHashSection.hasher(rhs_sym.name(ctx)) % nbuckets;

                if (lhs_hash == rhs_hash)
                    return lhs_sym.extra(ctx).dynamic < rhs_sym.extra(ctx).dynamic;
                return lhs_hash < rhs_hash;
            }
        };

        var num_exports: u32 = 0;
        for (dynsym.entries.items) |entry| {
            const sym = elf_file.symbol(entry.ref).?;
            if (sym.flags.@"export") num_exports += 1;
        }

        elf_file.gnu_hash.num_buckets = @divTrunc(num_exports, GnuHashSection.load_factor) + 1;

        std.mem.sort(Entry, dynsym.entries.items, elf_file, Sort.lessThan);

        for (dynsym.entries.items, 1..) |entry, index| {
            const sym = elf_file.symbol(entry.ref).?;
            var extra = sym.extra(elf_file);
            extra.dynamic = @as(u32, @intCast(index));
            sym.setExtra(extra, elf_file);
        }
    }

    pub fn size(dynsym: DynsymSection) usize {
        return dynsym.count() * @sizeOf(elf.Elf64_Sym);
    }

    pub fn count(dynsym: DynsymSection) u32 {
        return @as(u32, @intCast(dynsym.entries.items.len + 1));
    }

    pub fn write(dynsym: DynsymSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        try writer.writeStruct(Elf.null_sym, .little);
        for (dynsym.entries.items) |entry| {
            const sym = elf_file.symbol(entry.ref).?;
            var out_sym: elf.Elf64_Sym = Elf.null_sym;
            sym.setOutputSym(elf_file, &out_sym);
            out_sym.st_name = entry.off;
            try writer.writeStruct(out_sym, .little);
        }
    }
};

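/// Synthesizes the classic SysV `.hash` section over the dynamic symbol table.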
pub const HashSection = struct {
    buffer: std.ArrayList(u8) = .empty,

    pub fn deinit(hs: *HashSection, allocator: Allocator) void {
        hs.buffer.deinit(allocator);
    }

    pub fn generate(hs: *HashSection, elf_file: *Elf) !void {
        if (elf_file.dynsym.count() == 1) return;

        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const nsyms = elf_file.dynsym.count();

        const buckets = try gpa.alloc(u32, nsyms);
        defer gpa.free(buckets);
        @memset(buckets, 0);

        const chains = try gpa.alloc(u32, nsyms);
        defer gpa.free(chains);
        @memset(chains, 0);

        for (elf_file.dynsym.entries.items, 1..) |entry, i| {
            const name = elf_file.getDynString(entry.off);
            const hash = hasher(name) % buckets.len;
            chains[@as(u32, @intCast(i))] = buckets[hash];
            buckets[hash] = @as(u32, @intCast(i));
        }

        try hs.buffer.ensureTotalCapacityPrecise(gpa, (2 + nsyms * 2) * 4);
        var w: std.Io.Writer = .fixed(hs.buffer.unusedCapacitySlice());
        w.writeInt(u32, @as(u32, @intCast(nsyms)), .little) catch unreachable;
        w.writeInt(u32, @as(u32, @intCast(nsyms)), .little) catch unreachable;
        w.writeAll(@ptrCast(buckets)) catch unreachable;
        w.writeAll(@ptrCast(chains)) catch unreachable;
        hs.buffer.items.len += w.end;
    }

    pub inline fn size(hs: HashSection) usize {
        return hs.buffer.items.len;
    }

    pub fn hasher(name: [:0]const u8) u32 {
        var h: u32 = 0;
        var g: u32 = 0;
        for (name) |c| {
            h = (h << 4) + c;
            g = h & 0xf0000000;
            if (g > 0) h ^= g >> 24;
            h &= ~g;
        }
        return h;
    }
};

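/// Synthesizes the `.gnu.hash` section: bloom filter, bucket array, and hash chain for
/// the exported dynamic symbols.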
pub const GnuHashSection = struct {
    num_buckets: u32 = 0,
    num_bloom: u32 = 1,
    num_exports: u32 = 0,

    pub const load_factor = 8;
    pub const header_size = 16;
    pub const bloom_shift = 26;

    fn getExports(elf_file: *Elf) []const DynsymSection.Entry {
        const start = for (elf_file.dynsym.entries.items, 0..) |entry, i| {
            const sym = elf_file.symbol(entry.ref).?;
            if (sym.flags.@"export") break i;
        } else elf_file.dynsym.entries.items.len;
        return elf_file.dynsym.entries.items[start..];
    }

    inline fn bitCeil(x: u64) u64 {
        if (@popCount(x) == 1) return x;
        return @as(u64, @intCast(@as(u128, 1) << (64 - @clz(x))));
    }

    pub fn calcSize(hash: *GnuHashSection, elf_file: *Elf) !void {
        hash.num_exports = @as(u32, @intCast(getExports(elf_file).len));
        if (hash.num_exports > 0) {
            const num_bits = hash.num_exports * 12;
            hash.num_bloom = @as(u32, @intCast(bitCeil(@divTrunc(num_bits, 64))));
        }
    }

    pub fn size(hash: GnuHashSection) usize {
        return header_size + hash.num_bloom * 8 + hash.num_buckets * 4 + hash.num_exports * 4;
    }

    pub fn write(hash: GnuHashSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const exports = getExports(elf_file);
        const export_off = elf_file.dynsym.count() - hash.num_exports;

        try writer.writeInt(u32, hash.num_buckets, .little);
        try writer.writeInt(u32, export_off, .little);
        try writer.writeInt(u32, hash.num_bloom, .little);
        try writer.writeInt(u32, bloom_shift, .little);

        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const hashes = try gpa.alloc(u32, exports.len);
        defer gpa.free(hashes);
        const indices = try gpa.alloc(u32, exports.len);
        defer gpa.free(indices);

        // Compose and write the bloom filter
        const bloom = try gpa.alloc(u64, hash.num_bloom);
        defer gpa.free(bloom);
        @memset(bloom, 0);

        for (exports, 0..) |entry, i| {
            const sym = elf_file.symbol(entry.ref).?;
            const h = hasher(sym.name(elf_file));
            hashes[i] = h;
            indices[i] = h % hash.num_buckets;
            const idx = @divTrunc(h, 64) % hash.num_bloom;
            bloom[idx] |= @as(u64, 1) << @as(u6, @intCast(h % 64));
            bloom[idx] |= @as(u64, 1) << @as(u6, @intCast((h >> bloom_shift) % 64));
        }

        try writer.writeSliceEndian(u64, bloom, .little);

        // Fill in the hash bucket indices
        const buckets = try gpa.alloc(u32, hash.num_buckets);
        defer gpa.free(buckets);
        @memset(buckets, 0);

        for (0..hash.num_exports) |i| {
            if (buckets[indices[i]] == 0) {
                buckets[indices[i]] = @as(u32, @intCast(i + export_off));
            }
        }

        try writer.writeSliceEndian(u32, buckets, .little);

        // Finally, write the hash table
        const table = try gpa.alloc(u32, hash.num_exports);
        defer gpa.free(table);
        @memset(table, 0);

        for (0..hash.num_exports) |i| {
            const h = hashes[i];
            if (i == exports.len - 1 or indices[i] != indices[i + 1]) {
                table[i] = h | 1;
            } else {
                table[i] = h & ~@as(u32, 1);
            }
        }

        try writer.writeSliceEndian(u32, table, .little);
    }

    pub fn hasher(name: [:0]const u8) u32 {
        var h: u32 = 5381;
        for (name) |c| {
            h = (h << 5) +% h +% c;
        }
        return h;
    }
};

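/// Synthesizes `.gnu.version_r` (and fills in the matching `.gnu.version` entries) for
/// versioned symbols imported from shared objects.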
pub const VerneedSection = struct {
    verneed: std.ArrayList(elf.Elf64_Verneed) = .empty,
    vernaux: std.ArrayList(elf.Vernaux) = .empty,
    index: elf.Versym = .{ .VERSION = elf.Versym.GLOBAL.VERSION + 1, .HIDDEN = false },

    pub fn deinit(vern: *VerneedSection, allocator: Allocator) void {
        vern.verneed.deinit(allocator);
        vern.vernaux.deinit(allocator);
    }

    pub fn generate(vern: *VerneedSection, elf_file: *Elf) !void {
        const dynsyms = elf_file.dynsym.entries.items;
        const versyms = elf_file.versym.items;

        const VersionedSymbol = struct {
            /// Index in the output version table
            index: usize,
            /// Index of the shared object file that defines this symbol version
            shared_object: File.Index,
            /// Version index
            version_index: elf.Versym,

            fn soname(this: @This(), ctx: *Elf) []const u8 {
                const shared_object = ctx.file(this.shared_object).?.shared_object;
                return shared_object.soname();
            }

            fn versionString(this: @This(), ctx: *Elf) [:0]const u8 {
                const shared_object = ctx.file(this.shared_object).?.shared_object;
                return shared_object.versionString(this.version_index);
            }

            pub fn lessThan(ctx: *Elf, lhs: @This(), rhs: @This()) bool {
                if (lhs.shared_object == rhs.shared_object)
                    return @as(u16, @bitCast(lhs.version_index)) < @as(u16, @bitCast(rhs.version_index));
                return mem.lessThan(u8, lhs.soname(ctx), rhs.soname(ctx));
            }
        };

        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        var verneed = std.array_list.Managed(VersionedSymbol).init(gpa);
        defer verneed.deinit();
        try verneed.ensureTotalCapacity(dynsyms.len);

        for (dynsyms, 1..) |entry, i| {
            const symbol = elf_file.symbol(entry.ref).?;
            if (symbol.flags.import and symbol.version_index.VERSION > elf.Versym.GLOBAL.VERSION) {
                const shared_object = symbol.file(elf_file).?.shared_object;
                verneed.appendAssumeCapacity(.{
                    .index = i,
                    .shared_object = shared_object.index,
                    .version_index = symbol.version_index,
                });
            }
        }

        mem.sort(VersionedSymbol, verneed.items, elf_file, VersionedSymbol.lessThan);

        var last = verneed.items[0];
        var last_verneed = try vern.addVerneed(last.soname(elf_file), elf_file);
        var last_vernaux = try vern.addVernaux(last_verneed, last.versionString(elf_file), elf_file);
        versyms[last.index] = @bitCast(last_vernaux.other);

        for (verneed.items[1..]) |ver| {
            if (ver.shared_object == last.shared_object) {
                // https://github.com/ziglang/zig/issues/21678
                if (@as(u16, @bitCast(ver.version_index)) != @as(u16, @bitCast(last.version_index))) {
                    last_vernaux = try vern.addVernaux(last_verneed, ver.versionString(elf_file), elf_file);
                }
            } else {
                last_verneed = try vern.addVerneed(ver.soname(elf_file), elf_file);
                last_vernaux = try vern.addVernaux(last_verneed, ver.versionString(elf_file), elf_file);
            }
            last = ver;
            versyms[ver.index] = @bitCast(last_vernaux.other);
        }

        // Fixup offsets
        var count: usize = 0;
        var verneed_off: u32 = 0;
        var vernaux_off: u32 = @as(u32, @intCast(vern.verneed.items.len)) * @sizeOf(elf.Elf64_Verneed);
        for (vern.verneed.items, 0..) |*vsym, vsym_i| {
            if (vsym_i < vern.verneed.items.len - 1) vsym.vn_next = @sizeOf(elf.Elf64_Verneed);
            vsym.vn_aux = vernaux_off - verneed_off;
            var inner_off: u32 = 0;
            for (vern.vernaux.items[count..][0..vsym.vn_cnt], 0..) |*vaux, vaux_i| {
                if (vaux_i < vsym.vn_cnt - 1) vaux.next = @sizeOf(elf.Vernaux);
                inner_off += @sizeOf(elf.Vernaux);
            }
            vernaux_off += inner_off;
            verneed_off += @sizeOf(elf.Elf64_Verneed);
            count += vsym.vn_cnt;
        }
    }

    fn addVerneed(vern: *VerneedSection, soname: []const u8, elf_file: *Elf) !*elf.Elf64_Verneed {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const sym = try vern.verneed.addOne(gpa);
        sym.* = .{
            .vn_version = 1,
            .vn_cnt = 0,
            .vn_file = try elf_file.insertDynString(soname),
            .vn_aux = 0,
            .vn_next = 0,
        };
        return sym;
    }

    fn addVernaux(
        vern: *VerneedSection,
        verneed_sym: *elf.Elf64_Verneed,
        version: [:0]const u8,
        elf_file: *Elf,
    ) !elf.Vernaux {
        const comp = elf_file.base.comp;
        const gpa = comp.gpa;
        const sym = try vern.vernaux.addOne(gpa);
        sym.* = .{
            .hash = HashSection.hasher(version),
            .flags = 0,
            .other = @bitCast(vern.index),
            .name = try elf_file.insertDynString(version),
            .next = 0,
        };
        verneed_sym.vn_cnt += 1;
        vern.index.VERSION += 1;
        return sym.*;
    }

    pub fn size(vern: VerneedSection) usize {
        return vern.verneed.items.len * @sizeOf(elf.Elf64_Verneed) + vern.vernaux.items.len * @sizeOf(elf.Vernaux);
    }

    pub fn write(vern: VerneedSection, writer: *std.Io.Writer) !void {
        try writer.writeSliceEndian(elf.Elf64_Verneed, vern.verneed.items, .little);
        try writer.writeSliceEndian(elf.Vernaux, vern.vernaux.items, .little);
    }
};

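/// Synthesizes an output `SHT_GROUP` section from an input object's group, remapping
/// member section indices to output section indices.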
pub const GroupSection = struct {
    shndx: u32,
    cg_ref: Elf.Ref,

    fn group(cgs: GroupSection, elf_file: *Elf) *Elf.Group {
        const cg_file = elf_file.file(cgs.cg_ref.file).?;
        return cg_file.object.group(cgs.cg_ref.index);
    }

    pub fn symbol(cgs: GroupSection, elf_file: *Elf) *Symbol {
        const cg = cgs.group(elf_file);
        const object = cg.file(elf_file).object;
        const shdr = object.shdrs.items[cg.shndx];
        return &object.symbols.items[shdr.sh_info];
    }

    pub fn size(cgs: GroupSection, elf_file: *Elf) usize {
        const cg = cgs.group(elf_file);
        const members = cg.members(elf_file);
        return (members.len + 1) * @sizeOf(u32);
    }

    pub fn write(cgs: GroupSection, elf_file: *Elf, writer: *std.Io.Writer) !void {
        const cg = cgs.group(elf_file);
        const object = cg.file(elf_file).object;
        const members = cg.members(elf_file);
        try writer.writeInt(u32, if (cg.is_comdat) elf.GRP_COMDAT else 0, .little);
        for (members) |shndx| {
            const shdr = object.shdrs.items[shndx];
            switch (shdr.sh_type) {
                elf.SHT_RELA => {
                    const atom_index = object.atoms_indexes.items[shdr.sh_info];
                    const atom = object.atom(atom_index).?;
                    const rela_shndx = for (elf_file.sections.items(.shdr), 0..) |rela_shdr, rela_shndx| {
                        if (rela_shdr.sh_type == elf.SHT_RELA and
                            atom.output_section_index == rela_shdr.sh_info)
                            break rela_shndx;
                    } else unreachable;
                    try writer.writeInt(u32, @intCast(rela_shndx), .little);
                },
                else => {
                    const atom_index = object.atoms_indexes.items[shndx];
                    const atom = object.atom(atom_index).?;
                    try writer.writeInt(u32, atom.output_section_index, .little);
                },
            }
        }
    }
};

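/// Writes a single pointer-sized integer using the target's pointer width and endianness.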
1499
1500fn writeInt(value: anytype, elf_file: *Elf, writer: *std.Io.Writer) !void {
1501 const entry_size = elf_file.archPtrWidthBytes();
1502 const target = elf_file.getTarget();
1503 const endian = target.cpu.arch.endian();
1504 switch (entry_size) {
1505 2 => try writer.writeInt(i16, @intCast(value), endian),
1506 4 => try writer.writeInt(i32, @intCast(value), endian),
1507 8 => try writer.writeInt(i64, value, endian),
1508 else => unreachable,
1509 }
1510}
1511
1512const assert = std.debug.assert;
1513const builtin = @import("builtin");
1514const elf = std.elf;
1515const math = std.math;
1516const mem = std.mem;
1517const log = std.log.scoped(.link);
1518const relocs_log = std.log.scoped(.link_relocs);
1519const relocation = @import("relocation.zig");
1520const std = @import("std");
1521
1522const Allocator = std.mem.Allocator;
1523const Elf = @import("../Elf.zig");
1524const File = @import("file.zig").File;
1525const SharedObject = @import("SharedObject.zig");
1526const Symbol = @import("Symbol.zig");