master
/// Common linker-file state shared by all link backends.
base: link.File,
options: link.File.OpenOptions,
/// Memory-mapped output file; every section/segment below lives in a node of
/// its tree.
mf: MappedFile,
/// Well-known `MappedFile` node indices (ehdr, shdr, text, ...); see
/// `Node.Known`.
ni: Node.Known,
/// What each mapped-file node represents.
/// NOTE(review): presumably parallel to `mf`'s node list — confirm.
nodes: std.MultiArrayList(Node),
/// In-memory section table, indexed by `Symbol.Index.Shndx` (see `Shndx.get`).
shdrs: std.ArrayList(Section),
/// Mapped-file node backing each program header's segment.
phdrs: std.ArrayList(MappedFile.Node.Index),
/// Well-known symbol indices; see `Symbol.Known`.
si: Symbol.Known,
/// In-memory mirror of `.symtab`, indexed by `Symbol.Index`.
symtab: std.ArrayList(Symbol),
shstrtab: StringTable,
strtab: StringTable,
/// Symbols exported to `.dynsym`; a symbol's position in this map is its
/// dynamic symbol index (see `Symbol.Index.init`).
dynsym: std.AutoArrayHashMapUnmanaged(Symbol.Index, void),
dynstr: StringTable,
got: struct {
    len: u32,
    tlsld: GotIndex,
    /// Symbols with a PLT slot; a symbol's position in this map is its PLT
    /// index (see `Symbol.Index.init`).
    plt: std.AutoArrayHashMapUnmanaged(Symbol.Index, void),
},
/// DT_NEEDED entries. NOTE(review): keys are u32 — presumably `.dynstr`
/// offsets; confirm.
needed: std.AutoArrayHashMapUnmanaged(u32, void),
/// Linker inputs; accessed through `Node.InputIndex`.
inputs: std.ArrayList(struct {
    path: std.Build.Cache.Path,
    /// Archive member name, or null when not from an archive.
    member: ?[]const u8,
    /// First symbol contributed by this input.
    si: Symbol.Index,
}),
/// Sections copied from inputs; accessed through `Node.InputSectionIndex`.
input_sections: std.ArrayList(struct {
    ii: Node.InputIndex,
    file_location: MappedFile.Node.FileLocation,
    si: Symbol.Index,
}),
/// Index of the first input section not yet processed.
/// NOTE(review): consumer code is outside this chunk — confirm.
input_section_pending_index: u32,
/// NOTE(review): u32 keys — presumably `.strtab` name offsets of global
/// symbols; confirm against callers.
globals: std.AutoArrayHashMapUnmanaged(u32, Symbol.Index),
/// Symbol assigned to each named ZCU value (nav).
navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, Symbol.Index),
/// Symbol assigned to each unnamed ZCU constant (uav).
uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Symbol.Index),
/// Per-kind lazy symbols, with a cursor of how many have been flushed.
lazy: std.EnumArray(link.File.LazySymbol.Kind, struct {
    map: std.AutoArrayHashMapUnmanaged(InternPool.Index, Symbol.Index),
    pending_index: u32,
}),
/// Uavs awaiting emission, with the info needed to emit them.
pending_uavs: std.AutoArrayHashMapUnmanaged(Node.UavMapIndex, struct {
    alignment: InternPool.Alignment,
    src_loc: Zcu.LazySrcLoc,
}),
/// All relocations; per-location runs are contiguous and per-target chains
/// are linked via `Reloc.next` (see `Symbol.Index.applyTargetRelocs`).
relocs: std.ArrayList(Reloc),
const_prog_node: std.Progress.Node,
synth_prog_node: std.Progress.Node,
input_prog_node: std.Progress.Node,
  46
/// Tags a `MappedFile` node with what it represents in the output file.
pub const Node = union(enum) {
    file,
    ehdr,
    shdr,
    /// NOTE(review): payload presumably selects the program header/segment —
    /// confirm against phdr creation code.
    segment: u32,
    /// An output section, identified by its section symbol.
    section: Symbol.Index,
    /// A section copied from a linker input.
    input_section: InputSectionIndex,
    /// Output for a named ZCU value.
    nav: NavMapIndex,
    /// Output for an unnamed ZCU constant.
    uav: UavMapIndex,
    lazy_code: LazyMapRef.Index(.code),
    lazy_const_data: LazyMapRef.Index(.const_data),

    /// Index into `Elf.inputs`.
    pub const InputIndex = enum(u32) {
        _,

        pub fn path(ii: InputIndex, elf: *const Elf) std.Build.Cache.Path {
            return elf.inputs.items[@intFromEnum(ii)].path;
        }

        /// Archive member name, or `null` when the input is not an archive member.
        pub fn member(ii: InputIndex, elf: *const Elf) ?[]const u8 {
            return elf.inputs.items[@intFromEnum(ii)].member;
        }

        /// First symbol belonging to this input.
        pub fn symbol(ii: InputIndex, elf: *const Elf) Symbol.Index {
            return elf.inputs.items[@intFromEnum(ii)].si;
        }

        /// One past the last symbol of this input: the next input's first
        /// symbol, or the end of the symbol table for the final input.
        pub fn endSymbol(ii: InputIndex, elf: *const Elf) Symbol.Index {
            const next_ii = @intFromEnum(ii) + 1;
            return if (next_ii < elf.inputs.items.len)
                @as(InputIndex, @enumFromInt(next_ii)).symbol(elf)
            else
                @enumFromInt(elf.symtab.items.len);
        }
    };

    /// Index into `Elf.input_sections`.
    pub const InputSectionIndex = enum(u32) {
        _,

        /// The input this section came from.
        pub fn input(isi: InputSectionIndex, elf: *const Elf) InputIndex {
            return elf.input_sections.items[@intFromEnum(isi)].ii;
        }

        /// Where the section's bytes live in the input file.
        pub fn fileLocation(isi: InputSectionIndex, elf: *const Elf) MappedFile.Node.FileLocation {
            return elf.input_sections.items[@intFromEnum(isi)].file_location;
        }

        pub fn symbol(isi: InputSectionIndex, elf: *const Elf) Symbol.Index {
            return elf.input_sections.items[@intFromEnum(isi)].si;
        }
    };

    /// Index into the `Elf.navs` map (keys: navs, values: their symbols).
    pub const NavMapIndex = enum(u32) {
        _,

        pub fn navIndex(nmi: NavMapIndex, elf: *const Elf) InternPool.Nav.Index {
            return elf.navs.keys()[@intFromEnum(nmi)];
        }

        pub fn symbol(nmi: NavMapIndex, elf: *const Elf) Symbol.Index {
            return elf.navs.values()[@intFromEnum(nmi)];
        }
    };

    /// Index into the `Elf.uavs` map (keys: constant values, values: their symbols).
    pub const UavMapIndex = enum(u32) {
        _,

        pub fn uavValue(umi: UavMapIndex, elf: *const Elf) InternPool.Index {
            return elf.uavs.keys()[@intFromEnum(umi)];
        }

        pub fn symbol(umi: UavMapIndex, elf: *const Elf) Symbol.Index {
            return elf.uavs.values()[@intFromEnum(umi)];
        }
    };

    /// Reference into one of the `Elf.lazy` maps: the kind selects the map,
    /// `index` the entry within it.
    pub const LazyMapRef = struct {
        kind: link.File.LazySymbol.Kind,
        index: u32,

        /// A typed index whose kind is fixed at comptime, so a union payload
        /// carrying it stays 32 bits (see the size assertion below).
        pub fn Index(comptime kind: link.File.LazySymbol.Kind) type {
            return enum(u32) {
                _,

                pub fn ref(lmi: @This()) LazyMapRef {
                    return .{ .kind = kind, .index = @intFromEnum(lmi) };
                }

                pub fn lazySymbol(lmi: @This(), elf: *const Elf) link.File.LazySymbol {
                    return lmi.ref().lazySymbol(elf);
                }

                pub fn symbol(lmi: @This(), elf: *const Elf) Symbol.Index {
                    return lmi.ref().symbol(elf);
                }
            };
        }

        pub fn lazySymbol(lmr: LazyMapRef, elf: *const Elf) link.File.LazySymbol {
            return .{ .kind = lmr.kind, .ty = elf.lazy.getPtrConst(lmr.kind).map.keys()[lmr.index] };
        }

        pub fn symbol(lmr: LazyMapRef, elf: *const Elf) Symbol.Index {
            return elf.lazy.getPtrConst(lmr.kind).map.values()[lmr.index];
        }
    };

    /// Well-known mapped-file node indices. The `comptime` fields pin nodes
    /// that always exist at fixed indices; only `tls` is assigned at runtime.
    /// NOTE(review): the fixed values must match the order nodes are created
    /// during output-file initialization (not visible in this chunk) — confirm.
    pub const Known = struct {
        comptime file: MappedFile.Node.Index = .root,
        comptime ehdr: MappedFile.Node.Index = @enumFromInt(1),
        comptime shdr: MappedFile.Node.Index = @enumFromInt(2),
        comptime rodata: MappedFile.Node.Index = @enumFromInt(3),
        comptime phdr: MappedFile.Node.Index = @enumFromInt(4),
        comptime text: MappedFile.Node.Index = @enumFromInt(5),
        comptime data: MappedFile.Node.Index = @enumFromInt(6),
        comptime data_rel_ro: MappedFile.Node.Index = @enumFromInt(7),
        tls: MappedFile.Node.Index,
    };

    // Without runtime safety the union carries no safety tag, so the enum tag
    // plus a u32 payload must pack into 8 bytes.
    comptime {
        if (!std.debug.runtime_safety) std.debug.assert(@sizeOf(Node) == 8);
    }
};
 170
 171pub const Section = struct {
 172    si: Symbol.Index,
 173    rela_si: Symbol.Index,
 174    rela_free: RelIndex,
 175
 176    pub const RelIndex = enum(u32) {
 177        none,
 178        _,
 179
 180        pub fn wrap(i: ?u32) RelIndex {
 181            return @enumFromInt((i orelse return .none) + 1);
 182        }
 183        pub fn unwrap(ri: RelIndex) ?u32 {
 184            return switch (ri) {
 185                .none => null,
 186                _ => @intFromEnum(ri) - 1,
 187            };
 188        }
 189    };
 190};
 191
/// In-memory state for one symbol, parallel to its on-disk `.symtab` entry.
pub const Symbol = struct {
    /// Mapped-file node holding this symbol's bytes (`.none` when it has none).
    ni: MappedFile.Node.Index,
    /// Relocations contained within this symbol
    loc_relocs: Reloc.Index,
    /// Relocations targeting this symbol
    target_relocs: Reloc.Index,
    /// Presumably explicit padding; see the size assertion at the bottom.
    unused: u32,

    /// Index into `Elf.symtab`. The named tags are well-known symbols that
    /// live at fixed indices; see also `Symbol.Known`.
    pub const Index = enum(u32) {
        null,
        symtab,
        shstrtab,
        strtab,
        rodata,
        text,
        data,
        data_rel_ro,
        got,
        got_plt,
        plt,
        plt_sec,
        _,

        /// Returns the in-memory symbol. The pointer is only valid until
        /// `elf.symtab` grows.
        pub fn get(si: Symbol.Index, elf: *Elf) *Symbol {
            return &elf.symtab.items[@intFromEnum(si)];
        }

        /// The symbol's backing node; asserts one exists.
        pub fn node(si: Symbol.Index, elf: *Elf) MappedFile.Node.Index {
            const ni = si.get(elf).ni;
            assert(ni != .none);
            return ni;
        }

        pub fn next(si: Symbol.Index) Symbol.Index {
            return @enumFromInt(@intFromEnum(si) + 1);
        }

        /// A section-header index widened to 32 bits: the 16-bit reserved
        /// range SHN_LORESERVE..SHN_HIRESERVE is remapped to the top of the
        /// u32 range (via `reserve`) so ordinary indices can grow past 0xff00.
        pub const Shndx = enum(Tag) {
            UNDEF = std.elf.SHN_UNDEF,
            LIVEPATCH = reserve(std.elf.SHN_LIVEPATCH),
            ABS = reserve(std.elf.SHN_ABS),
            COMMON = reserve(std.elf.SHN_COMMON),
            _,

            pub const Tag = u32;

            pub const LORESERVE: Shndx = .fromSection(std.elf.SHN_LORESERVE);
            pub const HIRESERVE: Shndx = .fromSection(std.elf.SHN_HIRESERVE);
            comptime {
                // The remapping must place SHN_HIRESERVE exactly at the top
                // of the tag's range.
                assert(@intFromEnum(HIRESERVE) == std.math.maxInt(Tag));
            }

            /// Maps a reserved 16-bit section index to the top of the u32 range.
            fn reserve(sec: std.elf.Section) Tag {
                assert(sec >= std.elf.SHN_LORESERVE and sec <= std.elf.SHN_HIRESERVE);
                return @as(Tag, std.math.maxInt(Tag) - std.elf.SHN_HIRESERVE) + sec;
            }

            /// Widens an on-disk section index, remapping the reserved range.
            pub fn fromSection(sec: std.elf.Section) Shndx {
                return switch (sec) {
                    std.elf.SHN_UNDEF...std.elf.SHN_LORESERVE - 1 => @enumFromInt(sec),
                    std.elf.SHN_LORESERVE...std.elf.SHN_HIRESERVE => @enumFromInt(reserve(sec)),
                };
            }
            /// Narrows back to an on-disk section index. Returns null for
            /// values in the gap between the ordinary and remapped-reserved
            /// ranges (indices too large to represent on disk).
            pub fn toSection(s: Shndx) ?std.elf.Section {
                return switch (@intFromEnum(s)) {
                    std.elf.SHN_UNDEF...std.elf.SHN_LORESERVE - 1 => |sec| @intCast(sec),
                    std.elf.SHN_LORESERVE...reserve(std.elf.SHN_LORESERVE) - 1 => null,
                    reserve(std.elf.SHN_LORESERVE)...reserve(std.elf.SHN_HIRESERVE) => |sec| @intCast(
                        sec - reserve(std.elf.SHN_LORESERVE) + std.elf.SHN_LORESERVE,
                    ),
                };
            }

            /// The in-memory section state. Must not be called with a
            /// remapped reserved value (would index far out of bounds).
            pub fn get(s: Shndx, elf: *Elf) *Section {
                return &elf.shdrs.items[@intFromEnum(s)];
            }
        };
        /// Reads the symbol's section index from the on-disk symtab entry.
        pub fn shndx(si: Symbol.Index, elf: *Elf) Shndx {
            return .fromSection(switch (elf.symPtr(si)) {
                inline else => |sym| elf.targetLoad(&sym.shndx),
            });
        }

        pub const InitOptions = struct {
            name: []const u8 = "",
            lib_name: ?[]const u8 = null,
            value: u64 = 0,
            size: u64 = 0,
            type: std.elf.STT,
            bind: std.elf.STB = .LOCAL,
            visibility: std.elf.STV = .DEFAULT,
            shndx: Shndx = .UNDEF,
        };
        /// Writes symbol `si` into the on-disk `.symtab` (growing the section
        /// and its node as needed). For non-local symbols it additionally:
        /// - records the entry point when the name matches the configured entry,
        /// - mirrors the symbol into `.dynsym`/`.dynstr` when a dynamic
        ///   symbol table exists,
        /// - synthesizes `.plt`, `.got.plt`, `.plt.sec`, and `.rela.plt`
        ///   entries for undefined functions (x86-64 only so far).
        pub fn init(si: Symbol.Index, elf: *Elf, opts: InitOptions) !void {
            const comp = elf.base.comp;
            const gpa = comp.gpa;
            const target_endian = elf.targetEndian();
            const name_strtab_entry = try elf.string(.strtab, opts.name);
            // Grow the .symtab section header (and backing node, with
            // headroom) to cover all in-memory symbols.
            switch (elf.shdrPtr(elf.si.symtab.shndx(elf))) {
                inline else => |shdr| {
                    const old_size = elf.targetLoad(&shdr.size);
                    const ent_size = elf.targetLoad(&shdr.entsize);
                    const new_size = ent_size * elf.symtab.items.len;
                    if (new_size > old_size) {
                        elf.targetStore(&shdr.size, @intCast(new_size));
                        const symtab_ni = elf.si.symtab.node(elf);
                        _, const node_size = symtab_ni.location(&elf.mf).resolve(&elf.mf);
                        if (new_size > node_size) try symtab_ni.resize(
                            &elf.mf,
                            gpa,
                            new_size +| new_size / MappedFile.growth_factor,
                        );
                    }
                },
            }
            // Write the on-disk symtab entry in native order, then swap the
            // whole record if the target's endianness differs.
            switch (elf.symPtr(si)) {
                inline else => |sym, class| {
                    const Sym = class.ElfN().Sym;
                    sym.* = .{
                        .name = name_strtab_entry,
                        .value = @intCast(opts.value),
                        .size = @intCast(opts.size),
                        .info = .{ .type = opts.type, .bind = opts.bind },
                        .other = .{ .visibility = opts.visibility },
                        .shndx = opts.shndx.toSection().?,
                    };
                    if (target_endian != native_endian) std.mem.byteSwapAllFields(Sym, sym);
                },
            }
            // NOTE(review): for SHT_SYMTAB, sh_info conventionally holds the
            // index of the first non-local symbol, but this raises it for
            // every symbol — globals included. Confirm intended.
            switch (elf.shdrPtr(elf.si.symtab.shndx(elf))) {
                inline else => |shdr| elf.targetStore(&shdr.info, @max(
                    elf.targetLoad(&shdr.info),
                    @intFromEnum(si) + 1,
                )),
            }

            if (opts.bind == .LOCAL) return;
            // Resolve the effective entry-point name (the labeled switch
            // re-dispatches `.default` based on output mode) and record this
            // symbol as the entry point if its name matches.
            no_entry: {
                if (std.mem.eql(u8, opts.name, entry: switch (elf.options.entry) {
                    .default => switch (comp.config.output_mode) {
                        .Exe => continue :entry .enabled,
                        .Lib, .Obj => continue :entry .disabled,
                    },
                    .disabled => break :no_entry,
                    .enabled => "_start",
                    .named => |named| named,
                })) {
                    elf.si.entry = si;
                    switch (elf.ehdrPtr()) {
                        inline else => |ehdr| elf.targetStore(&ehdr.entry, @intCast(opts.value)),
                    }
                }
            }

            if (elf.si.dynsym == .null) return;
            // Mirror into .dynsym: this symbol's dynamic index is its
            // insertion position in the map.
            const dsi = elf.dynsym.count();
            try elf.dynsym.putNoClobber(gpa, si, {});
            const name_dynstr_entry = try elf.string(.dynstr, opts.name);
            // Grow the .dynsym section header and node, same as .symtab above.
            switch (elf.shdrPtr(elf.si.dynsym.shndx(elf))) {
                inline else => |shdr| {
                    const old_size = elf.targetLoad(&shdr.size);
                    const ent_size = elf.targetLoad(&shdr.entsize);
                    const new_size = ent_size * elf.dynsym.count();
                    if (new_size > old_size) {
                        elf.targetStore(&shdr.size, @intCast(new_size));
                        const dynsym_ni = elf.si.dynsym.node(elf);
                        _, const node_size = dynsym_ni.location(&elf.mf).resolve(&elf.mf);
                        if (new_size > node_size) try dynsym_ni.resize(
                            &elf.mf,
                            gpa,
                            new_size +| new_size / MappedFile.growth_factor,
                        );
                    }
                },
            }
            switch (elf.dynsymSlice()) {
                inline else => |dynsyms, class| {
                    const Sym = class.ElfN().Sym;
                    const dynsym = &dynsyms[dsi];
                    dynsym.* = .{
                        .name = name_dynstr_entry,
                        .value = @intCast(opts.value),
                        .size = @intCast(opts.size),
                        .info = .{ .type = opts.type, .bind = opts.bind },
                        .other = .{ .visibility = opts.visibility },
                        .shndx = opts.shndx.toSection().?,
                    };
                    if (target_endian != native_endian) std.mem.byteSwapAllFields(Sym, dynsym);
                },
            }

            // Lazy-binding machinery is only needed for undefined functions.
            if (opts.type != .FUNC or opts.shndx != .UNDEF) return;
            const plt_index: u32 = @intCast(elf.got.plt.count());
            try elf.got.plt.putNoClobber(gpa, si, {});
            switch (elf.ehdrField(.machine)) {
                else => |machine| @panic(@tagName(machine)),
                .X86_64 => {
                    // Append a 16-byte .plt entry after the reserved PLT0
                    // (hence the `1 +`): push the relocation index, then jump
                    // back to offset 0 of .plt (displacement 2 - new_size
                    // lands at the section start).
                    const plt_ni = elf.si.plt.node(elf);
                    _, const plt_node_size = plt_ni.location(&elf.mf).resolve(&elf.mf);
                    const plt_addr = plt_addr: switch (elf.shdrPtr(elf.si.plt.shndx(elf))) {
                        inline else => |shdr| {
                            const old_size = 16 * (1 + plt_index);
                            const new_size = old_size + 16;
                            elf.targetStore(&shdr.size, new_size);
                            if (new_size > plt_node_size) try plt_ni.resize(
                                &elf.mf,
                                gpa,
                                new_size +| new_size / MappedFile.growth_factor,
                            );
                            const plt_slice = plt_ni.slice(&elf.mf)[old_size..new_size];
                            @memcpy(plt_slice, &[16]u8{
                                0xf3, 0x0f, 0x1e, 0xfa, // endbr64
                                0x68, 0x00, 0x00, 0x00, 0x00, // push $0x0
                                0xe9, 0x00, 0x00, 0x00, 0x00, // jmp 0
                                0x66, 0x90, // xchg %ax,%ax
                            });
                            std.mem.writeInt(u32, plt_slice[5..][0..4], plt_index, target_endian);
                            std.mem.writeInt(
                                i32,
                                plt_slice[10..][0..4],
                                2 - @as(i32, @intCast(new_size)),
                                target_endian,
                            );
                            break :plt_addr elf.targetLoad(&shdr.addr) + old_size;
                        },
                    };

                    // Reserve an address-sized .got.plt slot (after the three
                    // reserved entries, hence `3 +`) initialized to point at
                    // the new .plt entry for lazy resolution.
                    const got_plt_shndx = elf.si.got_plt.shndx(elf);
                    const got_plt_ni = elf.si.got_plt.node(elf);
                    _, const got_plt_node_size = got_plt_ni.location(&elf.mf).resolve(&elf.mf);
                    const got_plt_addr = got_plt_addr: switch (elf.shdrPtr(got_plt_shndx)) {
                        inline else => |shdr, class| {
                            const Addr = class.ElfN().Addr;
                            const addr_size = @sizeOf(Addr);
                            const old_size = addr_size * (3 + plt_index);
                            const new_size = old_size + addr_size;
                            elf.targetStore(&shdr.size, new_size);
                            if (new_size > got_plt_node_size) try got_plt_ni.resize(
                                &elf.mf,
                                gpa,
                                new_size +| new_size / MappedFile.growth_factor,
                            );
                            std.mem.writeInt(
                                Addr,
                                got_plt_ni.slice(&elf.mf)[old_size..][0..addr_size],
                                @intCast(plt_addr),
                                target_endian,
                            );
                            break :got_plt_addr elf.targetLoad(&shdr.addr) + old_size;
                        },
                    };

                    // Append the matching 16-byte .plt.sec entry: an indirect
                    // jump through the new .got.plt slot. The RIP-relative
                    // displacement is measured from the end of the 10-byte
                    // endbr64+jmp sequence.
                    const plt_sec_ni = elf.si.plt_sec.node(elf);
                    _, const plt_sec_node_size = plt_sec_ni.location(&elf.mf).resolve(&elf.mf);
                    switch (elf.shdrPtr(elf.si.plt_sec.shndx(elf))) {
                        inline else => |shdr| {
                            const old_size = 16 * plt_index;
                            const new_size = old_size + 16;
                            elf.targetStore(&shdr.size, new_size);
                            if (new_size > plt_sec_node_size) try plt_sec_ni.resize(
                                &elf.mf,
                                gpa,
                                new_size +| new_size / MappedFile.growth_factor,
                            );
                            const plt_sec_slice = plt_sec_ni.slice(&elf.mf)[old_size..new_size];
                            @memcpy(plt_sec_slice, &[16]u8{
                                0xf3, 0x0f, 0x1e, 0xfa, // endbr64
                                0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp *0x0(%rip)
                                0x66, 0x0f, 0x1f, 0x44, 0x00, 0x00, // nopw 0x0(%rax,%rax,1)
                            });
                            std.mem.writeInt(
                                i32,
                                plt_sec_slice[6..][0..4],
                                @intCast(@as(i64, @bitCast(
                                    got_plt_addr -% (elf.targetLoad(&shdr.addr) + old_size + 10),
                                ))),
                                target_endian,
                            );
                        },
                    }

                    // Append the JUMP_SLOT dynamic relocation for the new
                    // .got.plt slot, referencing the dynamic symbol index.
                    const rela_plt_si = got_plt_shndx.get(elf).rela_si;
                    const rela_plt_ni = rela_plt_si.node(elf);
                    _, const rela_plt_node_size = rela_plt_ni.location(&elf.mf).resolve(&elf.mf);
                    switch (elf.shdrPtr(rela_plt_si.shndx(elf))) {
                        inline else => |shdr, class| {
                            const Rela = class.ElfN().Rela;
                            const rela_size = elf.targetLoad(&shdr.entsize);
                            const old_size = rela_size * plt_index;
                            const new_size = old_size + rela_size;
                            elf.targetStore(&shdr.size, new_size);
                            if (new_size > rela_plt_node_size) try rela_plt_ni.resize(
                                &elf.mf,
                                gpa,
                                new_size +| new_size / MappedFile.growth_factor,
                            );
                            const rela: *Rela = @ptrCast(@alignCast(
                                rela_plt_ni.slice(&elf.mf)[@intCast(old_size)..@intCast(new_size)],
                            ));
                            rela.* = .{
                                .offset = @intCast(got_plt_addr),
                                .info = .{
                                    .type = @intFromEnum(std.elf.R_X86_64.JUMP_SLOT),
                                    .sym = @intCast(dsi),
                                },
                                .addend = 0,
                            };
                            if (target_endian != native_endian) std.mem.byteSwapAllFields(Rela, rela);
                        },
                    }
                    try rela_plt_ni.resized(gpa, &elf.mf);
                },
            }
        }

        /// Called with the symbol's new address after its node moved:
        /// rewrites the on-disk value, keeps the ehdr entry point in sync,
        /// and re-applies relocations on both sides of the symbol.
        pub fn flushMoved(si: Symbol.Index, elf: *Elf, value: u64) void {
            switch (elf.symPtr(si)) {
                inline else => |sym, class| {
                    elf.targetStore(&sym.value, @intCast(value));
                    if (si == elf.si.entry) {
                        @branchHint(.unlikely);
                        // sym.value is already in target byte order, so it
                        // can be copied into e_entry without another swap.
                        @field(elf.ehdrPtr(), @tagName(class)).entry = sym.value;
                    }
                },
            }
            si.applyLocationRelocs(elf);
            si.applyTargetRelocs(elf);
        }

        /// Re-applies all relocations whose bytes live inside this symbol.
        /// Relocations for one location are stored contiguously starting at
        /// `loc_relocs`, so iteration stops at the first foreign entry.
        /// No-op for relocatable output, which keeps relocations on disk.
        pub fn applyLocationRelocs(si: Symbol.Index, elf: *Elf) void {
            if (elf.ehdrField(.type) == .REL) return;
            switch (si.get(elf).loc_relocs) {
                .none => {},
                else => |loc_relocs| for (elf.relocs.items[@intFromEnum(loc_relocs)..]) |*reloc| {
                    if (reloc.loc != si) break;
                    reloc.apply(elf);
                },
            }
        }

        /// Re-applies all relocations that target this symbol, following the
        /// per-target linked list threaded through `Reloc.next`.
        pub fn applyTargetRelocs(si: Symbol.Index, elf: *Elf) void {
            if (elf.ehdrField(.type) == .REL) return;
            var ri = si.get(elf).target_relocs;
            while (ri != .none) {
                const reloc = ri.get(elf);
                assert(reloc.target == si);
                reloc.apply(elf);
                ri = reloc.next;
            }
        }

        /// Deletes the contiguous run of relocations located inside this
        /// symbol and clears its head index.
        pub fn deleteLocationRelocs(si: Symbol.Index, elf: *Elf) void {
            const sym = si.get(elf);
            for (elf.relocs.items[@intFromEnum(sym.loc_relocs)..]) |*reloc| {
                if (reloc.loc != si) break;
                reloc.delete(elf);
            }
            sym.loc_relocs = .none;
        }
    };

    /// Well-known symbol indices. The `comptime` fields mirror the fixed
    /// tags of `Symbol.Index`; the remaining fields are assigned at runtime
    /// (`.null` meaning absent, cf. the `dynsym` check in `init`).
    pub const Known = struct {
        comptime symtab: Symbol.Index = .symtab,
        comptime shstrtab: Symbol.Index = .shstrtab,
        comptime strtab: Symbol.Index = .strtab,
        comptime rodata: Symbol.Index = .rodata,
        comptime text: Symbol.Index = .text,
        comptime data: Symbol.Index = .data,
        comptime data_rel_ro: Symbol.Index = .data_rel_ro,
        comptime got: Symbol.Index = .got,
        comptime got_plt: Symbol.Index = .got_plt,
        comptime plt: Symbol.Index = .plt,
        comptime plt_sec: Symbol.Index = .plt_sec,
        dynsym: Symbol.Index,
        dynstr: Symbol.Index,
        dynamic: Symbol.Index,
        tdata: Symbol.Index,
        entry: Symbol.Index,
    };

    // Keep the per-symbol footprint at 16 bytes (checked only in unsafe
    // builds, mirroring the Node size check above).
    comptime {
        if (!std.debug.runtime_safety) std.debug.assert(@sizeOf(Symbol) == 16);
    }
};
 576
/// Deduplicating interner for one string-table section (`.strtab`,
/// `.shstrtab`, `.dynstr`). The map stores only byte offsets into the
/// section; the string bytes themselves live in the section's mapped-file
/// node, so hashing and equality read through a slice of that node supplied
/// per call.
pub const StringTable = struct {
    map: std.HashMapUnmanaged(u32, void, StringTable.Context, std.hash_map.default_max_load_percentage),

    /// Hash context for stored keys: a key is the offset of a 0-terminated
    /// string within `slice`.
    const Context = struct {
        slice: []const u8,

        pub fn eql(_: Context, lhs_key: u32, rhs_key: u32) bool {
            return lhs_key == rhs_key;
        }

        pub fn hash(ctx: Context, key: u32) u64 {
            return std.hash_map.hashString(std.mem.sliceTo(ctx.slice[key..], 0));
        }
    };

    /// Adapted context for looking up a probe string (not yet interned)
    /// against stored offsets.
    const Adapter = struct {
        slice: []const u8,

        pub fn eql(adapter: Adapter, lhs_key: []const u8, rhs_key: u32) bool {
            // Stored string equals the probe iff it starts with the probe's
            // bytes and is 0-terminated immediately after them.
            // NOTE(review): when a match runs exactly to the end of `slice`,
            // `rhs_key + lhs_key.len` indexes one past the last byte —
            // presumably node slices always retain the terminating 0 within
            // bounds; confirm.
            return std.mem.startsWith(u8, adapter.slice[rhs_key..], lhs_key) and
                adapter.slice[rhs_key + lhs_key.len] == 0;
        }

        pub fn hash(_: Adapter, key: []const u8) u64 {
            // Embedded 0 bytes would break the terminator-based storage format.
            assert(std.mem.indexOfScalar(u8, key, 0) == null);
            return std.hash_map.hashString(key);
        }
    };

    /// Returns the offset of `key` within the string-table section backed by
    /// symbol `si`, appending it (0-terminated, growing the section header
    /// and node as needed) when absent.
    pub fn get(st: *StringTable, elf: *Elf, si: Symbol.Index, key: []const u8) !u32 {
        const gpa = elf.base.comp.gpa;
        const ni = si.node(elf);
        const slice_const = ni.sliceConst(&elf.mf);
        const gop = try st.map.getOrPutContextAdapted(
            gpa,
            key,
            StringTable.Adapter{ .slice = slice_const },
            .{ .slice = slice_const },
        );
        if (gop.found_existing) return gop.key_ptr.*;
        // NOTE(review): the node is notified of a content change before the
        // bytes are written below — confirm `resized` semantics allow this.
        try ni.resized(gpa, &elf.mf);
        // Bump the section's size by key.len + 1 (for the 0 terminator).
        const old_size, const new_size = size: switch (elf.shdrPtr(si.shndx(elf))) {
            inline else => |shdr| {
                const old_size: u32 = @intCast(elf.targetLoad(&shdr.size));
                const new_size: u32 = @intCast(old_size + key.len + 1);
                elf.targetStore(&shdr.size, new_size);
                break :size .{ old_size, new_size };
            },
        };
        // Grow the backing node with headroom when the new size doesn't fit.
        _, const node_size = ni.location(&elf.mf).resolve(&elf.mf);
        if (new_size > node_size)
            try ni.resize(&elf.mf, gpa, new_size +| new_size / MappedFile.growth_factor);
        const slice = ni.slice(&elf.mf)[old_size..];
        @memcpy(slice[0..key.len], key);
        slice[key.len] = 0;
        gop.key_ptr.* = old_size;
        return old_size;
    }
};
 636
 637pub const GotIndex = enum(u32) {
 638    none = std.math.maxInt(u32),
 639    _,
 640
 641    pub fn wrap(i: ?u32) GotIndex {
 642        const gi: GotIndex = @enumFromInt(i orelse return .none);
 643        assert(gi != .none);
 644        return gi;
 645    }
 646    pub fn unwrap(gi: GotIndex) ?u32 {
 647        return switch (gi) {
 648            _ => @intFromEnum(gi),
 649            .none => null,
 650        };
 651    }
 652};
 653
 654pub const Reloc = extern struct {
 655    type: Reloc.Type,
 656    prev: Reloc.Index,
 657    next: Reloc.Index,
 658    loc: Symbol.Index,
 659    target: Symbol.Index,
 660    index: Section.RelIndex,
 661    offset: u64,
 662    addend: i64,
 663
    /// Machine-specific relocation type. An extern (untagged) union: the
    /// active field is implied by the output file's `e_machine`, which every
    /// accessor re-derives from `elf.ehdrField(.machine)`.
    pub const Type = extern union {
        X86_64: std.elf.R_X86_64,
        AARCH64: std.elf.R_AARCH64,
        RISCV: std.elf.R_RISCV,
        PPC64: std.elf.R_PPC64,

        /// The target machine's "no relocation" value.
        pub fn none(elf: *Elf) Reloc.Type {
            return switch (elf.ehdrField(.machine)) {
                else => unreachable,
                .AARCH64 => .{ .AARCH64 = .NONE },
                .PPC64 => .{ .PPC64 = .NONE },
                .RISCV => .{ .RISCV = .NONE },
                .X86_64 => .{ .X86_64 = .NONE },
            };
        }
        /// The target machine's 64-bit absolute-address relocation.
        pub fn absAddr(elf: *Elf) Reloc.Type {
            return switch (elf.ehdrField(.machine)) {
                else => unreachable,
                .AARCH64 => .{ .AARCH64 = .ABS64 },
                .PPC64 => .{ .PPC64 = .ADDR64 },
                .RISCV => .{ .RISCV = .@"64" },
                .X86_64 => .{ .X86_64 = .@"64" },
            };
        }
        /// 64-bit symbol-size relocation; only implemented for x86-64 so far.
        pub fn sizeAddr(elf: *Elf) Reloc.Type {
            return switch (elf.ehdrField(.machine)) {
                else => unreachable,
                .X86_64 => .{ .X86_64 = .SIZE64 },
            };
        }

        /// Wraps a raw `r_type` value in the field matching the target machine.
        pub fn wrap(int: u32, elf: *Elf) Reloc.Type {
            return switch (elf.ehdrField(.machine)) {
                else => unreachable,
                inline .AARCH64,
                .PPC64,
                .RISCV,
                .X86_64,
                => |machine| @unionInit(Reloc.Type, @tagName(machine), @enumFromInt(int)),
            };
        }
        /// Extracts the raw `r_type` value for the target machine.
        pub fn unwrap(rt: Reloc.Type, elf: *Elf) u32 {
            return switch (elf.ehdrField(.machine)) {
                else => unreachable,
                inline .AARCH64,
                .PPC64,
                .RISCV,
                .X86_64,
                => |machine| @intFromEnum(@field(rt, @tagName(machine))),
            };
        }
    };
 716
 717    pub const Index = enum(u32) {
 718        none = std.math.maxInt(u32),
 719        _,
 720
 721        pub fn get(si: Reloc.Index, elf: *Elf) *Reloc {
 722            return &elf.relocs.items[@intFromEnum(si)];
 723        }
 724    };
 725
    /// Resolves `reloc` and writes the relocated value directly into the
    /// location node's bytes. Only used for executable/shared-object outputs;
    /// relocatable (`.REL`) outputs keep relocations in `.rela` sections
    /// instead (see `delete`).
    pub fn apply(reloc: *const Reloc, elf: *Elf) void {
        assert(elf.ehdrField(.type) != .REL);
        // If the location node is gone, or either the location or the target
        // node has a pending move, the symbol values would be stale; skip for
        // now — presumably re-applied once nodes settle (handled elsewhere;
        // TODO confirm).
        const loc_ni = reloc.loc.get(elf).ni;
        switch (loc_ni) {
            .none => return,
            else => |ni| if (ni.hasMoved(&elf.mf)) return,
        }
        switch (reloc.target.get(elf).ni) {
            .none => {},
            else => |ni| if (ni.hasMoved(&elf.mf)) return,
        }
        // Bytes of the relocated field, starting at the reloc's offset within
        // the location node.
        const loc_slice = loc_ni.slice(&elf.mf)[@intCast(reloc.offset)..];
        const target_endian = elf.targetEndian();
        switch (elf.symtabSlice()) {
            // `class` is the comptime-known ELF class (32- vs 64-bit Sym
            // layout); `symtab` is the matching typed slice.
            inline else => |symtab, class| {
                const loc_sym = &symtab[@intFromEnum(reloc.loc)];
                const loc_shndx = elf.targetLoad(&loc_sym.shndx);
                // The location symbol must be defined in some section.
                assert(loc_shndx != std.elf.SHN_UNDEF);
                const target_sym = &symtab[@intFromEnum(reloc.target)];
                // S + A: target symbol value plus addend (wrapping).
                const target_value =
                    elf.targetLoad(&target_sym.value) +% @as(u64, @bitCast(reloc.addend));
                switch (elf.ehdrField(.machine)) {
                    else => |machine| @panic(@tagName(machine)),
                    .X86_64 => switch (reloc.type.X86_64) {
                        else => |kind| @panic(@tagName(kind)),
                        // R_X86_64_64: absolute 64-bit, S + A.
                        .@"64" => std.mem.writeInt(
                            u64,
                            loc_slice[0..8],
                            target_value,
                            target_endian,
                        ),
                        // R_X86_64_PC32: S + A - P, where P is the address of
                        // the relocated field (loc symbol value + offset).
                        .PC32 => std.mem.writeInt(
                            i32,
                            loc_slice[0..4],
                            @intCast(@as(i64, @bitCast(target_value -%
                                (elf.targetLoad(&loc_sym.value) + reloc.offset)))),
                            target_endian,
                        ),
                        // R_X86_64_PLT32: if the target has a PLT entry,
                        // point at its 16-byte slot in the `.plt.sec`
                        // section; otherwise degrade to plain PC-relative.
                        .PLT32 => std.mem.writeInt(
                            i32,
                            loc_slice[0..4],
                            @intCast(@as(i64, @bitCast(
                                if (elf.got.plt.getIndex(reloc.target)) |plt_index|
                                    elf.targetLoad(&@field(
                                        elf.shdrPtr(elf.si.plt_sec.shndx(elf)),
                                        @tagName(class),
                                    ).addr) +% 16 * plt_index +%
                                        @as(u64, @bitCast(reloc.addend)) -%
                                        (elf.targetLoad(&loc_sym.value) + reloc.offset)
                                else
                                    target_value -%
                                        (elf.targetLoad(&loc_sym.value) + reloc.offset),
                            ))),
                            target_endian,
                        ),
                        // R_X86_64_32: zero-extended absolute 32-bit.
                        .@"32" => std.mem.writeInt(
                            u32,
                            loc_slice[0..4],
                            @intCast(target_value),
                            target_endian,
                        ),
                        // R_X86_64_32S: sign-extended absolute 32-bit.
                        .@"32S" => std.mem.writeInt(
                            i32,
                            loc_slice[0..4],
                            @intCast(@as(i64, @bitCast(target_value))),
                            target_endian,
                        ),
                        // R_X86_64_TLSLD: PC-relative offset to the 8-byte
                        // TLS local-dynamic slot inside the GOT
                        // (.got address + addend + 8 * tlsld slot index - P).
                        .TLSLD => std.mem.writeInt(
                            i32,
                            loc_slice[0..4],
                            @intCast(@as(i64, @bitCast(
                                elf.targetLoad(&symtab[@intFromEnum(elf.si.got)].value) +%
                                    @as(u64, @bitCast(reloc.addend)) +%
                                    @as(u64, 8) * elf.got.tlsld.unwrap().? -%
                                    (elf.targetLoad(&loc_sym.value) + reloc.offset),
                            ))),
                            target_endian,
                        ),
                        // R_X86_64_DTPOFF32: the computed S + A written as-is
                        // (offset relative to its TLS block).
                        .DTPOFF32 => std.mem.writeInt(
                            i32,
                            loc_slice[0..4],
                            @intCast(@as(i64, @bitCast(target_value))),
                            target_endian,
                        ),
                        // R_X86_64_TPOFF32: offset relative to the end of the
                        // TLS segment — subtract the PT_TLS phdr's memsz.
                        .TPOFF32 => {
                            const phdr = @field(elf.phdrSlice(), @tagName(class));
                            const ph = &phdr[elf.getNode(elf.ni.tls).segment];
                            assert(elf.targetLoad(&ph.type) == .TLS);
                            std.mem.writeInt(
                                i32,
                                loc_slice[0..4],
                                @intCast(@as(i64, @bitCast(target_value -% elf.targetLoad(&ph.memsz)))),
                                target_endian,
                            );
                        },
                        // R_X86_64_SIZE32: symbol size + addend (32-bit).
                        .SIZE32 => std.mem.writeInt(
                            u32,
                            loc_slice[0..4],
                            @intCast(
                                elf.targetLoad(&target_sym.size) +% @as(u64, @bitCast(reloc.addend)),
                            ),
                            target_endian,
                        ),
                        // R_X86_64_SIZE64: symbol size + addend (64-bit).
                        .SIZE64 => std.mem.writeInt(
                            u64,
                            loc_slice[0..8],
                            elf.targetLoad(&target_sym.size) +% @as(u64, @bitCast(reloc.addend)),
                            target_endian,
                        ),
                    },
                }
            },
        }
    }
 840
    /// Unlinks `reloc` from its target symbol's relocation list and retires
    /// it. For relocatable outputs, the backing `.rela` entry is neutralized
    /// and its slot pushed onto the section's free list.
    pub fn delete(reloc: *Reloc, elf: *Elf) void {
        // Unlink from the doubly linked list rooted at the target symbol's
        // `target_relocs`.
        switch (reloc.prev) {
            .none => {
                // `reloc` is the list head, so the root must point at it.
                const target = reloc.target.get(elf);
                assert(target.target_relocs.get(elf) == reloc);
                target.target_relocs = reloc.next;
            },
            else => |prev| prev.get(elf).next = reloc.next,
        }
        switch (reloc.next) {
            .none => {},
            else => |next| next.get(elf).prev = reloc.prev,
        }
        switch (elf.ehdrField(.type)) {
            .NONE, .CORE, _ => unreachable,
            .REL => {
                // Relocatable output: the reloc occupies a slot in the loc
                // section's `.rela` section. Overwrite that slot with a no-op
                // entry whose `offset` field chains the previous free-list
                // head, then make this slot the new head.
                const sh = reloc.loc.shndx(elf).get(elf);
                switch (elf.shdrPtr(sh.rela_si.shndx(elf))) {
                    inline else => |shdr, class| {
                        const Rela = class.ElfN().Rela;
                        const ent_size = elf.targetLoad(&shdr.entsize);
                        const start = ent_size * reloc.index.unwrap().?;
                        const rela_slice = sh.rela_si.node(elf).slice(&elf.mf);
                        const rela: *Rela = @ptrCast(@alignCast(
                            rela_slice[@intCast(start)..][0..@intCast(ent_size)],
                        ));
                        // NOTE(review): this entry is written in native
                        // endianness (no targetStore/byteSwapAllFields here,
                        // unlike other header writes) — verify cross-endian
                        // handling of free-list entries.
                        rela.* = .{
                            .offset = @intFromEnum(sh.rela_free),
                            .info = .{
                                .type = @intCast(Reloc.Type.none(elf).unwrap(elf)),
                                .sym = 0,
                            },
                            .addend = 0,
                        };
                    },
                }
                sh.rela_free = reloc.index;
            },
            // Executable/shared outputs apply relocations in place and never
            // assign them `.rela` slots.
            .EXEC, .DYN => assert(reloc.index == .none),
        }
        reloc.* = undefined;
    }
 883
 884    comptime {
 885        if (!std.debug.runtime_safety) std.debug.assert(@sizeOf(Reloc) == 40);
 886    }
 887};
 888
/// Entry point used by the `link.File` interface to open an ELF output.
/// Delegates directly to `create`.
pub fn open(
    arena: std.mem.Allocator,
    comp: *Compilation,
    path: std.Build.Cache.Path,
    options: link.File.OpenOptions,
) !*Elf {
    return create(arena, comp, path, options);
}
/// Entry point used by the `link.File` interface when creating a fresh ELF
/// output. Identical to `open`; both delegate to `create`.
pub fn createEmpty(
    arena: std.mem.Allocator,
    comp: *Compilation,
    path: std.Build.Cache.Path,
    options: link.File.OpenOptions,
) !*Elf {
    return create(arena, comp, path, options);
}
/// Shared implementation behind `open`/`createEmpty`: derives the ELF
/// identity (class/data/OSABI/type/machine) from the compilation target,
/// creates the output file, initializes all linker state to empty, and
/// writes the initial headers. The returned `Elf` is allocated on `arena`;
/// its containers use `comp.gpa` and are released by `deinit`.
fn create(
    arena: std.mem.Allocator,
    comp: *Compilation,
    path: std.Build.Cache.Path,
    options: link.File.OpenOptions,
) !*Elf {
    const target = &comp.root_mod.resolved_target.result;
    assert(target.ofmt == .elf);
    // ELF class (32- vs 64-bit layouts) from the target pointer width.
    const class: std.elf.CLASS = switch (target.ptrBitWidth()) {
        0...32 => .@"32",
        33...64 => .@"64",
        else => return error.UnsupportedELFArchitecture,
    };
    // Byte order of every ELF data structure in the output.
    const data: std.elf.DATA = switch (target.cpu.arch.endian()) {
        .little => .@"2LSB",
        .big => .@"2MSB",
    };
    // OS/ABI identification byte. Zig switch arms are order-independent, so
    // the leading `else` only catches OS tags not listed below.
    const osabi: std.elf.OSABI = switch (target.os.tag) {
        else => if (target.abi.isGnu()) .GNU else .NONE,
        .freestanding, .other => .STANDALONE,
        .netbsd => .NETBSD,
        .illumos => .SOLARIS,
        .freebsd, .ps4 => .FREEBSD,
        .openbsd => .OPENBSD,
        .cuda => .CUDA,
        .amdhsa => .AMDGPU_HSA,
        .amdpal => .AMDGPU_PAL,
        .mesa3d => .AMDGPU_MESA3D,
    };
    // ELF file type: PIE executables (and Haiku executables) are ET_DYN;
    // objects and static libraries are ET_REL.
    const @"type": std.elf.ET = switch (comp.config.output_mode) {
        .Exe => if (comp.config.pie or target.os.tag == .haiku) .DYN else .EXEC,
        .Lib => switch (comp.config.link_mode) {
            .static => .REL,
            .dynamic => .DYN,
        },
        .Obj => .REL,
    };
    const machine = target.toElfMachine();
    // PT_INTERP path: only for dynamically linked executables, or for shared
    // libraries when a dynamic linker was explicitly requested.
    const maybe_interp = switch (comp.config.link_mode) {
        .static => null,
        .dynamic => switch (comp.config.output_mode) {
            .Exe => target.dynamic_linker.get(),
            .Lib => if (comp.root_mod.resolved_target.is_explicit_dynamic_linker)
                target.dynamic_linker.get()
            else
                null,
            .Obj => null,
        },
    };

    const elf = try arena.create(Elf);
    // Opened read+write: the linker maps and patches the file in place.
    const file = try path.root_dir.handle.adaptToNewApi().createFile(comp.io, path.sub_path, .{
        .read = true,
        .mode = link.File.determineMode(comp.config.output_mode, comp.config.link_mode),
    });
    errdefer file.close(comp.io);
    elf.* = .{
        .base = .{
            .tag = .elf2,

            .comp = comp,
            .emit = path,

            .file = .adaptFromNewApi(file),
            .gc_sections = false,
            .print_gc_sections = false,
            .build_id = .none,
            .allow_shlib_undefined = false,
            .stack_size = 0,
        },
        .options = options,
        .mf = try .init(file, comp.gpa),
        .ni = .{
            .tls = .none,
        },
        .nodes = .empty,
        .shdrs = .empty,
        .phdrs = .empty,
        .si = .{
            .dynsym = .null,
            .dynstr = .null,
            .dynamic = .null,
            .tdata = .null,
            .entry = .null,
        },
        .symtab = .empty,
        .shstrtab = .{
            .map = .empty,
        },
        .strtab = .{
            .map = .empty,
        },
        .dynsym = .empty,
        .dynstr = .{
            .map = .empty,
        },
        .got = .{
            .len = 0,
            .tlsld = .none,
            .plt = .empty,
        },
        .needed = .empty,
        .inputs = .empty,
        .input_sections = .empty,
        .input_section_pending_index = 0,
        .globals = .empty,
        .navs = .empty,
        .uavs = .empty,
        .lazy = comptime .initFill(.{
            .map = .empty,
            .pending_index = 0,
        }),
        .pending_uavs = .empty,
        .relocs = .empty,
        .const_prog_node = .none,
        .synth_prog_node = .none,
        .input_prog_node = .none,
    };
    // From here on, partially-built state is torn down by `deinit` (which
    // does not close the file, so the earlier errdefer still applies).
    errdefer elf.deinit();

    try elf.initHeaders(class, data, osabi, @"type", machine, maybe_interp);
    return elf;
}
1028
/// Releases every heap allocation owned by `elf`. Deallocation must not
/// fail, so this returns `void`.
/// NOTE(review): the output file handle in `elf.base` is not closed here —
/// presumably owned and closed by the `link.File` layer; confirm.
pub fn deinit(elf: *Elf) void {
    const gpa = elf.base.comp.gpa;
    elf.mf.deinit(gpa);
    elf.nodes.deinit(gpa);
    elf.shdrs.deinit(gpa);
    elf.phdrs.deinit(gpa);
    elf.symtab.deinit(gpa);
    elf.shstrtab.map.deinit(gpa);
    elf.strtab.map.deinit(gpa);
    elf.dynsym.deinit(gpa);
    elf.dynstr.map.deinit(gpa);
    elf.got.plt.deinit(gpa);
    elf.needed.deinit(gpa);
    // Archive member names are individually allocated; free them before
    // freeing the list that holds them.
    for (elf.inputs.items) |input| if (input.member) |m| gpa.free(m);
    elf.inputs.deinit(gpa);
    elf.input_sections.deinit(gpa);
    elf.globals.deinit(gpa);
    elf.navs.deinit(gpa);
    elf.uavs.deinit(gpa);
    for (&elf.lazy.values) |*lazy| lazy.map.deinit(gpa);
    elf.pending_uavs.deinit(gpa);
    elf.relocs.deinit(gpa);
    // Poison the state so use-after-deinit is caught in safe builds.
    elf.* = undefined;
}
1053
1054fn initHeaders(
1055    elf: *Elf,
1056    class: std.elf.CLASS,
1057    data: std.elf.DATA,
1058    osabi: std.elf.OSABI,
1059    @"type": std.elf.ET,
1060    machine: std.elf.EM,
1061    maybe_interp: ?[]const u8,
1062) !void {
1063    const comp = elf.base.comp;
1064    const gpa = comp.gpa;
1065    const have_dynamic_section = switch (@"type") {
1066        .NONE, .CORE, _ => unreachable,
1067        .REL => false,
1068        .EXEC => comp.config.link_mode == .dynamic,
1069        .DYN => true,
1070    };
1071    const addr_align: std.mem.Alignment = switch (class) {
1072        .NONE, _ => unreachable,
1073        .@"32" => .@"4",
1074        .@"64" => .@"8",
1075    };
1076
1077    const shnum: u32 = 1;
1078    var phnum: u32 = 0;
1079    const phdr_phndx = phnum;
1080    phnum += 1;
1081    const interp_phndx = if (maybe_interp) |_| phndx: {
1082        defer phnum += 1;
1083        break :phndx phnum;
1084    } else undefined;
1085    const rodata_phndx = phnum;
1086    phnum += 1;
1087    const text_phndx = phnum;
1088    phnum += 1;
1089    const data_phndx = phnum;
1090    phnum += 1;
1091    const tls_phndx = if (comp.config.any_non_single_threaded) phndx: {
1092        defer phnum += 1;
1093        break :phndx phnum;
1094    } else undefined;
1095    const dynamic_phndx = if (have_dynamic_section) phndx: {
1096        defer phnum += 1;
1097        break :phndx phnum;
1098    } else undefined;
1099    const relro_phndx = phnum;
1100    phnum += 1;
1101
1102    const expected_nodes_len = expected_nodes_len: switch (@"type") {
1103        .NONE, .CORE, _ => unreachable,
1104        .REL => {
1105            defer phnum = 0;
1106            break :expected_nodes_len 5 + phnum;
1107        },
1108        .EXEC, .DYN => break :expected_nodes_len 9 + phnum * 2 +
1109            @as(usize, 4) * @intFromBool(have_dynamic_section),
1110    };
1111    try elf.nodes.ensureTotalCapacity(gpa, expected_nodes_len);
1112    try elf.shdrs.ensureTotalCapacity(gpa, shnum);
1113    try elf.phdrs.resize(gpa, phnum);
1114    try elf.symtab.ensureTotalCapacity(gpa, 1);
1115    if (have_dynamic_section) try elf.dynsym.ensureTotalCapacity(gpa, 1);
1116    elf.nodes.appendAssumeCapacity(.file);
1117
1118    switch (class) {
1119        .NONE, _ => unreachable,
1120        inline else => |ct_class| {
1121            const ElfN = ct_class.ElfN();
1122            assert(elf.ni.ehdr == try elf.mf.addOnlyChildNode(gpa, elf.ni.file, .{
1123                .size = @sizeOf(ElfN.Ehdr),
1124                .alignment = addr_align,
1125                .fixed = true,
1126            }));
1127            elf.nodes.appendAssumeCapacity(.ehdr);
1128
1129            const ehdr: *ElfN.Ehdr = @ptrCast(@alignCast(elf.ni.ehdr.slice(&elf.mf)));
1130            const EI = std.elf.EI;
1131            @memcpy(ehdr.ident[0..std.elf.MAGIC.len], std.elf.MAGIC);
1132            ehdr.ident[EI.CLASS] = @intFromEnum(class);
1133            ehdr.ident[EI.DATA] = @intFromEnum(data);
1134            ehdr.ident[EI.VERSION] = 1;
1135            ehdr.ident[EI.OSABI] = @intFromEnum(osabi);
1136            ehdr.ident[EI.ABIVERSION] = 0;
1137            @memset(ehdr.ident[EI.PAD..], 0);
1138            ehdr.type = @"type";
1139            ehdr.machine = machine;
1140            ehdr.version = 1;
1141            ehdr.entry = 0;
1142            ehdr.phoff = 0;
1143            ehdr.shoff = 0;
1144            ehdr.flags = 0;
1145            ehdr.ehsize = @sizeOf(ElfN.Ehdr);
1146            ehdr.phentsize = @sizeOf(ElfN.Phdr);
1147            ehdr.phnum = @min(phnum, std.elf.PN_XNUM);
1148            ehdr.shentsize = @sizeOf(ElfN.Shdr);
1149            ehdr.shnum = if (shnum < std.elf.SHN_LORESERVE) shnum else 0;
1150            ehdr.shstrndx = std.elf.SHN_UNDEF;
1151            if (elf.targetEndian() != native_endian) std.mem.byteSwapAllFields(ElfN.Ehdr, ehdr);
1152        },
1153    }
1154
1155    assert(elf.ni.shdr == try elf.mf.addLastChildNode(gpa, elf.ni.file, .{
1156        .size = elf.ehdrField(.shentsize) * elf.ehdrField(.shnum),
1157        .alignment = elf.mf.flags.block_size,
1158        .moved = true,
1159        .resized = true,
1160    }));
1161    elf.nodes.appendAssumeCapacity(.shdr);
1162
1163    var ph_vaddr: u32 = if (@"type" != .REL) ph_vaddr: {
1164        assert(elf.ni.rodata == try elf.mf.addLastChildNode(gpa, elf.ni.file, .{
1165            .alignment = elf.mf.flags.block_size,
1166            .moved = true,
1167            .bubbles_moved = false,
1168        }));
1169        elf.nodes.appendAssumeCapacity(.{ .segment = rodata_phndx });
1170        elf.phdrs.items[rodata_phndx] = elf.ni.rodata;
1171
1172        assert(elf.ni.phdr == try elf.mf.addOnlyChildNode(gpa, elf.ni.rodata, .{
1173            .size = elf.ehdrField(.phentsize) * elf.ehdrField(.phnum),
1174            .alignment = addr_align,
1175            .moved = true,
1176            .resized = true,
1177            .bubbles_moved = false,
1178        }));
1179        elf.nodes.appendAssumeCapacity(.{ .segment = phdr_phndx });
1180        elf.phdrs.items[phdr_phndx] = elf.ni.phdr;
1181
1182        assert(elf.ni.text == try elf.mf.addLastChildNode(gpa, elf.ni.file, .{
1183            .alignment = elf.mf.flags.block_size,
1184            .moved = true,
1185            .bubbles_moved = false,
1186        }));
1187        elf.nodes.appendAssumeCapacity(.{ .segment = text_phndx });
1188        elf.phdrs.items[text_phndx] = elf.ni.text;
1189
1190        assert(elf.ni.data == try elf.mf.addLastChildNode(gpa, elf.ni.file, .{
1191            .alignment = elf.mf.flags.block_size,
1192            .moved = true,
1193            .bubbles_moved = false,
1194        }));
1195        elf.nodes.appendAssumeCapacity(.{ .segment = data_phndx });
1196        elf.phdrs.items[data_phndx] = elf.ni.data;
1197
1198        assert(elf.ni.data_rel_ro == try elf.mf.addOnlyChildNode(gpa, elf.ni.data, .{
1199            .alignment = elf.mf.flags.block_size,
1200            .moved = true,
1201            .bubbles_moved = false,
1202        }));
1203        elf.nodes.appendAssumeCapacity(.{ .segment = relro_phndx });
1204        elf.phdrs.items[relro_phndx] = elf.ni.data_rel_ro;
1205
1206        break :ph_vaddr switch (elf.ehdrField(.type)) {
1207            .NONE, .CORE, _ => unreachable,
1208            .REL, .DYN => 0,
1209            .EXEC => switch (machine) {
1210                .@"386" => 0x400000,
1211                .AARCH64, .X86_64 => 0x200000,
1212                .PPC, .PPC64 => 0x10000000,
1213                .S390, .S390_OLD => 0x1000000,
1214                .OLD_SPARCV9, .SPARCV9 => 0x100000,
1215                else => 0x10000,
1216            },
1217        };
1218    } else undefined;
1219    switch (class) {
1220        .NONE, _ => unreachable,
1221        inline else => |ct_class| {
1222            const ElfN = ct_class.ElfN();
1223            const target_endian = elf.targetEndian();
1224
1225            if (@"type" != .REL) {
1226                const phdr: []ElfN.Phdr = @ptrCast(@alignCast(elf.ni.phdr.slice(&elf.mf)));
1227                const ph_phdr = &phdr[phdr_phndx];
1228                ph_phdr.* = .{
1229                    .type = .PHDR,
1230                    .offset = 0,
1231                    .vaddr = 0,
1232                    .paddr = 0,
1233                    .filesz = 0,
1234                    .memsz = 0,
1235                    .flags = .{ .R = true },
1236                    .@"align" = @intCast(elf.ni.phdr.alignment(&elf.mf).toByteUnits()),
1237                };
1238                if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_phdr);
1239
1240                if (maybe_interp) |_| {
1241                    const ph_interp = &phdr[interp_phndx];
1242                    ph_interp.* = .{
1243                        .type = .INTERP,
1244                        .offset = 0,
1245                        .vaddr = 0,
1246                        .paddr = 0,
1247                        .filesz = 0,
1248                        .memsz = 0,
1249                        .flags = .{ .R = true },
1250                        .@"align" = 1,
1251                    };
1252                    if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_interp);
1253                }
1254
1255                _, const rodata_size = elf.ni.rodata.location(&elf.mf).resolve(&elf.mf);
1256                const ph_rodata = &phdr[rodata_phndx];
1257                ph_rodata.* = .{
1258                    .type = if (rodata_size == 0) .NULL else .LOAD,
1259                    .offset = 0,
1260                    .vaddr = ph_vaddr,
1261                    .paddr = ph_vaddr,
1262                    .filesz = @intCast(rodata_size),
1263                    .memsz = @intCast(rodata_size),
1264                    .flags = .{ .R = true },
1265                    .@"align" = @intCast(elf.ni.rodata.alignment(&elf.mf).toByteUnits()),
1266                };
1267                if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_rodata);
1268                ph_vaddr += @intCast(rodata_size);
1269
1270                _, const text_size = elf.ni.text.location(&elf.mf).resolve(&elf.mf);
1271                const ph_text = &phdr[text_phndx];
1272                ph_text.* = .{
1273                    .type = if (text_size == 0) .NULL else .LOAD,
1274                    .offset = 0,
1275                    .vaddr = ph_vaddr,
1276                    .paddr = ph_vaddr,
1277                    .filesz = @intCast(text_size),
1278                    .memsz = @intCast(text_size),
1279                    .flags = .{ .R = true, .X = true },
1280                    .@"align" = @intCast(elf.ni.text.alignment(&elf.mf).toByteUnits()),
1281                };
1282                if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_text);
1283                ph_vaddr += @intCast(text_size);
1284
1285                _, const data_size = elf.ni.data.location(&elf.mf).resolve(&elf.mf);
1286                const ph_data = &phdr[data_phndx];
1287                ph_data.* = .{
1288                    .type = if (data_size == 0) .NULL else .LOAD,
1289                    .offset = 0,
1290                    .vaddr = ph_vaddr,
1291                    .paddr = ph_vaddr,
1292                    .filesz = @intCast(data_size),
1293                    .memsz = @intCast(data_size),
1294                    .flags = .{ .R = true, .W = true },
1295                    .@"align" = @intCast(elf.ni.data.alignment(&elf.mf).toByteUnits()),
1296                };
1297                if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_data);
1298                ph_vaddr += @intCast(data_size);
1299
1300                if (comp.config.any_non_single_threaded) {
1301                    const ph_tls = &phdr[tls_phndx];
1302                    ph_tls.* = .{
1303                        .type = .TLS,
1304                        .offset = 0,
1305                        .vaddr = 0,
1306                        .paddr = 0,
1307                        .filesz = 0,
1308                        .memsz = 0,
1309                        .flags = .{ .R = true },
1310                        .@"align" = @intCast(elf.mf.flags.block_size.toByteUnits()),
1311                    };
1312                    if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_tls);
1313                }
1314
1315                if (have_dynamic_section) {
1316                    const ph_dynamic = &phdr[dynamic_phndx];
1317                    ph_dynamic.* = .{
1318                        .type = .DYNAMIC,
1319                        .offset = 0,
1320                        .vaddr = 0,
1321                        .paddr = 0,
1322                        .filesz = 0,
1323                        .memsz = 0,
1324                        .flags = .{ .R = true, .W = true },
1325                        .@"align" = @intCast(addr_align.toByteUnits()),
1326                    };
1327                    if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_dynamic);
1328                }
1329
1330                const ph_relro = &phdr[relro_phndx];
1331                ph_relro.* = .{
1332                    .type = .GNU_RELRO,
1333                    .offset = 0,
1334                    .vaddr = 0,
1335                    .paddr = 0,
1336                    .filesz = 0,
1337                    .memsz = 0,
1338                    .flags = .{ .R = true },
1339                    .@"align" = @intCast(elf.mf.flags.block_size.toByteUnits()),
1340                };
1341                if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Phdr, ph_relro);
1342            }
1343
1344            const sh_undef: *ElfN.Shdr = @ptrCast(@alignCast(elf.ni.shdr.slice(&elf.mf)));
1345            sh_undef.* = .{
1346                .name = try elf.string(.shstrtab, ""),
1347                .type = .NULL,
1348                .flags = .{ .shf = .{} },
1349                .addr = 0,
1350                .offset = 0,
1351                .size = if (shnum < std.elf.SHN_LORESERVE) 0 else shnum,
1352                .link = 0,
1353                .info = if (phnum < std.elf.PN_XNUM) 0 else phnum,
1354                .addralign = 0,
1355                .entsize = 0,
1356            };
1357            if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Shdr, sh_undef);
1358            elf.shdrs.appendAssumeCapacity(.{ .si = .null, .rela_si = .null, .rela_free = .none });
1359
1360            elf.symtab.addOneAssumeCapacity().* = .{
1361                .ni = .none,
1362                .loc_relocs = .none,
1363                .target_relocs = .none,
1364                .unused = 0,
1365            };
1366            assert(elf.si.symtab == try elf.addSection(elf.ni.file, .{
1367                .type = .SYMTAB,
1368                .size = @sizeOf(ElfN.Sym) * 1,
1369                .addralign = addr_align,
1370                .entsize = @sizeOf(ElfN.Sym),
1371                .node_align = elf.mf.flags.block_size,
1372            }));
1373            const symtab_null = @field(elf.symPtr(.null), @tagName(ct_class));
1374            symtab_null.* = .{
1375                .name = try elf.string(.strtab, ""),
1376                .value = 0,
1377                .size = 0,
1378                .info = .{ .type = .NOTYPE, .bind = .LOCAL },
1379                .other = .{ .visibility = .DEFAULT },
1380                .shndx = std.elf.SHN_UNDEF,
1381            };
1382            if (target_endian != native_endian) std.mem.byteSwapAllFields(ElfN.Sym, symtab_null);
1383
1384            const ehdr = @field(elf.ehdrPtr(), @tagName(ct_class));
1385            ehdr.shstrndx = ehdr.shnum;
1386        },
1387    }
1388    assert(elf.si.shstrtab == try elf.addSection(elf.ni.file, .{
1389        .type = .STRTAB,
1390        .size = 1,
1391        .entsize = 1,
1392        .node_align = elf.mf.flags.block_size,
1393    }));
1394    try elf.renameSection(.symtab, ".symtab");
1395    try elf.renameSection(.shstrtab, ".shstrtab");
1396    elf.si.shstrtab.node(elf).slice(&elf.mf)[0] = 0;
1397
1398    assert(elf.si.strtab == try elf.addSection(elf.ni.file, .{
1399        .name = ".strtab",
1400        .type = .STRTAB,
1401        .size = 1,
1402        .entsize = 1,
1403        .node_align = elf.mf.flags.block_size,
1404    }));
1405    switch (elf.shdrPtr(elf.si.symtab.shndx(elf))) {
1406        inline else => |shdr| elf.targetStore(&shdr.link, @intFromEnum(elf.si.strtab.shndx(elf))),
1407    }
1408    elf.si.strtab.node(elf).slice(&elf.mf)[0] = 0;
1409
1410    assert(elf.si.rodata == try elf.addSection(elf.ni.rodata, .{
1411        .name = ".rodata",
1412        .flags = .{ .ALLOC = true },
1413        .addralign = elf.mf.flags.block_size,
1414    }));
1415    assert(elf.si.text == try elf.addSection(elf.ni.text, .{
1416        .name = ".text",
1417        .flags = .{ .ALLOC = true, .EXECINSTR = true },
1418        .addralign = elf.mf.flags.block_size,
1419    }));
1420    assert(elf.si.data == try elf.addSection(elf.ni.data, .{
1421        .name = ".data",
1422        .flags = .{ .WRITE = true, .ALLOC = true },
1423        .addralign = elf.mf.flags.block_size,
1424    }));
1425    assert(elf.si.data_rel_ro == try elf.addSection(elf.ni.data_rel_ro, .{
1426        .name = ".data.rel.ro",
1427        .flags = .{ .WRITE = true, .ALLOC = true },
1428        .addralign = elf.mf.flags.block_size,
1429    }));
1430    if (@"type" != .REL) {
1431        assert(elf.si.got == try elf.addSection(elf.ni.data_rel_ro, .{
1432            .name = ".got",
1433            .flags = .{ .WRITE = true, .ALLOC = true },
1434            .addralign = addr_align,
1435        }));
1436        assert(elf.si.got_plt == try elf.addSection(
1437            if (elf.options.z_now) elf.ni.data_rel_ro else elf.ni.data,
1438            .{
1439                .name = ".got.plt",
1440                .type = .PROGBITS,
1441                .flags = .{ .WRITE = true, .ALLOC = true },
1442                .size = switch (machine) {
1443                    else => @panic(@tagName(machine)),
1444                    .@"386" => 3 * 4,
1445                    .X86_64 => 3 * 8,
1446                },
1447                .addralign = addr_align,
1448            },
1449        ));
1450        const plt_size: std.elf.Xword, const plt_align: std.mem.Alignment, const plt_sec =
1451            switch (machine) {
1452                else => @panic(@tagName(machine)),
1453                .X86_64 => .{ 16, .@"16", true },
1454            };
1455        assert(elf.si.plt == try elf.addSection(elf.ni.text, .{
1456            .name = ".plt",
1457            .type = .PROGBITS,
1458            .flags = .{ .ALLOC = true, .EXECINSTR = true },
1459            .size = plt_size,
1460            .addralign = plt_align,
1461            .node_align = elf.mf.flags.block_size,
1462        }));
1463        if (plt_sec) assert(elf.si.plt_sec == try elf.addSection(elf.ni.text, .{
1464            .name = ".plt.sec",
1465            .flags = .{ .ALLOC = true, .EXECINSTR = true },
1466            .addralign = plt_align,
1467            .node_align = elf.mf.flags.block_size,
1468        }));
1469        if (maybe_interp) |interp| {
1470            const interp_ni = try elf.mf.addLastChildNode(gpa, elf.ni.rodata, .{
1471                .size = interp.len + 1,
1472                .moved = true,
1473                .resized = true,
1474                .bubbles_moved = false,
1475            });
1476            elf.nodes.appendAssumeCapacity(.{ .segment = interp_phndx });
1477            elf.phdrs.items[interp_phndx] = interp_ni;
1478
1479            const sec_interp_si = try elf.addSection(interp_ni, .{
1480                .name = ".interp",
1481                .type = .PROGBITS,
1482                .flags = .{ .ALLOC = true },
1483                .size = @intCast(interp.len + 1),
1484            });
1485            const sec_interp = sec_interp_si.node(elf).slice(&elf.mf);
1486            @memcpy(sec_interp[0..interp.len], interp);
1487            sec_interp[interp.len] = 0;
1488        }
1489        if (have_dynamic_section) {
1490            const dynamic_ni = try elf.mf.addLastChildNode(gpa, elf.ni.data_rel_ro, .{
1491                .alignment = addr_align,
1492                .moved = true,
1493                .bubbles_moved = false,
1494            });
1495            elf.nodes.appendAssumeCapacity(.{ .segment = dynamic_phndx });
1496            elf.phdrs.items[dynamic_phndx] = dynamic_ni;
1497
1498            elf.si.dynstr = try elf.addSection(elf.ni.rodata, .{
1499                .name = ".dynstr",
1500                .type = .STRTAB,
1501                .flags = .{ .ALLOC = true },
1502                .size = 1,
1503                .entsize = 1,
1504                .node_align = elf.mf.flags.block_size,
1505            });
1506            const dynstr_shndx = elf.si.dynstr.shndx(elf);
1507            elf.dynsym.putAssumeCapacityNoClobber(.null, {});
1508            switch (class) {
1509                .NONE, _ => unreachable,
1510                inline else => |ct_class| {
1511                    const Sym = ct_class.ElfN().Sym;
1512                    elf.si.dynsym = try elf.addSection(elf.ni.rodata, .{
1513                        .name = ".dynsym",
1514                        .type = .DYNSYM,
1515                        .flags = .{ .ALLOC = true },
1516                        .size = @sizeOf(Sym) * 1,
1517                        .link = @intFromEnum(dynstr_shndx),
1518                        .info = 1,
1519                        .addralign = addr_align,
1520                        .entsize = @sizeOf(Sym),
1521                        .node_align = elf.mf.flags.block_size,
1522                    });
1523                    const dynsym_null = &@field(elf.dynsymSlice(), @tagName(ct_class))[0];
1524                    dynsym_null.* = .{
1525                        .name = try elf.string(.dynstr, ""),
1526                        .value = 0,
1527                        .size = 0,
1528                        .info = .{ .type = .NOTYPE, .bind = .LOCAL },
1529                        .other = .{ .visibility = .DEFAULT },
1530                        .shndx = std.elf.SHN_UNDEF,
1531                    };
1532                    if (elf.targetEndian() != native_endian) std.mem.byteSwapAllFields(
1533                        Sym,
1534                        dynsym_null,
1535                    );
1536                },
1537            }
1538            const rela_size: std.elf.Word = switch (class) {
1539                .NONE, _ => unreachable,
1540                inline else => |ct_class| @sizeOf(ct_class.ElfN().Rela),
1541            };
1542            elf.si.got.shndx(elf).get(elf).rela_si = try elf.addSection(elf.ni.rodata, .{
1543                .name = ".rela.dyn",
1544                .type = .RELA,
1545                .flags = .{ .ALLOC = true },
1546                .link = @intFromEnum(elf.si.dynsym.shndx(elf)),
1547                .addralign = addr_align,
1548                .entsize = rela_size,
1549                .node_align = elf.mf.flags.block_size,
1550            });
1551            const got_plt_shndx = elf.si.got_plt.shndx(elf);
1552            got_plt_shndx.get(elf).rela_si = try elf.addSection(elf.ni.rodata, .{
1553                .name = ".rela.plt",
1554                .type = .RELA,
1555                .flags = .{ .ALLOC = true, .INFO_LINK = true },
1556                .link = @intFromEnum(elf.si.dynsym.shndx(elf)),
1557                .info = @intFromEnum(got_plt_shndx),
1558                .addralign = addr_align,
1559                .entsize = rela_size,
1560                .node_align = elf.mf.flags.block_size,
1561            });
1562            elf.si.dynamic = try elf.addSection(dynamic_ni, .{
1563                .name = ".dynamic",
1564                .type = .DYNAMIC,
1565                .flags = .{ .ALLOC = true, .WRITE = true },
1566                .link = @intFromEnum(dynstr_shndx),
1567                .entsize = @intCast(addr_align.toByteUnits() * 2),
1568                .node_align = addr_align,
1569            });
1570            switch (machine) {
1571                else => @panic(@tagName(machine)),
1572                .X86_64 => {
1573                    @memcpy(elf.si.plt.node(elf).slice(&elf.mf)[0..16], &[16]u8{
1574                        0xff, 0x35, 0x00, 0x00, 0x00, 0x00, // push 0x0(%rip)
1575                        0xff, 0x25, 0x00, 0x00, 0x00, 0x00, // jmp *0x0(%rip)
1576                        0x0f, 0x1f, 0x40, 0x00, // nopl 0x0(%rax)
1577                    });
1578                    const plt_sym = elf.si.plt.get(elf);
1579                    assert(plt_sym.loc_relocs == .none);
1580                    plt_sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
1581                    try elf.ensureUnusedRelocCapacity(elf.si.plt, 2);
1582                    elf.addRelocAssumeCapacity(
1583                        elf.si.plt,
1584                        2,
1585                        elf.si.got_plt,
1586                        8 * 1 - 4,
1587                        .{ .X86_64 = .PC32 },
1588                    );
1589                    elf.addRelocAssumeCapacity(
1590                        elf.si.plt,
1591                        8,
1592                        elf.si.got_plt,
1593                        8 * 2 - 4,
1594                        .{ .X86_64 = .PC32 },
1595                    );
1596                },
1597            }
1598        }
1599        if (comp.config.any_non_single_threaded) {
1600            elf.ni.tls = try elf.mf.addLastChildNode(gpa, elf.ni.rodata, .{
1601                .alignment = elf.mf.flags.block_size,
1602                .moved = true,
1603                .bubbles_moved = false,
1604            });
1605            elf.nodes.appendAssumeCapacity(.{ .segment = tls_phndx });
1606            elf.phdrs.items[tls_phndx] = elf.ni.tls;
1607        }
1608    } else {
1609        assert(maybe_interp == null);
1610        assert(!have_dynamic_section);
1611    }
1612    if (comp.config.any_non_single_threaded) elf.si.tdata = try elf.addSection(elf.ni.tls, .{
1613        .name = ".tdata",
1614        .flags = .{ .WRITE = true, .ALLOC = true, .TLS = true },
1615        .addralign = elf.mf.flags.block_size,
1616    });
1617    assert(elf.nodes.len == expected_nodes_len);
1618}
1619
/// Begins per-phase progress reporting under `prog_node`.
/// Must be paired with a later call to `endProgress`.
pub fn startProgress(elf: *Elf, prog_node: std.Progress.Node) void {
    prog_node.increaseEstimatedTotalItems(4);
    // Pending unnamed constants (uavs) that still need to be emitted.
    elf.const_prog_node = prog_node.start("Constants", elf.pending_uavs.count());
    // Lazy symbols that have been requested but not yet flushed.
    var synth_count: usize = 0;
    for (&elf.lazy.values) |*lazy| synth_count += lazy.map.count() - lazy.pending_index;
    elf.synth_prog_node = prog_node.start("Synthetics", synth_count);
    elf.mf.update_prog_node = prog_node.start("Relocations", elf.mf.updates.items.len);
    // Input sections not yet processed.
    elf.input_prog_node = prog_node.start(
        "Inputs",
        elf.input_sections.items.len - elf.input_section_pending_index,
    );
}
1634
/// Ends all progress nodes started by `startProgress`, in reverse start
/// order, and resets each field to `.none`.
pub fn endProgress(elf: *Elf) void {
    const prog_nodes = [_]*std.Progress.Node{
        &elf.input_prog_node,
        &elf.mf.update_prog_node,
        &elf.synth_prog_node,
        &elf.const_prog_node,
    };
    for (prog_nodes) |node| {
        node.end();
        node.* = .none;
    }
}
1645
/// Returns the linker metadata associated with mapped-file node `ni`.
fn getNode(elf: *const Elf, ni: MappedFile.Node.Index) Node {
    const index: usize = @intFromEnum(ni);
    return elf.nodes.get(index);
}
/// Computes the virtual address of node `ni` by adding its offset within its
/// parent node to the parent's virtual address.
fn computeNodeVAddr(elf: *Elf, ni: MappedFile.Node.Index) u64 {
    const parent_vaddr = parent_vaddr: {
        const parent_ni = ni.parent(&elf.mf);
        const parent_si = switch (elf.getNode(parent_ni)) {
            // The file root has no load address; the node's offset stands alone.
            .file => return 0,
            .ehdr, .shdr => unreachable,
            // Segments carry their vaddr directly in the program header table.
            .segment => |phndx| break :parent_vaddr switch (elf.phdrSlice()) {
                inline else => |phdr| elf.targetLoad(&phdr[phndx].vaddr),
            },
            .section => |si| si,
            .input_section => unreachable,
            // Zcu-backed nodes resolve through their owning symbol.
            inline .nav, .uav, .lazy_code, .lazy_const_data => |mi| mi.symbol(elf),
        };
        // Children of `.tdata` are addressed relative to the TLS template start.
        break :parent_vaddr if (parent_si == elf.si.tdata) 0 else switch (elf.symPtr(parent_si)) {
            inline else => |sym| elf.targetLoad(&sym.value),
        };
    };
    const offset, _ = ni.location(&elf.mf).resolve(&elf.mf);
    return parent_vaddr + offset;
}
1669
/// Reads `EI_CLASS` (32- vs 64-bit layout) from the mapped ELF ident bytes.
pub fn identClass(elf: *const Elf) std.elf.CLASS {
    const byte = elf.mf.contents[std.elf.EI.CLASS];
    return @enumFromInt(byte);
}
/// Reads `EI_DATA` (byte order) from the mapped ELF ident bytes.
pub fn identData(elf: *const Elf) std.elf.DATA {
    const byte = elf.mf.contents[std.elf.EI.DATA];
    return @enumFromInt(byte);
}
1676
/// Returns the byte order of the output file, derived from `EI_DATA`.
pub fn targetEndian(elf: *const Elf) std.builtin.Endian {
    switch (elf.identData()) {
        .@"2LSB" => return .little,
        .@"2MSB" => return .big,
        // The ident bytes are written at file creation and are always valid.
        .NONE, _ => unreachable,
    }
}
/// Loads the value behind `ptr` from the mapped file, converting from the
/// target's byte order to native. Supports integers, enums, and structs with
/// a backing integer; anything else is a compile error.
fn targetLoad(elf: *const Elf, ptr: anytype) @typeInfo(@TypeOf(ptr)).pointer.child {
    const Child = @typeInfo(@TypeOf(ptr)).pointer.child;
    return switch (@typeInfo(Child)) {
        else => @compileError(@typeName(Child)),
        .int => std.mem.toNative(Child, ptr.*, elf.targetEndian()),
        // Enums and packed structs recurse through their integer representation.
        .@"enum" => |@"enum"| @enumFromInt(elf.targetLoad(@as(*@"enum".tag_type, @ptrCast(ptr)))),
        .@"struct" => |@"struct"| @bitCast(
            elf.targetLoad(@as(*@"struct".backing_integer.?, @ptrCast(ptr))),
        ),
    };
}
/// Stores `val` through `ptr` into the mapped file, converting from native
/// byte order to the target's. Mirrors `targetLoad`: integers, enums, and
/// structs with a backing integer are supported.
fn targetStore(elf: *const Elf, ptr: anytype, val: @typeInfo(@TypeOf(ptr)).pointer.child) void {
    const Child = @typeInfo(@TypeOf(ptr)).pointer.child;
    return switch (@typeInfo(Child)) {
        else => @compileError(@typeName(Child)),
        .int => ptr.* = std.mem.nativeTo(Child, val, elf.targetEndian()),
        // Enums and packed structs recurse through their integer representation.
        .@"enum" => |@"enum"| elf.targetStore(
            @as(*@"enum".tag_type, @ptrCast(ptr)),
            @intFromEnum(val),
        ),
        .@"struct" => |@"struct"| elf.targetStore(
            @as(*@"struct".backing_integer.?, @ptrCast(ptr)),
            @bitCast(val),
        ),
    };
}
1710
/// Pointer to the ELF header, discriminated by file class.
pub const EhdrPtr = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": *std.elf.Elf32.Ehdr,
    @"64": *std.elf.Elf64.Ehdr,
};
/// Returns a typed pointer to the mapped ELF header.
pub fn ehdrPtr(elf: *Elf) EhdrPtr {
    const bytes = elf.ni.ehdr.slice(&elf.mf);
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            return @unionInit(EhdrPtr, @tagName(class), @ptrCast(@alignCast(bytes)));
        },
    }
}
/// Loads a single ELF header field, byte-swapping as needed. The 32-bit
/// layout's field value coerces to the 64-bit layout's field type.
pub fn ehdrField(
    elf: *Elf,
    comptime field: std.meta.FieldEnum(std.elf.Elf64.Ehdr),
) @FieldType(std.elf.Elf64.Ehdr, @tagName(field)) {
    switch (elf.ehdrPtr()) {
        inline else => |ehdr| return elf.targetLoad(&@field(ehdr, @tagName(field))),
    }
}
1735
/// Program header table viewed with the class-appropriate entry layout.
pub const PhdrSlice = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": []std.elf.Elf32.Phdr,
    @"64": []std.elf.Elf64.Phdr,
};
/// Returns the program header table as a typed slice.
/// Asserts the file is not relocatable (REL objects have no phdrs).
pub fn phdrSlice(elf: *Elf) PhdrSlice {
    assert(elf.ehdrField(.type) != .REL);
    const bytes = elf.ni.phdr.slice(&elf.mf);
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            return @unionInit(PhdrSlice, @tagName(class), @ptrCast(@alignCast(bytes)));
        },
    }
}
1753
/// Section header table viewed with the class-appropriate entry layout.
pub const ShdrSlice = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": []std.elf.Elf32.Shdr,
    @"64": []std.elf.Elf64.Shdr,
};
/// Returns the section header table as a typed slice.
pub fn shdrSlice(elf: *Elf) ShdrSlice {
    const bytes = elf.ni.shdr.slice(&elf.mf);
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            return @unionInit(ShdrSlice, @tagName(class), @ptrCast(@alignCast(bytes)));
        },
    }
}
1769}
1770
/// Pointer to a single section header, discriminated by file class.
pub const ShdrPtr = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": *std.elf.Elf32.Shdr,
    @"64": *std.elf.Elf64.Shdr,
};
/// Returns a typed pointer to the section header at `shndx`.
pub fn shdrPtr(elf: *Elf, shndx: Symbol.Index.Shndx) ShdrPtr {
    switch (elf.shdrSlice()) {
        inline else => |shdrs, class| {
            const shdr = &shdrs[@intFromEnum(shndx)];
            return @unionInit(ShdrPtr, @tagName(class), shdr);
        },
    }
}
1781
/// Symbol table viewed with the class-appropriate entry layout.
pub const SymtabSlice = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": []std.elf.Elf32.Sym,
    @"64": []std.elf.Elf64.Sym,
};
/// Returns the `.symtab` contents as a typed symbol slice, excluding any
/// trailing partial entry.
pub fn symtabSlice(elf: *Elf) SymtabSlice {
    const bytes = elf.si.symtab.node(elf).slice(&elf.mf);
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            const Sym = class.ElfN().Sym;
            // Round down to a whole number of entries before reinterpreting.
            const whole_len = std.mem.alignBackwardAnyAlign(usize, bytes.len, @sizeOf(Sym));
            return @unionInit(SymtabSlice, @tagName(class), @ptrCast(@alignCast(
                bytes[0..whole_len],
            )));
        },
    }
}
1796
/// Pointer to a single symbol table entry, discriminated by file class.
pub const SymPtr = union(std.elf.CLASS) {
    NONE: noreturn,
    @"32": *std.elf.Elf32.Sym,
    @"64": *std.elf.Elf64.Sym,
};
/// Returns a typed pointer to the `.symtab` entry for `si`.
pub fn symPtr(elf: *Elf, si: Symbol.Index) SymPtr {
    switch (elf.symtabSlice()) {
        inline else => |syms, class| {
            const sym = &syms[@intFromEnum(si)];
            return @unionInit(SymPtr, @tagName(class), sym);
        },
    }
}
1807
/// Returns the `.dynsym` contents as a typed symbol slice, excluding any
/// trailing partial entry.
pub fn dynsymSlice(elf: *Elf) SymtabSlice {
    const bytes = elf.si.dynsym.node(elf).slice(&elf.mf);
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            const Sym = class.ElfN().Sym;
            // Round down to a whole number of entries before reinterpreting.
            const whole_len = std.mem.alignBackwardAnyAlign(usize, bytes.len, @sizeOf(Sym));
            return @unionInit(SymtabSlice, @tagName(class), @ptrCast(@alignCast(
                bytes[0..whole_len],
            )));
        },
    }
}
1817
/// Appends one blank symbol to the linker symbol table and returns its index.
/// Capacity must already have been reserved by the caller.
fn addSymbolAssumeCapacity(elf: *Elf) Symbol.Index {
    // The new entry lands at the current length, which becomes its index.
    const si: Symbol.Index = @enumFromInt(elf.symtab.items.len);
    elf.symtab.addOneAssumeCapacity().* = .{
        .ni = .none,
        .loc_relocs = .none,
        .target_relocs = .none,
        .unused = 0,
    };
    return si;
}
1827
/// Appends a new symbol (capacity must be pre-reserved) and initializes it
/// with `opts`, returning the new symbol's index.
fn initSymbolAssumeCapacity(elf: *Elf, opts: Symbol.Index.InitOptions) !Symbol.Index {
    const si = elf.addSymbolAssumeCapacity();
    try si.init(elf, opts);
    return si;
}
1833
/// Returns the symbol for a named global, creating it on first use.
/// Globals are deduplicated by their `.strtab` name offset.
pub fn globalSymbol(elf: *Elf, opts: struct {
    name: []const u8,
    lib_name: ?[]const u8 = null,
    type: std.elf.STT,
    bind: std.elf.STB = .GLOBAL,
    visibility: std.elf.STV = .DEFAULT,
}) !Symbol.Index {
    const gpa = elf.base.comp.gpa;
    try elf.symtab.ensureUnusedCapacity(gpa, 1);
    const name_off = try elf.string(.strtab, opts.name);
    const gop = try elf.globals.getOrPut(gpa, name_off);
    if (gop.found_existing) return gop.value_ptr.*;
    const si = try elf.initSymbolAssumeCapacity(.{
        .name = opts.name,
        .lib_name = opts.lib_name,
        .type = opts.type,
        .bind = opts.bind,
        .visibility = opts.visibility,
    });
    gop.value_ptr.* = si;
    return si;
}
1853
/// Maps a nav's resolution status to the ELF symbol type (STT) it should be
/// emitted with. `any_non_single_threaded` gates whether threadlocals become
/// TLS symbols; when false they fall through to OBJECT/FUNC.
fn navType(
    ip: *const InternPool,
    nav_status: @FieldType(InternPool.Nav, "status"),
    any_non_single_threaded: bool,
) std.elf.STT {
    return switch (nav_status) {
        .unresolved => unreachable,
        .type_resolved => |tr| if (any_non_single_threaded and tr.is_threadlocal)
            .TLS
        else if (ip.isFunctionType(tr.type))
            .FUNC
        else
            .OBJECT,
        .fully_resolved => |fr| switch (ip.indexToKey(fr.val)) {
            // Anything not special-cased below is plain data.
            else => .OBJECT,
            .variable => |variable| if (any_non_single_threaded and variable.is_threadlocal)
                .TLS
            else
                .OBJECT,
            .@"extern" => |@"extern"| if (any_non_single_threaded and @"extern".is_threadlocal)
                .TLS
            else if (ip.isFunctionType(@"extern".ty))
                .FUNC
            else
                .OBJECT,
            .func => .FUNC,
        },
    };
}
/// Maps a section name (exact, or with a `.suffix`) to one of the linker's
/// well-known output sections, or null if unrecognized.
fn namedSection(elf: *const Elf, name: []const u8) ?Symbol.Index {
    const mappings = [_]struct { prefix: []const u8, si: Symbol.Index }{
        .{ .prefix = ".rodata", .si = elf.si.rodata },
        .{ .prefix = ".text", .si = elf.si.text },
        .{ .prefix = ".data", .si = elf.si.data },
        .{ .prefix = ".tdata", .si = elf.si.tdata },
    };
    for (mappings) |m| {
        if (!std.mem.startsWith(u8, name, m.prefix)) continue;
        // Accept the exact name or a dotted subsection like ".text.foo",
        // but not an unrelated extension like ".textual".
        if (name.len == m.prefix.len or name[m.prefix.len] == '.') return m.si;
    }
    return null;
}
/// Chooses the output section for a fully-resolved nav. An explicit
/// `linksection` naming a known section wins; otherwise the section is
/// derived from the nav's symbol type.
fn navSection(
    elf: *Elf,
    ip: *const InternPool,
    nav_fr: @FieldType(@FieldType(InternPool.Nav, "status"), "fully_resolved"),
) Symbol.Index {
    if (nav_fr.@"linksection".toSlice(ip)) |@"linksection"|
        if (elf.namedSection(@"linksection")) |si| return si;
    return switch (navType(
        ip,
        .{ .fully_resolved = nav_fr },
        elf.base.comp.config.any_non_single_threaded,
    )) {
        // Fully-resolved navs only ever lower to the three types below.
        else => unreachable,
        .FUNC => elf.si.text,
        .OBJECT => elf.si.data,
        .TLS => elf.si.tdata,
    };
}
/// Returns the nav-map index for `nav_index`, creating a symbol named after
/// the nav's fully-qualified name on first use.
fn navMapIndex(elf: *Elf, zcu: *Zcu, nav_index: InternPool.Nav.Index) !Node.NavMapIndex {
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
    const nav = ip.getNav(nav_index);
    try elf.symtab.ensureUnusedCapacity(gpa, 1);
    const gop = try elf.navs.getOrPut(gpa, nav_index);
    if (!gop.found_existing) {
        gop.value_ptr.* = try elf.initSymbolAssumeCapacity(.{
            .name = nav.fqn.toSlice(ip),
            .type = navType(ip, nav.status, elf.base.comp.config.any_non_single_threaded),
        });
    }
    return @enumFromInt(gop.index);
}
/// Returns (creating if needed) the symbol for a nav. Extern navs resolve to
/// name-keyed global symbols; all others get a per-nav symbol.
pub fn navSymbol(elf: *Elf, zcu: *Zcu, nav_index: InternPool.Nav.Index) !Symbol.Index {
    const ip = &zcu.intern_pool;
    const nav = ip.getNav(nav_index);
    if (nav.getExtern(ip)) |@"extern"| return elf.globalSymbol(.{
        .name = @"extern".name.toSlice(ip),
        .lib_name = @"extern".lib_name.toSlice(ip),
        .type = navType(ip, nav.status, elf.base.comp.config.any_non_single_threaded),
        .bind = switch (@"extern".linkage) {
            .internal => .LOCAL,
            .strong => .GLOBAL,
            .weak => .WEAK,
            // No ELF binding is emitted for link-once; reject it.
            .link_once => return error.LinkOnceUnsupported,
        },
        .visibility = switch (@"extern".visibility) {
            .default => .DEFAULT,
            .hidden => .HIDDEN,
            .protected => .PROTECTED,
        },
    });
    const nmi = try elf.navMapIndex(zcu, nav_index);
    return nmi.symbol(elf);
}
1946
/// Returns the uav-map index for `uav_val`, creating an anonymous OBJECT
/// symbol on first use.
fn uavMapIndex(elf: *Elf, uav_val: InternPool.Index) !Node.UavMapIndex {
    const gpa = elf.base.comp.gpa;
    try elf.symtab.ensureUnusedCapacity(gpa, 1);
    const gop = try elf.uavs.getOrPut(gpa, uav_val);
    if (!gop.found_existing) {
        gop.value_ptr.* = try elf.initSymbolAssumeCapacity(.{ .type = .OBJECT });
    }
    return @enumFromInt(gop.index);
}
/// Returns (creating if needed) the symbol for an unnamed constant value.
pub fn uavSymbol(elf: *Elf, uav_val: InternPool.Index) !Symbol.Index {
    return (try elf.uavMapIndex(uav_val)).symbol(elf);
}
1959
/// Returns (creating if needed) the symbol for a lazily-generated value,
/// deduplicated per kind by type. New entries bump the synthetics progress.
pub fn lazySymbol(elf: *Elf, lazy: link.File.LazySymbol) !Symbol.Index {
    const gpa = elf.base.comp.gpa;
    try elf.symtab.ensureUnusedCapacity(gpa, 1);
    const map = &elf.lazy.getPtr(lazy.kind).map;
    const gop = try map.getOrPut(gpa, lazy.ty);
    if (gop.found_existing) return gop.value_ptr.*;
    const sym_type: std.elf.STT = switch (lazy.kind) {
        .code => .FUNC,
        .const_data => .OBJECT,
    };
    gop.value_ptr.* = try elf.initSymbolAssumeCapacity(.{ .type = sym_type });
    elf.synth_prog_node.increaseEstimatedTotalItems(1);
    return gop.value_ptr.*;
}
1975
/// Loads one linker input (object, archive, shared object, or exact DSO name)
/// into the ELF file. Buffered-reader `error.ReadFailed` is translated back
/// into the concrete underlying file-reader error.
pub fn loadInput(elf: *Elf, input: link.Input) (std.fs.File.Reader.SizeError ||
    std.Io.File.Reader.Error || MappedFile.Error || error{ EndOfStream, BadMagic, LinkFailure })!void {
    const io = elf.base.comp.io;
    var buf: [4096]u8 = undefined;
    switch (input) {
        .object => |object| {
            var fr = object.file.reader(io, &buf);
            elf.loadObject(object.path, null, &fr, .{
                .offset = fr.logicalPos(),
                .size = try fr.getSize(),
            }) catch |err| switch (err) {
                // The reader recorded the real error; surface that instead.
                error.ReadFailed => return fr.err.?,
                else => |e| return e,
            };
        },
        .archive => |archive| {
            var fr = archive.file.reader(io, &buf);
            elf.loadArchive(archive.path, &fr) catch |err| switch (err) {
                error.ReadFailed => return fr.err.?,
                else => |e| return e,
            };
        },
        // Windows resource files never reach the ELF linker.
        .res => unreachable,
        .dso => |dso| {
            // Reserve the DT_NEEDED slot up front so loadDso cannot fail on it.
            try elf.needed.ensureUnusedCapacity(elf.base.comp.gpa, 1);
            var fr = dso.file.reader(io, &buf);
            elf.loadDso(dso.path, &fr) catch |err| switch (err) {
                error.ReadFailed => return fr.err.?,
                else => |e| return e,
            };
        },
        .dso_exact => |dso_exact| try elf.loadDsoExact(dso_exact.name),
    }
}
/// Parses a `!<arch>` archive and loads every `.o` member as an object file.
/// Long member names are resolved through the archive's string-table member.
fn loadArchive(elf: *Elf, path: std.Build.Cache.Path, fr: *std.Io.File.Reader) !void {
    const comp = elf.base.comp;
    const gpa = comp.gpa;
    const diags = &comp.link_diags;
    const r = &fr.interface;

    log.debug("loadArchive({f})", .{path.fmtEscapeString()});
    if (!std.mem.eql(u8, try r.take(std.elf.ARMAG.len), std.elf.ARMAG)) return error.BadMagic;
    var strtab: std.Io.Writer.Allocating = .init(gpa);
    defer strtab.deinit();
    while (r.takeStruct(std.elf.ar_hdr, native_endian)) |header| {
        if (!std.mem.eql(u8, &header.ar_fmag, std.elf.ARFMAG))
            return diags.failParse(path, "bad file magic", .{});
        const offset = fr.logicalPos();
        const size = header.size() catch
            return diags.failParse(path, "bad member size", .{});
        // The string-table member (`std.elf.STRNAME`) holds long member names.
        if (std.mem.eql(u8, &header.ar_name, std.elf.STRNAME)) {
            strtab.clearRetainingCapacity();
            try strtab.ensureTotalCapacityPrecise(size);
            r.streamExact(&strtab.writer, size) catch |err| switch (err) {
                // Capacity was reserved above, so a write failure means OOM.
                error.WriteFailed => return error.OutOfMemory,
                else => |e| return e,
            };
            continue;
        }
        load_object: {
            const member = header.name() orelse member: {
                // Name stored as an offset into the string table (see `nameOffset`).
                const strtab_offset = header.nameOffset() catch |err| switch (err) {
                    error.Overflow => break :member error.Overflow,
                    error.InvalidCharacter => break :load_object,
                } orelse break :load_object;
                const strtab_written = strtab.written();
                if (strtab_offset > strtab_written.len) break :member error.Overflow;
                const member = std.mem.sliceTo(strtab_written[strtab_offset..], '\n');
                // Entries are newline-terminated; strip a trailing '/'.
                break :member if (std.mem.endsWith(u8, member, "/"))
                    member[0 .. member.len - "/".len]
                else
                    member;
            } catch |err| switch (err) {
                error.Overflow => return diags.failParse(path, "bad member name offset", .{}),
            };
            // Only object-file members participate in linking.
            if (!std.mem.endsWith(u8, member, ".o")) break :load_object;
            try elf.loadObject(path, member, fr, .{ .offset = offset, .size = size });
        }
        // Archive members are aligned to even offsets.
        try fr.seekTo(std.mem.alignForward(u64, offset + size, 2));
    } else |err| switch (err) {
        // EndOfStream is the normal loop exit, but only at the true file end.
        error.EndOfStream => if (!fr.atEnd()) return error.EndOfStream,
        else => |e| return e,
    }
}
/// Formats an optional archive member name as `(name)`, or nothing when null.
fn fmtMemberString(member: ?[]const u8) std.fmt.Alt(?[]const u8, memberStringEscape) {
    return .{ .data = member };
}
/// Writer callback for `fmtMemberString`; escapes the member name for logging.
fn memberStringEscape(member: ?[]const u8, w: *std.Io.Writer) std.Io.Writer.Error!void {
    try w.print("({f})", .{std.zig.fmtString(member orelse return)});
}
2066fn loadObject(
2067    elf: *Elf,
2068    path: std.Build.Cache.Path,
2069    member: ?[]const u8,
2070    fr: *std.Io.File.Reader,
2071    fl: MappedFile.Node.FileLocation,
2072) !void {
2073    const comp = elf.base.comp;
2074    const gpa = comp.gpa;
2075    const diags = &comp.link_diags;
2076    const r = &fr.interface;
2077
2078    const ii: Node.InputIndex = @enumFromInt(elf.inputs.items.len);
2079    log.debug("loadObject({f}{f})", .{ path.fmtEscapeString(), fmtMemberString(member) });
2080    const ident = try r.peek(std.elf.EI.OSABI);
2081    if (!std.mem.eql(u8, ident[0..std.elf.MAGIC.len], std.elf.MAGIC)) return error.BadMagic;
2082    if (!std.mem.eql(u8, ident[std.elf.MAGIC.len..], elf.mf.contents[std.elf.MAGIC.len..ident.len]))
2083        return diags.failParse(path, "bad ident", .{});
2084    try elf.symtab.ensureUnusedCapacity(gpa, 1);
2085    try elf.inputs.ensureUnusedCapacity(gpa, 1);
2086    elf.inputs.addOneAssumeCapacity().* = .{
2087        .path = path,
2088        .member = if (member) |m| try gpa.dupe(u8, m) else null,
2089        .si = try elf.initSymbolAssumeCapacity(.{
2090            .name = std.fs.path.stem(member orelse path.sub_path),
2091            .type = .FILE,
2092            .shndx = .ABS,
2093        }),
2094    };
2095    const target_endian = elf.targetEndian();
2096    switch (elf.identClass()) {
2097        .NONE, _ => unreachable,
2098        inline else => |class| {
2099            const ElfN = class.ElfN();
2100            const ehdr = try r.peekStruct(ElfN.Ehdr, target_endian);
2101            if (ehdr.type != .REL) return diags.failParse(path, "unsupported object type", .{});
2102            if (ehdr.machine != elf.ehdrField(.machine))
2103                return diags.failParse(path, "bad machine", .{});
2104            if (ehdr.shoff == 0 or ehdr.shnum <= 1) return;
2105            if (ehdr.shoff + ehdr.shentsize * ehdr.shnum > fl.size)
2106                return diags.failParse(path, "bad section header location", .{});
2107            if (ehdr.shentsize < @sizeOf(ElfN.Shdr))
2108                return diags.failParse(path, "unsupported shentsize", .{});
2109            const sections = try gpa.alloc(struct { shdr: ElfN.Shdr, si: Symbol.Index }, ehdr.shnum);
2110            defer gpa.free(sections);
2111            try fr.seekTo(fl.offset + ehdr.shoff);
2112            for (sections) |*section| {
2113                section.* = .{
2114                    .shdr = try r.peekStruct(ElfN.Shdr, target_endian),
2115                    .si = .null,
2116                };
2117                try r.discardAll(ehdr.shentsize);
2118                switch (section.shdr.type) {
2119                    .NULL, .NOBITS => {},
2120                    else => if (section.shdr.offset + section.shdr.size > fl.size)
2121                        return diags.failParse(path, "bad section location", .{}),
2122                }
2123            }
2124            const shstrtab = shstrtab: {
2125                if (ehdr.shstrndx == std.elf.SHN_UNDEF or ehdr.shstrndx >= ehdr.shnum)
2126                    return diags.failParse(path, "missing section names", .{});
2127                const shdr = &sections[ehdr.shstrndx].shdr;
2128                if (shdr.type != .STRTAB) return diags.failParse(path, "invalid shstrtab type", .{});
2129                const shstrtab = try gpa.alloc(u8, @intCast(shdr.size));
2130                errdefer gpa.free(shstrtab);
2131                try fr.seekTo(fl.offset + shdr.offset);
2132                try r.readSliceAll(shstrtab);
2133                break :shstrtab shstrtab;
2134            };
2135            defer gpa.free(shstrtab);
2136            try elf.nodes.ensureUnusedCapacity(gpa, ehdr.shnum - 1);
2137            try elf.symtab.ensureUnusedCapacity(gpa, ehdr.shnum - 1);
2138            try elf.input_sections.ensureUnusedCapacity(gpa, ehdr.shnum - 1);
2139            for (sections[1..]) |*section| switch (section.shdr.type) {
2140                else => {},
2141                .PROGBITS, .NOBITS => {
2142                    if (section.shdr.name >= shstrtab.len) continue;
2143                    const name = std.mem.sliceTo(shstrtab[section.shdr.name..], 0);
2144                    const parent_si = elf.namedSection(name) orelse continue;
2145                    const ni = try elf.mf.addLastChildNode(gpa, parent_si.node(elf), .{
2146                        .size = section.shdr.size,
2147                        .alignment = .fromByteUnits(std.math.ceilPowerOfTwoAssert(
2148                            usize,
2149                            @intCast(@max(section.shdr.addralign, 1)),
2150                        )),
2151                        .moved = true,
2152                    });
2153                    elf.nodes.appendAssumeCapacity(.{
2154                        .input_section = @enumFromInt(elf.input_sections.items.len),
2155                    });
2156                    section.si = try elf.initSymbolAssumeCapacity(.{
2157                        .type = .SECTION,
2158                        .shndx = parent_si.shndx(elf),
2159                    });
2160                    section.si.get(elf).ni = ni;
2161                    elf.input_sections.addOneAssumeCapacity().* = .{
2162                        .ii = ii,
2163                        .si = section.si,
2164                        .file_location = .{
2165                            .offset = fl.offset + section.shdr.offset,
2166                            .size = section.shdr.size,
2167                        },
2168                    };
2169                    elf.synth_prog_node.increaseEstimatedTotalItems(1);
2170                },
2171            };
2172            var symmap: std.ArrayList(Symbol.Index) = .empty;
2173            defer symmap.deinit(gpa);
2174            for (sections[1..], 1..) |*symtab, symtab_shndx| switch (symtab.shdr.type) {
2175                else => {},
2176                .SYMTAB => {
2177                    if (symtab.shdr.entsize < @sizeOf(ElfN.Sym))
2178                        return diags.failParse(path, "unsupported symtab entsize", .{});
2179                    const strtab = strtab: {
2180                        if (symtab.shdr.link == std.elf.SHN_UNDEF or symtab.shdr.link >= ehdr.shnum)
2181                            return diags.failParse(path, "missing symbol names", .{});
2182                        const shdr = &sections[symtab.shdr.link].shdr;
2183                        if (shdr.type != .STRTAB)
2184                            return diags.failParse(path, "invalid strtab type", .{});
2185                        const strtab = try gpa.alloc(u8, @intCast(shdr.size));
2186                        errdefer gpa.free(strtab);
2187                        try fr.seekTo(fl.offset + shdr.offset);
2188                        try r.readSliceAll(strtab);
2189                        break :strtab strtab;
2190                    };
2191                    defer gpa.free(strtab);
2192                    const symnum = std.math.sub(u32, std.math.divExact(
2193                        u32,
2194                        @intCast(symtab.shdr.size),
2195                        @intCast(symtab.shdr.entsize),
2196                    ) catch return diags.failParse(
2197                        path,
2198                        "symtab section size (0x{x}) is not a multiple of entsize (0x{x})",
2199                        .{ symtab.shdr.size, symtab.shdr.entsize },
2200                    ), 1) catch continue;
2201                    symmap.clearRetainingCapacity();
2202                    try symmap.resize(gpa, symnum);
2203                    try elf.symtab.ensureUnusedCapacity(gpa, symnum);
2204                    try elf.globals.ensureUnusedCapacity(gpa, symnum);
2205                    try fr.seekTo(fl.offset + symtab.shdr.offset + symtab.shdr.entsize);
2206                    for (symmap.items) |*si| {
2207                        si.* = .null;
2208                        const input_sym = try r.peekStruct(ElfN.Sym, target_endian);
2209                        try r.discardAll64(symtab.shdr.entsize);
2210                        if (input_sym.name >= strtab.len or input_sym.shndx == std.elf.SHN_UNDEF or
2211                            input_sym.shndx >= ehdr.shnum) continue;
2212                        switch (input_sym.info.type) {
2213                            .NOTYPE, .OBJECT, .FUNC => {},
2214                            .SECTION => {
2215                                const section = &sections[input_sym.shndx];
2216                                if (input_sym.value == section.shdr.addr) si.* = section.si;
2217                                continue;
2218                            },
2219                            else => continue,
2220                        }
2221                        const name = std.mem.sliceTo(strtab[input_sym.name..], 0);
2222                        const parent_si = sections[input_sym.shndx].si;
2223                        si.* = try elf.initSymbolAssumeCapacity(.{
2224                            .name = name,
2225                            .value = input_sym.value,
2226                            .size = input_sym.size,
2227                            .type = input_sym.info.type,
2228                            .bind = input_sym.info.bind,
2229                            .visibility = input_sym.other.visibility,
2230                            .shndx = parent_si.shndx(elf),
2231                        });
2232                        si.get(elf).ni = parent_si.get(elf).ni;
2233                        switch (input_sym.info.bind) {
2234                            else => {},
2235                            .GLOBAL => {
2236                                const gop = elf.globals.getOrPutAssumeCapacity(elf.targetLoad(
2237                                    &@field(elf.symPtr(si.*), @tagName(class)).name,
2238                                ));
2239                                if (gop.found_existing) switch (elf.targetLoad(
2240                                    switch (elf.symPtr(gop.value_ptr.*)) {
2241                                        inline else => |sym| &sym.info,
2242                                    },
2243                                ).bind) {
2244                                    else => unreachable,
2245                                    .GLOBAL => return diags.failParse(
2246                                        path,
2247                                        "multiple definitions of '{s}'",
2248                                        .{name},
2249                                    ),
2250                                    .WEAK => {},
2251                                };
2252                                gop.value_ptr.* = si.*;
2253                            },
2254                            .WEAK => {
2255                                const gop = elf.globals.getOrPutAssumeCapacity(elf.targetLoad(
2256                                    &@field(elf.symPtr(si.*), @tagName(class)).name,
2257                                ));
2258                                if (!gop.found_existing) gop.value_ptr.* = si.*;
2259                            },
2260                        }
2261                    }
2262                    for (sections[1..]) |*rels| switch (rels.shdr.type) {
2263                        else => {},
2264                        inline .REL, .RELA => |sht| {
2265                            if (rels.shdr.link != symtab_shndx or rels.shdr.info == std.elf.SHN_UNDEF or
2266                                rels.shdr.info >= ehdr.shnum) continue;
2267                            const Rel = switch (sht) {
2268                                else => comptime unreachable,
2269                                .REL => ElfN.Rel,
2270                                .RELA => ElfN.Rela,
2271                            };
2272                            if (rels.shdr.entsize < @sizeOf(Rel))
2273                                return diags.failParse(path, "unsupported rel entsize", .{});
2274
2275                            const loc_sec = &sections[rels.shdr.info];
2276                            if (loc_sec.si == .null) continue;
2277                            const loc_sym = loc_sec.si.get(elf);
2278                            assert(loc_sym.loc_relocs == .none);
2279                            loc_sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
2280
2281                            const relnum = std.math.divExact(
2282                                u32,
2283                                @intCast(rels.shdr.size),
2284                                @intCast(rels.shdr.entsize),
2285                            ) catch return diags.failParse(
2286                                path,
2287                                "relocation section size (0x{x}) is not a multiple of entsize (0x{x})",
2288                                .{ rels.shdr.size, rels.shdr.entsize },
2289                            );
2290                            try elf.ensureUnusedRelocCapacity(loc_sec.si, relnum);
2291                            try fr.seekTo(fl.offset + rels.shdr.offset);
2292                            for (0..relnum) |_| {
2293                                const rel = try r.peekStruct(Rel, target_endian);
2294                                try r.discardAll64(rels.shdr.entsize);
2295                                if (rel.info.sym == 0 or rel.info.sym > symnum) continue;
2296                                const target_si = symmap.items[rel.info.sym - 1];
2297                                if (target_si == .null) continue;
2298                                elf.addRelocAssumeCapacity(
2299                                    loc_sec.si,
2300                                    rel.offset - loc_sec.shdr.addr,
2301                                    target_si,
2302                                    rel.addend,
2303                                    .wrap(rel.info.type, elf),
2304                                );
2305                            }
2306                        },
2307                    };
2308                },
2309            };
2310        },
2311    }
2312}
/// Parses the shared object at `path` just enough to discover the name under
/// which it must be recorded as a `DT_NEEDED` dependency.
///
/// The soname is located by scanning the `PT_DYNAMIC` segment for `DT_STRTAB`,
/// `DT_STRSZ`, and `DT_SONAME`, then translating the dynamic string table's
/// virtual address back to a file offset via the `PT_LOAD` segment containing
/// it. Falls back to the file's basename when no soname (or strtab) is present.
fn loadDso(elf: *Elf, path: std.Build.Cache.Path, fr: *std.Io.File.Reader) !void {
    const comp = elf.base.comp;
    const diags = &comp.link_diags;
    const r = &fr.interface;

    log.debug("loadDso({f})", .{path.fmtEscapeString()});
    const ident = try r.peek(std.elf.EI.NIDENT);
    if (!std.mem.eql(u8, ident[0..std.elf.MAGIC.len], std.elf.MAGIC)) return error.BadMagic;
    // The input's remaining ident bytes (class, data encoding, version, ABI)
    // must match the output file being produced.
    if (!std.mem.eql(u8, ident[std.elf.MAGIC.len..], elf.mf.contents[std.elf.MAGIC.len..ident.len]))
        return diags.failParse(path, "bad ident", .{});
    const target_endian = elf.targetEndian();
    switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |class| {
            const ElfN = class.ElfN();
            const ehdr = try r.peekStruct(ElfN.Ehdr, target_endian);
            if (ehdr.type != .DYN) return diags.failParse(path, "unsupported dso type", .{});
            if (ehdr.machine != elf.ehdrField(.machine))
                return diags.failParse(path, "bad machine", .{});
            if (ehdr.phoff == 0 or ehdr.phnum <= 1)
                return diags.failParse(path, "no program headers", .{});
            // First pass over the program headers: find the dynamic segment.
            try fr.seekTo(ehdr.phoff);
            const dynamic_ph = for (0..ehdr.phnum) |_| {
                const ph = try r.peekStruct(ElfN.Phdr, target_endian);
                try r.discardAll(ehdr.phentsize);
                switch (ph.type) {
                    else => {},
                    .DYNAMIC => break ph,
                }
            } else return diags.failParse(path, "no dynamic segment", .{});
            // Each dynamic entry is a [tag, value] pair of target address words.
            const dynnum = std.math.divExact(
                u32,
                @intCast(dynamic_ph.filesz),
                @sizeOf(ElfN.Addr) * 2,
            ) catch return diags.failParse(
                path,
                "dynamic segment filesz (0x{x}) is not a multiple of entsize (0x{x})",
                .{ dynamic_ph.filesz, @sizeOf(ElfN.Addr) * 2 },
            );
            var strtab: ?ElfN.Addr = null;
            var strsz: ?ElfN.Addr = null;
            var soname: ?ElfN.Addr = null;
            try fr.seekTo(dynamic_ph.offset);
            for (0..dynnum) |_| {
                const tag = try r.takeInt(ElfN.Addr, target_endian);
                const val = try r.takeInt(ElfN.Addr, target_endian);
                switch (tag) {
                    else => {},
                    std.elf.DT_STRTAB => strtab = val,
                    std.elf.DT_STRSZ => strsz = val,
                    std.elf.DT_SONAME => soname = val,
                }
            }
            // Without a soname (or without a strtab to resolve it against),
            // fall back to depending on the file's basename.
            if (strtab == null or soname == null)
                return elf.loadDsoExact(std.fs.path.basename(path.sub_path));
            if (strsz) |size| if (soname.? >= size)
                return diags.failParse(path, "bad soname string", .{});
            // Second pass: find the PT_LOAD segment that maps the strtab so the
            // strtab's virtual address can be converted to a file offset.
            try fr.seekTo(ehdr.phoff);
            const ph = for (0..ehdr.phnum) |_| {
                const ph = try r.peekStruct(ElfN.Phdr, target_endian);
                try r.discardAll(ehdr.phentsize);
                switch (ph.type) {
                    else => {},
                    .LOAD => if (strtab.? >= ph.vaddr and
                        strtab.? + (strsz orelse 0) <= ph.vaddr + ph.filesz) break ph,
                }
            } else return diags.failParse(path, "strtab not part of a loaded segment", .{});
            try fr.seekTo(strtab.? + soname.? - ph.vaddr + ph.offset);
            return elf.loadDsoExact(r.peekSentinel(0) catch |err| switch (err) {
                error.StreamTooLong => return diags.failParse(path, "soname too long", .{}),
                else => |e| return e,
            });
        },
    }
}
/// Records `name` as a `DT_NEEDED` dependency, interning the name into the
/// `.dynstr` string table. Duplicate names are deduplicated by the map.
fn loadDsoExact(elf: *Elf, name: []const u8) !void {
    log.debug("loadDsoExact({f})", .{std.zig.fmtString(name)});
    const gpa = elf.base.comp.gpa;
    const dynstr_entry = try elf.string(.dynstr, name);
    try elf.needed.put(gpa, dynstr_entry, {});
}
2392
/// Runs the prelink step, converting any unexpected error into a link
/// diagnostic. Out-of-memory is the only error propagated verbatim.
pub fn prelink(elf: *Elf, prog_node: std.Progress.Node) !void {
    _ = prog_node;
    elf.prelinkInner() catch |err| {
        if (err == error.OutOfMemory) return error.OutOfMemory;
        return elf.base.comp.link_diags.fail("prelink failed: {t}", .{err});
    };
}
/// Performs prelink work: registers a synthetic `STT_FILE` input symbol named
/// after the emit path's stem for ZCU-generated code, and, when a `.dynamic`
/// section exists, writes out the full dynamic section contents along with
/// relocations for the entries whose values are section addresses.
fn prelinkInner(elf: *Elf) !void {
    const comp = elf.base.comp;
    const gpa = comp.gpa;
    try elf.symtab.ensureUnusedCapacity(gpa, 1);
    try elf.inputs.ensureUnusedCapacity(gpa, 1);
    // Synthetic input name: "<emit stem>_zcu".
    const zcu_name = try std.fmt.allocPrint(gpa, "{s}_zcu", .{
        std.fs.path.stem(elf.base.emit.sub_path),
    });
    defer gpa.free(zcu_name);
    const si = try elf.initSymbolAssumeCapacity(.{ .name = zcu_name, .type = .FILE, .shndx = .ABS });
    elf.inputs.addOneAssumeCapacity().* = .{
        .path = elf.base.emit,
        .member = null,
        .si = si,
    };

    if (elf.si.dynamic != .null) switch (elf.identClass()) {
        .NONE, _ => unreachable,
        inline else => |ct_class| {
            const ElfN = ct_class.ElfN();
            // `-z now` is expressed both as DF_BIND_NOW and DF_1_NOW.
            const flags: ElfN.Addr = if (elf.options.z_now) std.elf.DF_BIND_NOW else 0;
            const flags_1: ElfN.Addr = if (elf.options.z_now) std.elf.DF_1_NOW else 0;
            const needed_len = elf.needed.count();
            // Total entry count: one DT_NEEDED per dependency, the optional
            // entries below, plus the fixed 12-entry tail ending in DT_NULL.
            const dynamic_len = needed_len + @intFromBool(elf.options.soname != null) +
                @intFromBool(flags != 0) + @intFromBool(flags_1 != 0) +
                @intFromBool(comp.config.output_mode == .Exe) + 12;
            // Each entry is a [tag, value] pair of target address words.
            const dynamic_size: u32 = @intCast(@sizeOf(ElfN.Addr) * 2 * dynamic_len);
            const dynamic_ni = elf.si.dynamic.node(elf);
            try dynamic_ni.resize(&elf.mf, gpa, dynamic_size);
            switch (elf.shdrPtr(elf.si.dynamic.shndx(elf))) {
                inline else => |shdr| elf.targetStore(&shdr.size, dynamic_size),
            }
            const sec_dynamic = dynamic_ni.slice(&elf.mf);
            const dynamic_entries: [][2]ElfN.Addr = @ptrCast(@alignCast(sec_dynamic));
            var dynamic_index: usize = 0;
            // All DT_NEEDED entries come first.
            for (
                dynamic_entries[dynamic_index..][0..needed_len],
                elf.needed.keys(),
            ) |*dynamic_entry, needed| dynamic_entry.* = .{ std.elf.DT_NEEDED, needed };
            dynamic_index += needed_len;
            if (elf.options.soname) |soname| {
                dynamic_entries[dynamic_index] = .{ std.elf.DT_SONAME, try elf.string(.dynstr, soname) };
                dynamic_index += 1;
            }
            if (flags != 0) {
                dynamic_entries[dynamic_index] = .{ std.elf.DT_FLAGS, flags };
                dynamic_index += 1;
            }
            if (flags_1 != 0) {
                dynamic_entries[dynamic_index] = .{ std.elf.DT_FLAGS_1, flags_1 };
                dynamic_index += 1;
            }
            if (comp.config.output_mode == .Exe) {
                // DT_DEBUG is filled in at runtime by the dynamic loader.
                dynamic_entries[dynamic_index] = .{ std.elf.DT_DEBUG, 0 };
                dynamic_index += 1;
            }
            const rela_dyn_si = elf.si.got.shndx(elf).get(elf).rela_si;
            const rela_plt_si = elf.si.got_plt.shndx(elf).get(elf).rela_si;
            // Fixed tail of 12 entries; the reloc offsets below depend on this
            // exact order (DT_RELA is entry dynamic_len-12, DT_JMPREL is
            // dynamic_len-9, DT_PLTGOT is dynamic_len-7, DT_SYMTAB is
            // dynamic_len-5, DT_STRTAB is dynamic_len-3).
            dynamic_entries[dynamic_index..][0..12].* = .{
                .{ std.elf.DT_RELA, @intCast(elf.computeNodeVAddr(rela_dyn_si.node(elf))) },
                .{ std.elf.DT_RELASZ, elf.targetLoad(
                    &@field(elf.shdrPtr(rela_dyn_si.shndx(elf)), @tagName(ct_class)).size,
                ) },
                .{ std.elf.DT_RELAENT, @sizeOf(ElfN.Rela) },
                .{ std.elf.DT_JMPREL, @intCast(elf.computeNodeVAddr(rela_plt_si.node(elf))) },
                .{ std.elf.DT_PLTRELSZ, elf.targetLoad(
                    &@field(elf.shdrPtr(rela_plt_si.shndx(elf)), @tagName(ct_class)).size,
                ) },
                .{ std.elf.DT_PLTGOT, @intCast(elf.computeNodeVAddr(elf.si.got_plt.node(elf))) },
                .{ std.elf.DT_PLTREL, std.elf.DT_RELA },
                .{ std.elf.DT_SYMTAB, @intCast(elf.computeNodeVAddr(elf.si.dynsym.node(elf))) },
                .{ std.elf.DT_SYMENT, @sizeOf(ElfN.Sym) },
                .{ std.elf.DT_STRTAB, @intCast(elf.computeNodeVAddr(elf.si.dynstr.node(elf))) },
                .{ std.elf.DT_STRSZ, elf.targetLoad(
                    &@field(elf.shdrPtr(elf.si.dynstr.shndx(elf)), @tagName(ct_class)).size,
                ) },
                .{ std.elf.DT_NULL, 0 },
            };
            dynamic_index += 12;
            assert(dynamic_index == dynamic_len);
            if (elf.targetEndian() != native_endian) for (dynamic_entries) |*dynamic_entry|
                std.mem.byteSwapAllFields(@TypeOf(dynamic_entry.*), dynamic_entry);

            const dynamic_sym = elf.si.dynamic.get(elf);
            assert(dynamic_sym.loc_relocs == .none);
            dynamic_sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
            try elf.ensureUnusedRelocCapacity(elf.si.dynamic, 5);
            // Each address-valued tail entry gets an absolute relocation so it
            // tracks its target section if it moves. The reloc offset
            // `@sizeOf(Addr) * (2 * (dynamic_len - N) + 1)` is the byte offset
            // of the value word of the Nth-from-last entry.
            elf.addRelocAssumeCapacity(
                elf.si.dynamic,
                @sizeOf(ElfN.Addr) * (2 * (dynamic_len - 12) + 1),
                rela_dyn_si,
                0,
                .absAddr(elf),
            );
            elf.addRelocAssumeCapacity(
                elf.si.dynamic,
                @sizeOf(ElfN.Addr) * (2 * (dynamic_len - 9) + 1),
                rela_plt_si,
                0,
                .absAddr(elf),
            );
            elf.addRelocAssumeCapacity(
                elf.si.dynamic,
                @sizeOf(ElfN.Addr) * (2 * (dynamic_len - 7) + 1),
                elf.si.got_plt,
                0,
                .absAddr(elf),
            );
            elf.addRelocAssumeCapacity(
                elf.si.dynamic,
                @sizeOf(ElfN.Addr) * (2 * (dynamic_len - 5) + 1),
                elf.si.dynsym,
                0,
                .absAddr(elf),
            );
            elf.addRelocAssumeCapacity(
                elf.si.dynamic,
                @sizeOf(ElfN.Addr) * (2 * (dynamic_len - 3) + 1),
                elf.si.dynstr,
                0,
                .absAddr(elf),
            );
        },
    };
}
2525
/// Returns the virtual address of the symbol backing `nav`, recording an
/// absolute-address relocation at the location described by `reloc_info`.
pub fn getNavVAddr(
    elf: *Elf,
    pt: Zcu.PerThread,
    nav: InternPool.Nav.Index,
    reloc_info: link.File.RelocInfo,
) !u64 {
    const nav_si = try elf.navSymbol(pt.zcu, nav);
    return elf.getVAddr(reloc_info, nav_si);
}
2534
/// Returns the virtual address of the symbol backing `uav`, recording an
/// absolute-address relocation at the location described by `reloc_info`.
pub fn getUavVAddr(
    elf: *Elf,
    uav: InternPool.Index,
    reloc_info: link.File.RelocInfo,
) !u64 {
    const uav_si = try elf.uavSymbol(uav);
    return elf.getVAddr(reloc_info, uav_si);
}
2542
/// Records an absolute-address relocation against `target_si` at the location
/// identified by `reloc_info`, and returns the target symbol's current value.
pub fn getVAddr(elf: *Elf, reloc_info: link.File.RelocInfo, target_si: Symbol.Index) !u64 {
    const loc_si: Symbol.Index = @enumFromInt(reloc_info.parent.atom_index);
    try elf.addReloc(
        loc_si,
        reloc_info.offset,
        target_si,
        reloc_info.addend,
        .absAddr(elf),
    );
    // Read the symbol's value using the class-specific symbol representation.
    switch (elf.symPtr(target_si)) {
        inline else => |sym| return elf.targetLoad(&sym.value),
    }
}
2555
/// Creates a new output section: allocates a section header index, a symbol of
/// type `SECTION`, and a backing `MappedFile` node (placed under `segment_ni`
/// for executable/shared outputs, or directly under the file node for
/// relocatable outputs), then fills in the section header. Returns the new
/// section's symbol index.
fn addSection(elf: *Elf, segment_ni: MappedFile.Node.Index, opts: struct {
    name: []const u8 = "",
    type: std.elf.SHT = .NULL,
    flags: std.elf.SHF = .{},
    size: std.elf.Xword = 0,
    link: std.elf.Word = 0,
    info: std.elf.Word = 0,
    addralign: std.mem.Alignment = .@"1",
    entsize: std.elf.Word = 0,
    // Alignment of the backing node, which may exceed sh_addralign.
    node_align: std.mem.Alignment = .@"1",
    fixed: bool = false,
}) !Symbol.Index {
    switch (opts.type) {
        .NULL => assert(opts.size == 0),
        .PROGBITS => assert(opts.size > 0),
        else => {},
    }
    const gpa = elf.base.comp.gpa;
    try elf.nodes.ensureUnusedCapacity(gpa, 1);
    try elf.shdrs.ensureUnusedCapacity(gpa, 1);
    try elf.symtab.ensureUnusedCapacity(gpa, 1);

    const shstrtab_entry = try elf.string(.shstrtab, opts.name);
    // Allocate the next section header index. When shnum would reach
    // SHN_LORESERVE, the real count escapes into shdr[0].size (the standard
    // ELF extended-numbering scheme) and e_shnum is set to 0 thereafter.
    const shndx: Symbol.Index.Shndx, const new_shdr_size = shndx: switch (elf.ehdrPtr()) {
        inline else => |ehdr, class| {
            const shndx, const shnum = alloc_shndx: switch (elf.targetLoad(&ehdr.shnum)) {
                // Common case: count still fits in e_shnum.
                1...std.elf.SHN_LORESERVE - 2 => |shndx| {
                    const shnum = shndx + 1;
                    elf.targetStore(&ehdr.shnum, shnum);
                    break :alloc_shndx .{ shndx, shnum };
                },
                // Transition: move the count into shdr[0].size.
                std.elf.SHN_LORESERVE - 1 => |shndx| {
                    const shnum = shndx + 1;
                    elf.targetStore(&ehdr.shnum, 0);
                    elf.targetStore(&@field(elf.shdrPtr(.UNDEF), @tagName(class)).size, shnum);
                    break :alloc_shndx .{ shndx, shnum };
                },
                std.elf.SHN_LORESERVE...std.elf.SHN_HIRESERVE => unreachable,
                // Already in extended numbering: count lives in shdr[0].size.
                0 => {
                    const shnum_ptr = &@field(elf.shdrPtr(.UNDEF), @tagName(class)).size;
                    const shndx: u32 = @intCast(elf.targetLoad(shnum_ptr));
                    const shnum = shndx + 1;
                    elf.targetStore(shnum_ptr, shnum);
                    break :alloc_shndx .{ shndx, shnum };
                },
            };
            assert(shndx < @intFromEnum(Symbol.Index.Shndx.LORESERVE));
            break :shndx .{ @enumFromInt(shndx), elf.targetLoad(&ehdr.shentsize) * shnum };
        },
    };
    // Grow the section header table node if the new header doesn't fit,
    // over-allocating by the growth factor to amortize future additions.
    _, const shdr_node_size = elf.ni.shdr.location(&elf.mf).resolve(&elf.mf);
    if (new_shdr_size > shdr_node_size)
        try elf.ni.shdr.resize(&elf.mf, gpa, new_shdr_size +| new_shdr_size / MappedFile.growth_factor);
    const ni = try elf.mf.addLastChildNode(gpa, switch (elf.ehdrField(.type)) {
        .NONE, .CORE, _ => unreachable,
        .REL => elf.ni.file,
        .EXEC, .DYN => segment_ni,
    }, .{
        .size = opts.size,
        .alignment = opts.addralign.max(opts.node_align),
        .fixed = opts.fixed,
        .resized = opts.size > 0,
    });
    const si = elf.addSymbolAssumeCapacity();
    elf.nodes.appendAssumeCapacity(.{ .section = si });
    elf.shdrs.appendAssumeCapacity(.{ .si = si, .rela_si = .null, .rela_free = .none });
    si.get(elf).ni = ni;
    const addr = elf.computeNodeVAddr(ni);
    const offset = ni.fileLocation(&elf.mf, false).offset;
    try si.init(elf, .{ .value = addr, .type = .SECTION, .shndx = shndx });
    // Write the section header in native layout, then byte-swap the whole
    // struct if the target's endianness differs from the host's.
    switch (elf.shdrPtr(shndx)) {
        inline else => |shdr, class| {
            shdr.* = .{
                .name = shstrtab_entry,
                .type = opts.type,
                .flags = .{ .shf = opts.flags },
                .addr = @intCast(addr),
                .offset = @intCast(offset),
                .size = @intCast(opts.size),
                .link = opts.link,
                .info = opts.info,
                .addralign = @intCast(opts.addralign.toByteUnits()),
                .entsize = opts.entsize,
            };
            if (elf.targetEndian() != native_endian) std.mem.byteSwapAllFields(class.ElfN().Shdr, shdr);
        },
    }
    return si;
}
2645
/// Updates the header of section `si` to point at a `.shstrtab` entry for
/// `name`, interning the name if it is not already present.
fn renameSection(elf: *Elf, si: Symbol.Index, name: []const u8) !void {
    const name_off = try elf.string(.shstrtab, name);
    const shndx = si.shndx(elf);
    switch (elf.shdrPtr(shndx)) {
        inline else => |shdr| elf.targetStore(&shdr.name, name_off),
    }
}
2652
/// Returns the name of section `si` as a view into the `.shstrtab` section's
/// mapped bytes, terminated at the NUL byte following it.
fn sectionName(elf: *Elf, si: Symbol.Index) [:0]const u8 {
    const name_off = switch (elf.shdrPtr(si.shndx(elf))) {
        inline else => |shdr| elf.targetLoad(&shdr.name),
    };
    const shstrtab_bytes = elf.si.shstrtab.node(elf).slice(&elf.mf);
    const tail = shstrtab_bytes[name_off..];
    const name_len = std.mem.indexOfScalar(u8, tail, 0).?;
    return tail[0..name_len :0];
}
2659
/// Interns `key` into the chosen string table section and returns its byte
/// offset within that table. The empty string always maps to offset 0.
fn string(elf: *Elf, comptime section: enum { shstrtab, strtab, dynstr }, key: []const u8) !u32 {
    if (key.len == 0) return 0;
    const table = &@field(elf, @tagName(section));
    const section_si = @field(elf.si, @tagName(section));
    return table.get(elf, section_si, key);
}
2664
/// Records a single relocation at `offset` within `loc_si` against
/// `target_si`, first ensuring all backing storage has room for it.
pub fn addReloc(
    elf: *Elf,
    loc_si: Symbol.Index,
    offset: u64,
    target_si: Symbol.Index,
    addend: i64,
    @"type": Reloc.Type,
) !void {
    try elf.ensureUnusedRelocCapacity(loc_si, 1);
    return elf.addRelocAssumeCapacity(loc_si, offset, target_si, addend, @"type");
}
/// Ensures that `len` relocations can subsequently be added for `loc_si` via
/// `addRelocAssumeCapacity` without further allocation: grows the in-memory
/// reloc list, and grows whichever on-disk relocation storage applies to the
/// current output kind.
pub fn ensureUnusedRelocCapacity(elf: *Elf, loc_si: Symbol.Index, len: usize) !void {
    if (len == 0) return;
    const gpa = elf.base.comp.gpa;
    try elf.relocs.ensureUnusedCapacity(gpa, len);
    const class = elf.identClass();
    const rela_si, const rela_len = rela: switch (elf.ehdrField(.type)) {
        .NONE, .CORE, _ => unreachable,
        // Relocatable output: every reloc is written to the companion
        // `.rela<section>` section, which is created lazily here.
        .REL => {
            const shndx = loc_si.shndx(elf);
            const sh = shndx.get(elf);
            if (sh.rela_si == .null) {
                var stack = std.heap.stackFallback(32, gpa);
                const allocator = stack.get();

                const rela_name =
                    try std.fmt.allocPrint(allocator, ".rela{s}", .{elf.sectionName(sh.si)});
                defer allocator.free(rela_name);

                sh.rela_si = try elf.addSection(.none, .{
                    .name = rela_name,
                    .type = .RELA,
                    .link = @intFromEnum(elf.si.symtab.shndx(elf)),
                    .info = @intFromEnum(shndx),
                    .addralign = switch (class) {
                        .NONE, _ => unreachable,
                        .@"32" => .@"4",
                        .@"64" => .@"8",
                    },
                    .entsize = switch (class) {
                        .NONE, _ => unreachable,
                        inline else => |ct_class| @sizeOf(ct_class.ElfN().Rela),
                    },
                    .node_align = elf.mf.flags.block_size,
                });
            }
            break :rela .{ sh.rela_si, len };
        },
        // Executable/shared output: regular relocs need no file storage. The
        // only storage that may be needed is for the TLSLD GOT entry pair and
        // its single dynamic relocation, reserved here before the non-failing
        // add path can commit to them.
        .EXEC, .DYN => switch (elf.got.tlsld) {
            // TLSLD entry already allocated; nothing further to reserve.
            _ => return,
            .none => if (elf.si.dynamic != .null) {
                try elf.mf.updates.ensureUnusedCapacity(gpa, 1);
                const got_ni = elf.si.got.node(elf);
                _, const got_node_size = got_ni.location(&elf.mf).resolve(&elf.mf);
                // Room for the current GOT contents plus a 2-word TLSLD entry.
                const got_size = switch (class) {
                    .NONE, _ => unreachable,
                    inline else => |ct_class| (elf.got.len + 2) * @sizeOf(ct_class.ElfN().Addr),
                };
                if (got_size > got_node_size)
                    try got_ni.resize(&elf.mf, gpa, got_size +| got_size / MappedFile.growth_factor);
                // Only one dynamic (.rela) record may be emitted regardless of `len`.
                break :rela .{ elf.si.got.shndx(elf).get(elf).rela_si, 1 };
            } else return,
        },
    };
    // Grow the relevant .rela section's node so `rela_len` more records fit,
    // over-allocating by the growth factor. The header's `size` field itself
    // is only advanced when records are actually written.
    const rela_ni = rela_si.node(elf);
    _, const rela_node_size = rela_ni.location(&elf.mf).resolve(&elf.mf);
    const rela_size = switch (elf.shdrPtr(rela_si.shndx(elf))) {
        inline else => |shdr| elf.targetLoad(&shdr.size) + elf.targetLoad(&shdr.entsize) * rela_len,
    };
    if (rela_size > rela_node_size)
        try rela_ni.resize(&elf.mf, gpa, rela_size +| rela_size / MappedFile.growth_factor);
}
/// Appends a relocation record without allocating; callers must first reserve
/// space via `ensureUnusedRelocCapacity`. The new record is pushed onto the
/// front of the target symbol's doubly-linked list of incoming relocations.
/// For relocatable output, a corresponding `Rela` record is also written to
/// the location section's `.rela` companion; for executable/shared output,
/// machine-specific side effects (e.g. x86-64 TLSLD GOT setup) happen here.
pub fn addRelocAssumeCapacity(
    elf: *Elf,
    loc_si: Symbol.Index,
    offset: u64,
    target_si: Symbol.Index,
    addend: i64,
    @"type": Reloc.Type,
) void {
    const target = target_si.get(elf);
    const ri: Reloc.Index = @enumFromInt(elf.relocs.items.len);
    elf.relocs.addOneAssumeCapacity().* = .{
        .type = @"type",
        .prev = .none,
        .next = target.target_relocs,
        .loc = loc_si,
        .target = target_si,
        .index = index: switch (elf.ehdrField(.type)) {
            .NONE, .CORE, _ => unreachable,
            // Relocatable output: write an actual Rela record into the
            // section's `.rela` companion and remember its slot index.
            .REL => {
                const sh = loc_si.shndx(elf).get(elf);
                switch (elf.shdrPtr(sh.rela_si.shndx(elf))) {
                    inline else => |shdr, class| {
                        const Rela = class.ElfN().Rela;
                        const ent_size = elf.targetLoad(&shdr.entsize);
                        const rela_slice = sh.rela_si.node(elf).slice(&elf.mf);
                        // Reuse a freed slot if available; a free slot stores
                        // the next free index in its `offset` field. Otherwise
                        // append by bumping the section size.
                        const index: u32 = if (sh.rela_free.unwrap()) |index| alloc_index: {
                            const rela: *Rela = @ptrCast(@alignCast(
                                rela_slice[@intCast(ent_size * index)..][0..@intCast(ent_size)],
                            ));
                            sh.rela_free = @enumFromInt(rela.offset);
                            break :alloc_index index;
                        } else alloc_index: {
                            const old_size = elf.targetLoad(&shdr.size);
                            const new_size = old_size + ent_size;
                            elf.targetStore(&shdr.size, @intCast(new_size));
                            break :alloc_index @intCast(@divExact(old_size, ent_size));
                        };
                        const rela: *Rela = @ptrCast(@alignCast(
                            rela_slice[@intCast(ent_size * index)..][0..@intCast(ent_size)],
                        ));
                        rela.* = .{
                            .offset = @intCast(offset),
                            .info = .{
                                .type = @intCast(@"type".unwrap(elf)),
                                .sym = @intCast(@intFromEnum(target_si)),
                            },
                            .addend = @intCast(addend),
                        };
                        if (elf.targetEndian() != native_endian) std.mem.byteSwapAllFields(Rela, rela);
                        break :index .wrap(index);
                    },
                }
            },
            // Executable/shared output: no per-reloc file record. Some reloc
            // types trigger one-time lazy setup instead.
            .EXEC, .DYN => {
                switch (elf.ehdrField(.machine)) {
                    else => |machine| @panic(@tagName(machine)),
                    .AARCH64, .PPC64, .RISCV => {},
                    .X86_64 => switch (@"type".X86_64) {
                        else => {},
                        // First TLSLD reloc for a dynamically-linked output:
                        // allocate the 2-word GOT entry (zero-initialized) and
                        // emit a DTPMOD64 dynamic relocation for it. Capacity
                        // for both was reserved by ensureUnusedRelocCapacity.
                        .TLSLD => switch (elf.got.tlsld) {
                            _ => {},
                            .none => if (elf.si.dynamic != .null) {
                                const tlsld_index = elf.got.len;
                                elf.got.tlsld = .wrap(tlsld_index);
                                elf.got.len = tlsld_index + 2;
                                const got_addr = got_addr: switch (elf.shdrPtr(elf.si.got.shndx(elf))) {
                                    inline else => |shdr, class| {
                                        const addr_size = @sizeOf(class.ElfN().Addr);
                                        const old_size = addr_size * tlsld_index;
                                        const new_size = old_size + addr_size * 2;
                                        @memset(
                                            elf.si.got.node(elf).slice(&elf.mf)[old_size..new_size],
                                            0,
                                        );
                                        break :got_addr elf.targetLoad(&shdr.addr) + old_size;
                                    },
                                };
                                const rela_dyn_si = elf.si.got.shndx(elf).get(elf).rela_si;
                                const rela_dyn_ni = rela_dyn_si.node(elf);
                                switch (elf.shdrPtr(rela_dyn_si.shndx(elf))) {
                                    inline else => |shdr, class| {
                                        const Rela = class.ElfN().Rela;
                                        const old_size = elf.targetLoad(&shdr.size);
                                        const new_size = old_size + elf.targetLoad(&shdr.entsize);
                                        elf.targetStore(&shdr.size, new_size);
                                        const rela: *Rela = @ptrCast(@alignCast(rela_dyn_ni
                                            .slice(&elf.mf)[@intCast(old_size)..@intCast(new_size)]));
                                        // sym 0: DTPMOD64 for the current module.
                                        rela.* = .{
                                            .offset = @intCast(got_addr),
                                            .info = .{
                                                .type = @intFromEnum(std.elf.R_X86_64.DTPMOD64),
                                                .sym = 0,
                                            },
                                            .addend = 0,
                                        };
                                        if (elf.targetEndian() != native_endian)
                                            std.mem.byteSwapAllFields(Rela, rela);
                                    },
                                }
                                rela_dyn_ni.resizedAssumeCapacity(&elf.mf);
                            },
                        },
                    },
                }
                break :index .none;
            },
        },
        .offset = offset,
        .addend = addend,
    };
    // Link the new record at the head of the target's incoming-reloc list.
    switch (target.target_relocs) {
        .none => {},
        else => |target_ri| target_ri.get(elf).prev = ri,
    }
    target.target_relocs = ri;
}
2853
/// Writes the fully resolved value of the global `nav_index` into its section
/// node (see `updateNavInner`). Resource and relocation errors propagate
/// unchanged; any other failure is reported through `cgFail` as a codegen
/// diagnostic attached to this nav.
pub fn updateNav(elf: *Elf, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
    elf.updateNavInner(pt, nav_index) catch |err| switch (err) {
        // Caller-handled resource/limit errors pass through unchanged.
        error.OutOfMemory,
        error.Overflow,
        error.RelocationNotByteAligned,
        => |e| return e,
        // Everything else becomes a compile error attached to this nav.
        else => |e| return elf.base.cgFail(nav_index, "linker failed to update variable: {t}", .{e}),
    };
}
/// Implementation of `updateNav`: emits the bytes of the global `nav_index`
/// into its node within the appropriate section, creating the node on first
/// use and recording the symbol's size and relocations afterwards.
fn updateNavInner(elf: *Elf, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;

    const nav = ip.getNav(nav_index);
    const nav_val = nav.status.fully_resolved.val;
    // Pick the initializer that actually gets emitted: a variable's init
    // expression, nothing for externs and functions, otherwise the value itself.
    const nav_init = switch (ip.indexToKey(nav_val)) {
        else => nav_val,
        .variable => |variable| variable.init,
        .@"extern", .func => .none,
    };
    // Nothing to emit for externs/functions or zero-bit values.
    if (nav_init == .none or !Type.fromInterned(ip.typeOf(nav_init)).hasRuntimeBits(zcu)) return;

    const nmi = try elf.navMapIndex(zcu, nav_index);
    const si = nmi.symbol(elf);
    const ni = ni: {
        const sym = si.get(elf);
        switch (sym.ni) {
            // First emission: append a node at the end of the nav's section
            // and copy the section's shndx into the nav's symbol.
            .none => {
                try elf.nodes.ensureUnusedCapacity(gpa, 1);
                const sec_si = elf.navSection(ip, nav.status.fully_resolved);
                const ni = try elf.mf.addLastChildNode(gpa, sec_si.node(elf), .{
                    .alignment = pt.navAlignment(nav_index).toStdMem(),
                    .moved = true,
                });
                elf.nodes.appendAssumeCapacity(.{ .nav = nmi });
                sym.ni = ni;
                switch (elf.symPtr(si)) {
                    inline else => |sym_ptr, class| sym_ptr.shndx =
                        @field(elf.symPtr(sec_si), @tagName(class)).shndx,
                }
            },
            // Re-emission: drop the stale relocations before regenerating.
            else => si.deleteLocationRelocs(elf),
        }
        assert(sym.loc_relocs == .none);
        // Relocations produced during codegen are appended starting here.
        sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
        break :ni sym.ni;
    };

    var nw: MappedFile.Node.Writer = undefined;
    ni.writer(&elf.mf, gpa, &nw);
    defer nw.deinit();
    codegen.generateSymbol(
        &elf.base,
        pt,
        zcu.navSrcLoc(nav_index),
        .fromInterned(nav_init),
        &nw.interface,
        .{ .atom_index = @intFromEnum(si) },
    ) catch |err| switch (err) {
        // NOTE(review): `updateFuncInner` surfaces `nw.err.?` here instead of
        // assuming OutOfMemory — confirm whether OOM is always the cause.
        error.WriteFailed => return error.OutOfMemory,
        else => |e| return e,
    };
    // Record the number of bytes actually written as the symbol's size.
    switch (elf.symPtr(si)) {
        inline else => |sym| elf.targetStore(&sym.size, @intCast(nw.interface.end)),
    }
    si.applyLocationRelocs(elf);
}
2922
/// Allocates (or reuses) a symbol for the unnamed constant `uav_val` and
/// returns its index. The bytes are emitted later by `flushUav` during `idle`;
/// here the constant is only queued when it needs (re)emission: either it has
/// no node yet, or a stricter alignment than its current node's is required.
pub fn lowerUav(
    elf: *Elf,
    pt: Zcu.PerThread,
    uav_val: InternPool.Index,
    uav_align: InternPool.Alignment,
    src_loc: Zcu.LazySrcLoc,
) !codegen.SymbolResult {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;

    // Reserve capacity up front so the queueing below cannot fail.
    try elf.pending_uavs.ensureUnusedCapacity(gpa, 1);
    const umi = elf.uavMapIndex(uav_val) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        // `{t}` formats the error name, consistent with the other
        // diagnostics in this file.
        else => |e| return .{ .fail = try Zcu.ErrorMsg.create(
            gpa,
            src_loc,
            "linker failed to update constant: {t}",
            .{e},
        ) },
    };
    const si = umi.symbol(elf);
    if (switch (si.get(elf).ni) {
        .none => true,
        else => |ni| uav_align.toStdMem().order(ni.alignment(&elf.mf)).compare(.gt),
    }) {
        const gop = elf.pending_uavs.getOrPutAssumeCapacity(umi);
        if (gop.found_existing) {
            // Already queued: just raise the required alignment.
            gop.value_ptr.alignment = gop.value_ptr.alignment.max(uav_align);
        } else {
            gop.value_ptr.* = .{
                .alignment = uav_align,
                .src_loc = src_loc,
            };
            elf.const_prog_node.increaseEstimatedTotalItems(1);
        }
    }
    return .{ .sym_index = @intFromEnum(si) };
}
2961
/// Emits the machine code for `func_index` into its section node (see
/// `updateFuncInner`). Resource, relocation, and codegen errors propagate
/// unchanged; any other failure is reported as a codegen diagnostic attached
/// to the function's owner nav.
pub fn updateFunc(
    elf: *Elf,
    pt: Zcu.PerThread,
    func_index: InternPool.Index,
    mir: *const codegen.AnyMir,
) !void {
    elf.updateFuncInner(pt, func_index, mir) catch |err| switch (err) {
        // Caller-handled errors pass through unchanged.
        error.OutOfMemory,
        error.Overflow,
        error.RelocationNotByteAligned,
        error.CodegenFail,
        => |e| return e,
        // Everything else becomes a compile error attached to the owner nav.
        // `{t}` formats the error name, matching the sibling `updateNav`.
        else => |e| return elf.base.cgFail(
            pt.zcu.funcInfo(func_index).owner_nav,
            "linker failed to update function: {t}",
            .{e},
        ),
    };
}
/// Implementation of `updateFunc`: emits the generated machine code for
/// `func_index` into its node within its section, creating the node on first
/// use and recording the symbol's size and relocations afterwards.
fn updateFuncInner(
    elf: *Elf,
    pt: Zcu.PerThread,
    func_index: InternPool.Index,
    mir: *const codegen.AnyMir,
) !void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
    const func = zcu.funcInfo(func_index);
    const nav = ip.getNav(func.owner_nav);

    const nmi = try elf.navMapIndex(zcu, func.owner_nav);
    const si = nmi.symbol(elf);
    log.debug("updateFunc({f}) = {d}", .{ nav.fqn.fmt(ip), si });
    const ni = ni: {
        const sym = si.get(elf);
        switch (sym.ni) {
            // First emission: append a node at the end of the function's section.
            .none => {
                try elf.nodes.ensureUnusedCapacity(gpa, 1);
                const sec_si = elf.navSection(ip, nav.status.fully_resolved);
                const mod = zcu.navFileScope(func.owner_nav).mod.?;
                const target = &mod.resolved_target.result;
                const ni = try elf.mf.addLastChildNode(gpa, sec_si.node(elf), .{
                    // Explicit alignment wins (clamped up to the target minimum);
                    // otherwise use the target default, except in ReleaseSmall
                    // where the minimum alignment is used.
                    .alignment = switch (nav.status.fully_resolved.alignment) {
                        .none => switch (mod.optimize_mode) {
                            .Debug,
                            .ReleaseSafe,
                            .ReleaseFast,
                            => target_util.defaultFunctionAlignment(target),
                            .ReleaseSmall => target_util.minFunctionAlignment(target),
                        },
                        else => |a| a.maxStrict(target_util.minFunctionAlignment(target)),
                    }.toStdMem(),
                    .moved = true,
                });
                elf.nodes.appendAssumeCapacity(.{ .nav = nmi });
                sym.ni = ni;
                // Copy the section's shndx into the function's symbol.
                switch (elf.symPtr(si)) {
                    inline else => |sym_ptr, class| sym_ptr.shndx =
                        @field(elf.symPtr(sec_si), @tagName(class)).shndx,
                }
            },
            // Re-emission: drop the stale relocations before regenerating.
            else => si.deleteLocationRelocs(elf),
        }
        assert(sym.loc_relocs == .none);
        // Relocations produced during emission are appended starting here.
        sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
        break :ni sym.ni;
    };

    var nw: MappedFile.Node.Writer = undefined;
    ni.writer(&elf.mf, gpa, &nw);
    defer nw.deinit();
    codegen.emitFunction(
        &elf.base,
        pt,
        zcu.navSrcLoc(func.owner_nav),
        func_index,
        @intFromEnum(si),
        mir,
        &nw.interface,
        .none,
    ) catch |err| switch (err) {
        // Surface the writer's recorded error as the real cause.
        error.WriteFailed => return nw.err.?,
        else => |e| return e,
    };
    // Record the number of bytes written as the symbol's size.
    switch (elf.symPtr(si)) {
        inline else => |sym| elf.targetStore(&sym.size, @intCast(nw.interface.end)),
    }
    si.applyLocationRelocs(elf);
}
3052
/// Flushes the lazy `anyerror` const data, if that lazy symbol has been
/// created; does nothing otherwise.
pub fn updateErrorData(elf: *Elf, pt: Zcu.PerThread) !void {
    // Bail out early when no lazy const data for `anyerror_type` exists yet.
    const map_index = elf.lazy.getPtr(.const_data).map.getIndex(.anyerror_type) orelse return;
    const lmr: Node.LazyMapRef = .{
        .kind = .const_data,
        .index = @intCast(map_index),
    };
    elf.flushLazy(pt, lmr) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        error.CodegenFail => return error.LinkFailure,
        else => |e| return elf.base.comp.link_diags.fail("updateErrorData failed: {t}", .{e}),
    };
}
3063
/// Final flush: drives `idle` until no pending linker work remains.
/// `arena` and `prog_node` are unused by this implementation.
pub fn flush(
    elf: *Elf,
    arena: std.mem.Allocator,
    tid: Zcu.PerThread.Id,
    prog_node: std.Progress.Node,
) !void {
    _ = arena;
    _ = prog_node;
    while (true) {
        const more_work = try elf.idle(tid);
        if (!more_work) break;
    }
}
3074
/// Performs at most one unit of pending linker work, in priority order:
/// pending unnamed constants, then lazy symbols, then input sections, then
/// mapped-file layout updates. Returns true while more work remains, so
/// callers drive it in a loop (see `flush`).
pub fn idle(elf: *Elf, tid: Zcu.PerThread.Id) !bool {
    const comp = elf.base.comp;
    task: {
        // Each branch does one task then `break :task`s, so the `while`
        // loops below run at most one iteration per call.
        while (elf.pending_uavs.pop()) |pending_uav| {
            const sub_prog_node = elf.idleProgNode(tid, elf.const_prog_node, .{ .uav = pending_uav.key });
            defer sub_prog_node.end();
            elf.flushUav(
                .{ .zcu = comp.zcu.?, .tid = tid },
                pending_uav.key,
                pending_uav.value.alignment,
                pending_uav.value.src_loc,
            ) catch |err| switch (err) {
                error.OutOfMemory => return error.OutOfMemory,
                else => |e| return comp.link_diags.fail(
                    "linker failed to lower constant: {t}",
                    .{e},
                ),
            };
            break :task;
        }
        // Flush the next not-yet-flushed lazy symbol of either kind.
        var lazy_it = elf.lazy.iterator();
        while (lazy_it.next()) |lazy| if (lazy.value.pending_index < lazy.value.map.count()) {
            const pt: Zcu.PerThread = .{ .zcu = comp.zcu.?, .tid = tid };
            const lmr: Node.LazyMapRef = .{ .kind = lazy.key, .index = lazy.value.pending_index };
            lazy.value.pending_index += 1;
            const kind = switch (lmr.kind) {
                .code => "code",
                .const_data => "data",
            };
            var name: [std.Progress.Node.max_name_len]u8 = undefined;
            const sub_prog_node = elf.synth_prog_node.start(
                std.fmt.bufPrint(&name, "lazy {s} for {f}", .{
                    kind,
                    Type.fromInterned(lmr.lazySymbol(elf).ty).fmt(pt),
                }) catch &name,
                0,
            );
            defer sub_prog_node.end();
            elf.flushLazy(pt, lmr) catch |err| switch (err) {
                error.OutOfMemory => return error.OutOfMemory,
                else => |e| return comp.link_diags.fail(
                    "linker failed to lower lazy {s}: {t}",
                    .{ kind, e },
                ),
            };
            break :task;
        };
        // Copy the next pending input section's bytes into the output.
        if (elf.input_section_pending_index < elf.input_sections.items.len) {
            const isi: Node.InputSectionIndex = @enumFromInt(elf.input_section_pending_index);
            elf.input_section_pending_index += 1;
            const sub_prog_node = elf.idleProgNode(tid, elf.input_prog_node, elf.getNode(isi.symbol(elf).node(elf)));
            defer sub_prog_node.end();
            elf.flushInputSection(isi) catch |err| switch (err) {
                else => |e| {
                    const ii = isi.input(elf);
                    return comp.link_diags.fail(
                        "linker failed to read input section '{s}' from \"{f}{f}\": {t}",
                        .{
                            elf.sectionName(
                                elf.getNode(isi.symbol(elf).node(elf).parent(&elf.mf)).section,
                            ),
                            ii.path(elf).fmtEscapeString(),
                            fmtMemberString(ii.member(elf)),
                            e,
                        },
                    );
                },
            };
            break :task;
        }
        // Process queued mapped-file layout updates (moves/resizes); nodes
        // with neither flag set just tick the progress counter.
        while (elf.mf.updates.pop()) |ni| {
            const clean_moved = ni.cleanMoved(&elf.mf);
            const clean_resized = ni.cleanResized(&elf.mf);
            if (clean_moved or clean_resized) {
                const sub_prog_node = elf.idleProgNode(tid, elf.mf.update_prog_node, elf.getNode(ni));
                defer sub_prog_node.end();
                if (clean_moved) try elf.flushMoved(ni);
                if (clean_resized) try elf.flushResized(ni);
                break :task;
            } else elf.mf.update_prog_node.completeOne();
        }
    }
    // Report whether any queue still has outstanding work.
    if (elf.pending_uavs.count() > 0) return true;
    for (&elf.lazy.values) |lazy| if (lazy.map.count() > lazy.pending_index) return true;
    if (elf.input_sections.items.len > elf.input_section_pending_index) return true;
    if (elf.mf.updates.items.len > 0) return true;
    return false;
}
3163
/// Starts a progress sub-node whose name describes `node`: section name,
/// input path + section, nav fully-qualified name, or formatted uav value.
/// Formats into a stack buffer; on overflow the whole buffer is used as-is.
fn idleProgNode(
    elf: *Elf,
    tid: Zcu.PerThread.Id,
    prog_node: std.Progress.Node,
    node: Node,
) std.Progress.Node {
    var name: [std.Progress.Node.max_name_len]u8 = undefined;
    return prog_node.start(name: switch (node) {
        // Fallback: just use the node tag as the name.
        else => |tag| @tagName(tag),
        .section => |si| elf.sectionName(si),
        .input_section => |isi| {
            const ii = isi.input(elf);
            break :name std.fmt.bufPrint(&name, "{f}{f} {s}", .{
                ii.path(elf).fmtEscapeString(),
                fmtMemberString(ii.member(elf)),
                elf.sectionName(elf.getNode(isi.symbol(elf).node(elf).parent(&elf.mf)).section),
            }) catch &name;
        },
        .nav => |nmi| {
            const ip = &elf.base.comp.zcu.?.intern_pool;
            break :name ip.getNav(nmi.navIndex(elf)).fqn.toSlice(ip);
        },
        .uav => |umi| std.fmt.bufPrint(&name, "{f}", .{
            Value.fromInterned(umi.uavValue(elf)).fmtValue(.{ .zcu = elf.base.comp.zcu.?, .tid = tid }),
        }) catch &name,
    }, 0);
}
3191
/// Emits the bytes of the unnamed constant `umi` (queued by `lowerUav`) into
/// its node in the `.data` section, creating the node on first use. Returns
/// early without re-emitting when an existing node already satisfies
/// `uav_align`.
fn flushUav(
    elf: *Elf,
    pt: Zcu.PerThread,
    umi: Node.UavMapIndex,
    uav_align: InternPool.Alignment,
    src_loc: Zcu.LazySrcLoc,
) !void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;

    const uav_val = umi.uavValue(elf);
    const si = umi.symbol(elf);
    const ni = ni: {
        const sym = si.get(elf);
        switch (sym.ni) {
            // First emission: append a node to the data section and copy the
            // section's shndx into the constant's symbol.
            .none => {
                try elf.nodes.ensureUnusedCapacity(gpa, 1);
                const sec_si = elf.si.data;
                const ni = try elf.mf.addLastChildNode(gpa, sec_si.node(elf), .{
                    .alignment = uav_align.toStdMem(),
                    .moved = true,
                });
                elf.nodes.appendAssumeCapacity(.{ .uav = umi });
                sym.ni = ni;
                switch (elf.symPtr(si)) {
                    inline else => |sym_ptr, class| sym_ptr.shndx =
                        @field(elf.symPtr(sec_si), @tagName(class)).shndx,
                }
            },
            else => {
                // Existing node already aligned strictly enough: nothing to do.
                if (sym.ni.alignment(&elf.mf).order(uav_align.toStdMem()).compare(.gte)) return;
                // Otherwise re-emit: drop the stale relocations first.
                si.deleteLocationRelocs(elf);
            },
        }
        assert(sym.loc_relocs == .none);
        // Relocations produced during codegen are appended starting here.
        sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
        break :ni sym.ni;
    };

    var nw: MappedFile.Node.Writer = undefined;
    ni.writer(&elf.mf, gpa, &nw);
    defer nw.deinit();
    codegen.generateSymbol(
        &elf.base,
        pt,
        src_loc,
        .fromInterned(uav_val),
        &nw.interface,
        .{ .atom_index = @intFromEnum(si) },
    ) catch |err| switch (err) {
        error.WriteFailed => return error.OutOfMemory,
        else => |e| return e,
    };
    // Record the number of bytes written as the symbol's size.
    switch (elf.symPtr(si)) {
        inline else => |sym| elf.targetStore(&sym.size, @intCast(nw.interface.end)),
    }
    si.applyLocationRelocs(elf);
}
3250
/// Emits the compiler-synthesized ("lazy") symbol `lmr` into its node: code
/// goes into `.text`, const data into `.rodata`. Creates the node on first
/// use; re-emissions replace the old contents and relocations.
fn flushLazy(elf: *Elf, pt: Zcu.PerThread, lmr: Node.LazyMapRef) !void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;

    const lazy = lmr.lazySymbol(elf);
    const si = lmr.symbol(elf);
    const ni = ni: {
        const sym = si.get(elf);
        switch (sym.ni) {
            // First emission: append a node to the section for this kind and
            // copy the section's shndx into the symbol.
            .none => {
                try elf.nodes.ensureUnusedCapacity(gpa, 1);
                const sec_si: Symbol.Index = switch (lazy.kind) {
                    .code => .text,
                    .const_data => .rodata,
                };
                const ni = try elf.mf.addLastChildNode(gpa, sec_si.node(elf), .{ .moved = true });
                elf.nodes.appendAssumeCapacity(switch (lazy.kind) {
                    .code => .{ .lazy_code = @enumFromInt(lmr.index) },
                    .const_data => .{ .lazy_const_data = @enumFromInt(lmr.index) },
                });
                sym.ni = ni;
                switch (elf.symPtr(si)) {
                    inline else => |sym_ptr, class| sym_ptr.shndx =
                        @field(elf.symPtr(sec_si), @tagName(class)).shndx,
                }
            },
            // Re-emission: drop the stale relocations before regenerating.
            else => si.deleteLocationRelocs(elf),
        }
        assert(sym.loc_relocs == .none);
        // Relocations produced during codegen are appended starting here.
        sym.loc_relocs = @enumFromInt(elf.relocs.items.len);
        break :ni sym.ni;
    };

    // NOTE(review): `required_alignment` reported by codegen is currently
    // unused here — confirm whether the node's alignment should be updated.
    var required_alignment: InternPool.Alignment = .none;
    var nw: MappedFile.Node.Writer = undefined;
    ni.writer(&elf.mf, gpa, &nw);
    defer nw.deinit();
    try codegen.generateLazySymbol(
        &elf.base,
        pt,
        Type.fromInterned(lazy.ty).srcLocOrNull(pt.zcu) orelse .unneeded,
        lazy,
        &required_alignment,
        &nw.interface,
        .none,
        .{ .atom_index = @intFromEnum(si) },
    );
    // Record the number of bytes written as the symbol's size.
    switch (elf.symPtr(si)) {
        inline else => |sym| elf.targetStore(&sym.size, @intCast(nw.interface.end)),
    }
    si.applyLocationRelocs(elf);
}
3303
/// Copies the bytes of input section `isi` from its source file into the
/// section's node in the output, then applies its location relocations.
/// Empty sections are skipped without touching the file.
fn flushInputSection(elf: *Elf, isi: Node.InputSectionIndex) !void {
    const file_loc = isi.fileLocation(elf);
    if (file_loc.size == 0) return;
    const comp = elf.base.comp;
    const gpa = comp.gpa;
    const ii = isi.input(elf);
    const path = ii.path(elf);
    const file = try path.root_dir.handle.adaptToNewApi().openFile(comp.io, path.sub_path, .{});
    defer file.close(comp.io);
    var fr = file.reader(comp.io, &.{});
    // Seek to the section's data within the (possibly archive-member) file.
    try fr.seekTo(file_loc.offset);
    var nw: MappedFile.Node.Writer = undefined;
    const si = isi.symbol(elf);
    si.node(elf).writer(&elf.mf, gpa, &nw);
    defer nw.deinit();
    // A short read means the input file was truncated relative to its header.
    if (try nw.interface.sendFileAll(&fr, .limited(@intCast(file_loc.size))) != file_loc.size)
        return error.EndOfStream;
    si.applyLocationRelocs(elf);
}
3323
/// Re-records the file offset of node `ni` in the corresponding ELF header
/// field after the node moved within the output file: `shoff` for the section
/// header table, `p_offset` for segments (recursing into their children),
/// and `sh_offset` for sections. The ELF header itself must stay at offset 0.
fn flushFileOffset(elf: *Elf, ni: MappedFile.Node.Index) !void {
    switch (elf.getNode(ni)) {
        else => unreachable,
        .ehdr => assert(ni.fileLocation(&elf.mf, false).offset == 0),
        .shdr => switch (elf.ehdrPtr()) {
            inline else => |ehdr| elf.targetStore(
                &ehdr.shoff,
                @intCast(ni.fileLocation(&elf.mf, false).offset),
            ),
        },
        .segment => |phndx| {
            switch (elf.phdrSlice()) {
                inline else => |phdr| elf.targetStore(
                    &phdr[phndx].offset,
                    @intCast(ni.fileLocation(&elf.mf, false).offset),
                ),
            }
            // A segment's children moved with it; fix their offsets too.
            var child_it = ni.children(&elf.mf);
            while (child_it.next()) |child_ni| try elf.flushFileOffset(child_ni);
        },
        .section => |si| switch (elf.shdrPtr(si.shndx(elf))) {
            inline else => |shdr| elf.targetStore(&shdr.offset, @intCast(
                ni.fileLocation(&elf.mf, false).offset,
            )),
        },
    }
}
3351
/// Handles node `ni` having moved: updates file offsets and virtual addresses
/// in the affected ELF structures, rebases address-dependent data (dynamic
/// relocation offsets, PLT/GOT displacements), and updates symbol values.
/// Finally marks all children as moved so they get processed in turn.
fn flushMoved(elf: *Elf, ni: MappedFile.Node.Index) !void {
    switch (elf.getNode(ni)) {
        .file => unreachable,
        .ehdr, .shdr => try elf.flushFileOffset(ni),
        .segment => |phndx| {
            try elf.flushFileOffset(ni);
            switch (elf.phdrSlice()) {
                inline else => |phdr, class| {
                    const ph = &phdr[phndx];
                    switch (elf.targetLoad(&ph.type)) {
                        else => unreachable,
                        // NULL/LOAD segments keep their previous vaddr.
                        .NULL, .LOAD => return,
                        .DYNAMIC, .INTERP => {},
                        // The PHDR segment's new offset must also be mirrored
                        // into the ELF header's phoff.
                        .PHDR => @field(elf.ehdrPtr(), @tagName(class)).phoff = ph.offset,
                        .TLS, std.elf.PT.GNU_RELRO => {},
                    }
                    elf.targetStore(&ph.vaddr, @intCast(elf.computeNodeVAddr(ni)));
                    ph.paddr = ph.vaddr;
                },
            }
        },
        .section => |si| {
            try elf.flushFileOffset(ni);
            const addr = elf.computeNodeVAddr(ni);
            const shndx = si.shndx(elf);
            switch (elf.shdrPtr(shndx)) {
                inline else => |shdr, class| {
                    const flags = elf.targetLoad(&shdr.flags).shf;
                    if (flags.ALLOC) {
                        // In dynamic outputs, address-bearing data that refers
                        // to this section must be rebased before the new
                        // address is stored.
                        if (elf.si.dynamic != .null) {
                            if (si == elf.si.got) {
                                // Rebase .rela.dyn entry offsets that point
                                // into the moved GOT.
                                const old_addr = elf.targetLoad(&shdr.addr);
                                const rela_dyn_si = shndx.get(elf).rela_si;
                                const relas: []class.ElfN().Rela = @ptrCast(@alignCast(
                                    rela_dyn_si.node(elf).slice(&elf.mf)[0..@intCast(
                                        elf.targetLoad(&@field(
                                            elf.shdrPtr(rela_dyn_si.shndx(elf)),
                                            @tagName(class),
                                        ).size),
                                    )],
                                ));
                                switch (elf.ehdrField(.machine)) {
                                    else => |machine| @panic(@tagName(machine)),
                                    .AARCH64, .PPC64, .RISCV => {},
                                    .X86_64 => for (relas) |*rela| switch (@as(
                                        std.elf.R_X86_64,
                                        @enumFromInt(elf.targetLoad(&rela.info).type),
                                    )) {
                                        else => |@"type"| @panic(@tagName(@"type")),
                                        .RELATIVE => {},
                                        .GLOB_DAT, .DTPMOD64, .DTPOFF64 => elf.targetStore(
                                            &rela.offset,
                                            @intCast(elf.targetLoad(&rela.offset) - old_addr + addr),
                                        ),
                                    },
                                }
                            } else if (si == elf.si.got_plt) {
                                // Rebase .rela.plt entry offsets and patch the
                                // 32-bit displacements in .plt.sec entries that
                                // reference the moved .got.plt.
                                const target_endian = elf.targetEndian();
                                const old_addr = elf.targetLoad(&shdr.addr);
                                const rela_plt_si = shndx.get(elf).rela_si;
                                const relas: []class.ElfN().Rela = @ptrCast(@alignCast(
                                    rela_plt_si.node(elf).slice(&elf.mf)[0..@intCast(
                                        elf.targetLoad(&@field(
                                            elf.shdrPtr(rela_plt_si.shndx(elf)),
                                            @tagName(class),
                                        ).size),
                                    )],
                                ));
                                const plt_sec_slice = elf.si.plt_sec.node(elf).slice(&elf.mf);
                                switch (elf.ehdrField(.machine)) {
                                    else => |machine| @panic(@tagName(machine)),
                                    .AARCH64, .PPC64, .RISCV => {},
                                    .X86_64 => {
                                        for (relas) |*rela| switch (@as(
                                            std.elf.R_X86_64,
                                            @enumFromInt(elf.targetLoad(&rela.info).type),
                                        )) {
                                            else => |@"type"| @panic(@tagName(@"type")),
                                            .JUMP_SLOT => elf.targetStore(
                                                &rela.offset,
                                                @intCast(elf.targetLoad(&rela.offset) - old_addr + addr),
                                            ),
                                        };
                                        for (0..elf.got.plt.count()) |plt_index| {
                                            // Displacement field at byte 6 of each
                                            // 16-byte PLT entry; shift it by the
                                            // GOT's move delta (addr - old_addr).
                                            const slice = plt_sec_slice[16 * plt_index + 6 ..][0..4];
                                            std.mem.writeInt(
                                                i32,
                                                slice,
                                                @intCast(@as(i64, @bitCast(@as(u64, @bitCast(@as(
                                                    i64,
                                                    std.mem.readInt(i32, slice, target_endian),
                                                ))) -% old_addr +% addr))),
                                                target_endian,
                                            );
                                        }
                                    },
                                }
                            } else if (si == elf.si.plt_sec) {
                                // The PLT itself moved: its entries' relative
                                // displacements shift by the opposite delta
                                // (old_addr - addr).
                                const target_endian = elf.targetEndian();
                                const old_addr = elf.targetLoad(&shdr.addr);
                                const plt_sec_slice = ni.slice(&elf.mf);
                                switch (elf.ehdrField(.machine)) {
                                    else => |machine| @panic(@tagName(machine)),
                                    .AARCH64, .PPC64, .RISCV => {},
                                    .X86_64 => for (0..elf.got.plt.count()) |plt_index| {
                                        const slice = plt_sec_slice[16 * plt_index + 6 ..][0..4];
                                        std.mem.writeInt(
                                            i32,
                                            slice,
                                            @intCast(@as(i64, @bitCast(@as(u64, @bitCast(@as(
                                                i64,
                                                std.mem.readInt(i32, slice, target_endian),
                                            ))) -% addr +% old_addr))),
                                            target_endian,
                                        );
                                    },
                                }
                            }
                        }
                        elf.targetStore(&shdr.addr, @intCast(addr));
                        // Keep the section symbol's value in sync with sh_addr.
                        @field(elf.symPtr(si), @tagName(class)).value = shdr.addr;
                    }
                },
            }
            si.flushMoved(elf, addr);
        },
        .input_section => |isi| {
            // Rebase every input symbol that lives in this node by the
            // section's move delta (new_addr - old_addr).
            const old_addr = switch (elf.symPtr(isi.symbol(elf))) {
                inline else => |sym| elf.targetLoad(&sym.value),
            };
            const new_addr = elf.computeNodeVAddr(ni);
            const ii = isi.input(elf);
            var si = ii.symbol(elf);
            const end_si = ii.endSymbol(elf);
            while (cond: {
                si = si.next();
                break :cond si != end_si;
            }) {
                if (si.get(elf).ni != ni) continue;
                si.flushMoved(elf, switch (elf.symPtr(si)) {
                    inline else => |sym| elf.targetLoad(&sym.value),
                } - old_addr + new_addr);
            }
        },
        // Zcu-owned atoms: just recompute and store the new address.
        inline .nav, .uav, .lazy_code, .lazy_const_data => |mi| mi.symbol(elf).flushMoved(
            elf,
            elf.computeNodeVAddr(ni),
        ),
    }
    // Children moved along with this node; queue them for processing.
    try ni.childrenMoved(elf.base.comp.gpa, &elf.mf);
}
3503
/// MappedFile callback: node `ni` changed size. Mirrors the new size into the
/// matching ELF structure — `filesz`/`memsz` for segments, `size` for section
/// headers — and keeps derived metadata in sync (segment virtual-address
/// placement, and size entries inside the dynamic section).
fn flushResized(elf: *Elf, ni: MappedFile.Node.Index) !void {
    _, const size = ni.location(&elf.mf).resolve(&elf.mf);
    switch (elf.getNode(ni)) {
        .file => {},
        // The ELF header node is fixed-size and must never be resized.
        .ehdr => unreachable,
        .shdr => {},
        .segment => |phndx| switch (elf.phdrSlice()) {
            inline else => |phdr| {
                assert(elf.phdrs.items[phndx] == ni);
                const ph = &phdr[phndx];
                // The file size always tracks the node size exactly.
                elf.targetStore(&ph.filesz, @intCast(size));
                // Only growth beyond the already-reserved memory size needs
                // further work; shrinking within the reservation is free.
                if (size > elf.targetLoad(&ph.memsz)) {
                    switch (elf.targetLoad(&ph.type)) {
                        else => unreachable,
                        // Empty placeholder segments become LOAD once they gain
                        // contents, and revert to NULL when they empty again.
                        .NULL => if (size > 0) elf.targetStore(&ph.type, .LOAD),
                        .LOAD => if (size == 0) elf.targetStore(&ph.type, .NULL),
                        // For these segment kinds memsz simply tracks filesz;
                        // no address-space reservation is performed.
                        .DYNAMIC, .INTERP, .PHDR, std.elf.PT.GNU_RELRO => {
                            elf.targetStore(&ph.memsz, @intCast(size));
                            return;
                        },
                        // TLS also tracks exactly, but its children must still
                        // be notified of the layout change.
                        .TLS => {
                            elf.targetStore(&ph.memsz, @intCast(size));
                            return ni.childrenMoved(elf.base.comp.gpa, &elf.mf);
                        },
                    }
                    // NULL/LOAD fall through: over-reserve virtual address
                    // space (4x the file size, aligned) to amortize future growth.
                    const memsz = ni.alignment(&elf.mf).forward(@intCast(size * 4));
                    elf.targetStore(&ph.memsz, @intCast(memsz));
                    var vaddr = elf.targetLoad(&ph.vaddr);
                    var new_phndx = phndx;
                    // Bubble this segment past following NULL/LOAD segments
                    // until [vaddr, vaddr+memsz) no longer overlaps the next
                    // segment's address range.
                    for (phdr[phndx + 1 ..], phndx + 1..) |*next_ph, next_phndx| {
                        switch (elf.targetLoad(&next_ph.type)) {
                            else => unreachable,
                            .NULL, .LOAD => {},
                            // Fixed-placement segments end the movable run.
                            .DYNAMIC, .INTERP, .PHDR, .TLS => break,
                        }
                        const next_vaddr = elf.targetLoad(&next_ph.vaddr);
                        if (vaddr + memsz <= next_vaddr) break;
                        // Place our segment after this neighbor, swap the
                        // headers, and fix up the node<->phdr index mappings.
                        vaddr = next_vaddr + elf.targetLoad(&next_ph.memsz);
                        std.mem.swap(@TypeOf(ph.*), &phdr[new_phndx], next_ph);
                        const next_ni = elf.phdrs.items[next_phndx];
                        elf.phdrs.items[new_phndx] = next_ni;
                        elf.nodes.items(.data)[@intFromEnum(next_ni)] = .{ .segment = new_phndx };
                        new_phndx = @intCast(next_phndx);
                    }
                    if (new_phndx != phndx) {
                        // The segment actually moved: record its new address
                        // (paddr mirrors vaddr) and renotify its children.
                        const new_ph = &phdr[new_phndx];
                        elf.targetStore(&new_ph.vaddr, vaddr);
                        new_ph.paddr = new_ph.vaddr;
                        elf.phdrs.items[new_phndx] = ni;
                        elf.nodes.items(.data)[@intFromEnum(ni)] = .{ .segment = new_phndx };
                        try ni.childrenMoved(elf.base.comp.gpa, &elf.mf);
                    }
                }
            },
        },
        .section => |si| switch (elf.shdrPtr(si.shndx(elf))) {
            inline else => |shdr, class| {
                switch (elf.targetLoad(&shdr.type)) {
                    else => unreachable,
                    // Placeholder sections flip to PROGBITS when non-empty and back.
                    .NULL => if (size > 0) elf.targetStore(&shdr.type, .PROGBITS),
                    .PROGBITS => if (size == 0) elf.targetStore(&shdr.type, .NULL),
                    // Sizes of these sections are maintained by their own
                    // update paths — presumably; nothing is done here.
                    .SYMTAB, .DYNAMIC, .REL, .DYNSYM => return,
                    .STRTAB => {
                        if (elf.si.dynamic != .null) {
                            // When .dynstr resizes, the DT_STRSZ entry in the
                            // dynamic section must be kept in sync. Both sides
                            // are stored target-endian, so a raw copy of
                            // shdr.size is correct (no byte swap needed).
                            if (si == elf.si.dynstr) {
                                const dynamic_entries: [][2]class.ElfN().Addr = @ptrCast(@alignCast(
                                    elf.si.dynamic.node(elf).slice(&elf.mf),
                                ));
                                for (dynamic_entries) |*dynamic_entry|
                                    switch (elf.targetLoad(&dynamic_entry[0])) {
                                        else => {},
                                        std.elf.DT_STRSZ => dynamic_entry[1] = shdr.size,
                                    };
                            }
                        }
                        return;
                    },
                    .RELA => {
                        if (elf.si.dynamic != .null) {
                            // Keep dynamic relocation size entries in sync:
                            // DT_RELASZ for the GOT's rela section, and
                            // DT_PLTRELSZ for the PLT GOT's rela section.
                            // As above, the raw target-endian copy is intended.
                            if (si == elf.si.got.shndx(elf).get(elf).rela_si) {
                                const dynamic_entries: [][2]class.ElfN().Addr = @ptrCast(@alignCast(
                                    elf.si.dynamic.node(elf).slice(&elf.mf),
                                ));
                                for (dynamic_entries) |*dynamic_entry|
                                    switch (elf.targetLoad(&dynamic_entry[0])) {
                                        else => {},
                                        std.elf.DT_RELASZ => dynamic_entry[1] = shdr.size,
                                    };
                            } else if (si == elf.si.got_plt.shndx(elf).get(elf).rela_si) {
                                const dynamic_entries: [][2]class.ElfN().Addr = @ptrCast(@alignCast(
                                    elf.si.dynamic.node(elf).slice(&elf.mf),
                                ));
                                for (dynamic_entries) |*dynamic_entry|
                                    switch (elf.targetLoad(&dynamic_entry[0])) {
                                        else => {},
                                        std.elf.DT_PLTRELSZ => dynamic_entry[1] = shdr.size,
                                    };
                            }
                        }
                        return;
                    },
                }
                // NULL/PROGBITS fall through: record the new section size.
                elf.targetStore(&shdr.size, @intCast(size));
            },
        },
        .input_section, .nav, .uav, .lazy_code, .lazy_const_data => {},
    }
}
3612
/// Frontend entry point for export updates. Delegates to `updateExportsInner`
/// and translates its errors into the set callers expect: out-of-memory
/// propagates unchanged, while any link failure — whether already reported or
/// reported here via the diagnostics channel — surfaces as `error.AnalysisFail`.
pub fn updateExports(
    elf: *Elf,
    pt: Zcu.PerThread,
    exported: Zcu.Exported,
    export_indices: []const Zcu.Export.Index,
) !void {
    elf.updateExportsInner(pt, exported, export_indices) catch |err| {
        switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            // Already reported through link_diags by the inner call.
            error.LinkFailure => return error.AnalysisFail,
            else => |e| {
                // Report the unexpected error; `fail` always yields
                // LinkFailure, which we translate the same way.
                switch (elf.base.comp.link_diags.fail(
                    "linker failed to update exports: {t}",
                    .{e},
                )) {
                    error.LinkFailure => return error.AnalysisFail,
                }
            },
        }
    };
}
/// Fallible implementation of `updateExports`; error translation for the
/// frontend happens in the caller. Resolves (or lowers) the symbol backing
/// `exported`, then points every export alias in `export_indices` at that
/// symbol's node, copying its value, size, and section index.
fn updateExportsInner(
    elf: *Elf,
    pt: Zcu.PerThread,
    exported: Zcu.Exported,
    export_indices: []const Zcu.Export.Index,
) !void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;

    switch (exported) {
        .nav => |nav| log.debug("updateExports({f})", .{ip.getNav(nav).fqn.fmt(ip)}),
        .uav => |uav| log.debug("updateExports(@as({f}, {f}))", .{
            Type.fromInterned(ip.typeOf(uav)).fmt(pt),
            Value.fromInterned(uav).fmtValue(pt),
        }),
    }
    // Reserve one symtab slot per export up front — presumably so the
    // `globalSymbol` calls in the loop below cannot fail mid-loop; confirm.
    try elf.symtab.ensureUnusedCapacity(gpa, export_indices.len);
    // Resolve the exported symbol and the STT type its aliases inherit:
    // navs derive their type from nav status, uavs are always OBJECT data.
    const exported_si: Symbol.Index, const @"type": std.elf.STT = switch (exported) {
        .nav => |nav| .{
            try elf.navSymbol(zcu, nav),
            navType(ip, ip.getNav(nav).status, elf.base.comp.config.any_non_single_threaded),
        },
        .uav => |uav| .{ @enumFromInt(switch (try elf.lowerUav(
            pt,
            uav,
            Type.fromInterned(ip.typeOf(uav)).abiAlignment(zcu),
            export_indices[0].ptr(zcu).src,
        )) {
            .sym_index => |si| si,
            .fail => |em| {
                // Ownership of the error message stays here; report and bail.
                defer em.destroy(gpa);
                return elf.base.comp.link_diags.fail("{s}", .{em.msg});
            },
        }), .OBJECT },
    };
    // Drain queued linker work so the exported symbol's location is final
    // before we snapshot it — NOTE(review): assumed; verify against idle().
    while (try elf.idle(pt.tid)) {}
    const exported_ni = exported_si.node(elf);
    // Snapshot value/size/shndx of the exported symbol for the aliases below.
    const value, const size, const shndx = switch (elf.symPtr(exported_si)) {
        inline else => |exported_sym| .{
            elf.targetLoad(&exported_sym.value),
            exported_sym.size,
            exported_sym.shndx,
        },
    };
    for (export_indices) |export_index| {
        const @"export" = export_index.ptr(zcu);
        const name = @"export".opts.name.toSlice(ip);
        // Create or fetch the named global with the export's binding and
        // visibility; `link_once` has no ELF mapping here.
        const export_si = try elf.globalSymbol(.{
            .name = name,
            .type = @"type",
            .bind = switch (@"export".opts.linkage) {
                .internal => .LOCAL,
                .strong => .GLOBAL,
                .weak => .WEAK,
                .link_once => return error.LinkOnceUnsupported,
            },
            .visibility = switch (@"export".opts.visibility) {
                .default => .DEFAULT,
                .hidden => .HIDDEN,
                .protected => .PROTECTED,
            },
        });
        // Alias the export to the exported symbol's node and copy its metadata.
        export_si.get(elf).ni = exported_ni;
        switch (elf.symPtr(export_si)) {
            inline else => |export_sym| {
                export_sym.size = @intCast(size);
                export_sym.shndx = shndx;
            },
        }
        export_si.flushMoved(elf, value);
    }
}
3703
/// Currently a no-op: this backend does not yet act on export deletion —
/// NOTE(review): assumed intentional stub; confirm against other link.File
/// implementations.
pub fn deleteExport(elf: *Elf, exported: Zcu.Exported, name: InternPool.NullTerminatedString) void {
    // Discard all parameters in a single statement; nothing to do yet.
    _ = .{ elf, exported, name };
}
3709
/// Debug helper: pretty-prints the entire node tree, starting at the root,
/// to stderr. Holds the stderr lock for the duration of the dump; any
/// printing errors are deliberately ignored.
pub fn dump(elf: *Elf, tid: Zcu.PerThread.Id) void {
    const stderr, _ = std.debug.lockStderrWriter(&.{});
    defer std.debug.unlockStderrWriter();
    elf.printNode(tid, stderr, .root, 0) catch {};
}
3715
/// Recursively writes a human-readable description of `ni` and all of its
/// children to `w`, indented one space per tree level. Each line shows the
/// node's tag, tag-specific details, and its MappedFile location/flags.
/// Leaf nodes are additionally followed by a hex/ASCII dump of their bytes.
pub fn printNode(
    elf: *Elf,
    tid: Zcu.PerThread.Id,
    w: *std.Io.Writer,
    ni: MappedFile.Node.Index,
    indent: usize,
) !void {
    const node = elf.getNode(ni);
    try w.splatByteAll(' ', indent);
    try w.writeAll(@tagName(node));
    // Tag-specific detail in parentheses after the tag name.
    switch (node) {
        else => {},
        .segment => |phndx| switch (elf.phdrSlice()) {
            inline else => |phdr| {
                const ph = &phdr[phndx];
                try w.writeByte('(');
                const pt = elf.targetLoad(&ph.type);
                // Prefer the enum tag name; otherwise scan PT's named integer
                // constants (e.g. OS-specific values declared as decls) for a
                // match, and finally fall back to raw hex.
                if (std.enums.tagName(std.elf.PT, pt)) |pt_name|
                    try w.writeAll(pt_name)
                else inline for (@typeInfo(std.elf.PT).@"enum".decls) |decl| {
                    const decl_val = @field(std.elf.PT, decl.name);
                    if (@TypeOf(decl_val) != std.elf.PT) continue;
                    if (pt == @field(std.elf.PT, decl.name)) break try w.writeAll(decl.name);
                } else try w.print("0x{x}", .{pt});
                try w.writeAll(", ");
                // Segment permission flags, in conventional R/W/X order.
                const pf = elf.targetLoad(&ph.flags);
                if (pf.R) try w.writeByte('R');
                if (pf.W) try w.writeByte('W');
                if (pf.X) try w.writeByte('X');
                try w.writeByte(')');
            },
        },
        .section => |si| try w.print("({s})", .{elf.sectionName(si)}),
        .input_section => |isi| {
            // Shows the originating input path, optional archive member, and
            // the output section this input section was placed into.
            const ii = isi.input(elf);
            try w.print("({f}{f}, {s})", .{
                ii.path(elf).fmtEscapeString(),
                fmtMemberString(ii.member(elf)),
                elf.sectionName(elf.getNode(isi.symbol(elf).node(elf).parent(&elf.mf)).section),
            });
        },
        .nav => |nmi| {
            // Shows the nav's type and fully-qualified name.
            const zcu = elf.base.comp.zcu.?;
            const ip = &zcu.intern_pool;
            const nav = ip.getNav(nmi.navIndex(elf));
            try w.print("({f}, {f})", .{
                Type.fromInterned(nav.typeOf(ip)).fmt(.{ .zcu = zcu, .tid = tid }),
                nav.fqn.fmt(ip),
            });
        },
        .uav => |umi| {
            // Shows the unnamed value's type and rendered value.
            const zcu = elf.base.comp.zcu.?;
            const val: Value = .fromInterned(umi.uavValue(elf));
            try w.print("({f}, {f})", .{
                val.typeOf(zcu).fmt(.{ .zcu = zcu, .tid = tid }),
                val.fmtValue(.{ .zcu = zcu, .tid = tid }),
            });
        },
        inline .lazy_code, .lazy_const_data => |lmi| try w.print("({f})", .{
            Type.fromInterned(lmi.lazySymbol(elf).ty).fmt(.{
                .zcu = elf.base.comp.zcu.?,
                .tid = tid,
            }),
        }),
    }
    {
        // MappedFile bookkeeping: index, resolved location, and node flags.
        const mf_node = &elf.mf.nodes.items[@intFromEnum(ni)];
        const off, const size = mf_node.location().resolve(&elf.mf);
        try w.print(" index={d} offset=0x{x} size=0x{x} align=0x{x}{s}{s}{s}{s}\n", .{
            @intFromEnum(ni),
            off,
            size,
            mf_node.flags.alignment.toByteUnits(),
            if (mf_node.flags.fixed) " fixed" else "",
            if (mf_node.flags.moved) " moved" else "",
            if (mf_node.flags.resized) " resized" else "",
            if (mf_node.flags.has_content) " has_content" else "",
        });
    }
    // Recurse into children; only leaves get their contents dumped below.
    var leaf = true;
    var child_it = ni.children(&elf.mf);
    while (child_it.next()) |child_ni| {
        leaf = false;
        try elf.printNode(tid, w, child_ni, indent + 1);
    }
    if (!leaf) return;
    const file_loc = ni.fileLocation(&elf.mf, false);
    var address = file_loc.offset;
    // Empty leaf: print just the offset, no byte dump.
    if (file_loc.size == 0) {
        try w.splatByteAll(' ', indent + 1);
        try w.print("{x:0>8}\n", .{address});
        return;
    }
    // Classic 16-bytes-per-line hex dump with an ASCII gutter; non-printable
    // bytes are rendered as '.'.
    const line_len = 0x10;
    var line_it = std.mem.window(
        u8,
        elf.mf.contents[@intCast(file_loc.offset)..][0..@intCast(file_loc.size)],
        line_len,
        line_len,
    );
    while (line_it.next()) |line_bytes| : (address += line_len) {
        try w.splatByteAll(' ', indent + 1);
        try w.print("{x:0>8}  ", .{address});
        for (line_bytes) |byte| try w.print("{x:0>2} ", .{byte});
        // Pad the hex column so short final lines keep the gutter aligned.
        try w.splatByteAll(' ', 3 * (line_len - line_bytes.len) + 1);
        for (line_bytes) |byte| try w.writeByte(if (std.ascii.isPrint(byte)) byte else '.');
        try w.writeByte('\n');
    }
}
3825
3826const assert = std.debug.assert;
3827const builtin = @import("builtin");
3828const codegen = @import("../codegen.zig");
3829const Compilation = @import("../Compilation.zig");
3830const Elf = @This();
3831const InternPool = @import("../InternPool.zig");
3832const link = @import("../link.zig");
3833const log = std.log.scoped(.link);
3834const MappedFile = @import("MappedFile.zig");
3835const native_endian = builtin.cpu.arch.endian();
3836const std = @import("std");
3837const target_util = @import("../target.zig");
3838const Type = @import("../Type.zig");
3839const Value = @import("../Value.zig");
3840const Zcu = @import("../Zcu.zig");