master
1//! The overall strategy here is to load all the object file data into memory
2//! as inputs are parsed. During `prelink`, as much linking as possible is
3//! performed without any knowledge of functions and globals provided by the
4//! Zcu. If there is no Zcu, effectively all linking is done in `prelink`.
5//!
6//! `updateFunc`, `updateNav`, `updateExports`, and `deleteExport` are handled
7//! by merely tracking references to the relevant functions and globals. All
8//! the linking logic between objects and Zcu happens in `flush`. Many
9//! components of the final output are computed on-the-fly at this time rather
10//! than being precomputed and stored separately.
11
12const Wasm = @This();
13const Archive = @import("Wasm/Archive.zig");
14const Object = @import("Wasm/Object.zig");
15pub const Flush = @import("Wasm/Flush.zig");
16
17const builtin = @import("builtin");
18const native_endian = builtin.cpu.arch.endian();
19
20const build_options = @import("build_options");
21
22const std = @import("std");
23const Allocator = std.mem.Allocator;
24const Cache = std.Build.Cache;
25const Path = Cache.Path;
26const assert = std.debug.assert;
27const fs = std.fs;
28const leb = std.leb;
29const log = std.log.scoped(.link);
30const mem = std.mem;
31
32const Mir = @import("../codegen/wasm/Mir.zig");
33const CodeGen = @import("../codegen/wasm/CodeGen.zig");
34const abi = @import("../codegen/wasm/abi.zig");
35const Compilation = @import("../Compilation.zig");
36const Dwarf = @import("Dwarf.zig");
37const InternPool = @import("../InternPool.zig");
38const Zcu = @import("../Zcu.zig");
39const codegen = @import("../codegen.zig");
40const dev = @import("../dev.zig");
41const link = @import("../link.zig");
42const trace = @import("../tracy.zig").trace;
43const wasi_libc = @import("../libs/wasi_libc.zig");
44const Value = @import("../Value.zig");
45
46base: link.File,
47/// Null-terminated strings, indexes have type String and string_table provides
48/// lookup.
49///
50/// There are a couple of sites that add things here without adding
51/// corresponding string_table entries. For such cases, when implementing
52/// serialization/deserialization, they should be adjusted to prefix that data
53/// with a null byte so that deserialization does not attempt to create
54/// string_table entries for them. Alternately those sites could be moved to
55/// use a different byte array for this purpose.
56string_bytes: std.ArrayList(u8),
57/// Sometimes we have logic that wants to borrow string bytes to store
58/// arbitrary things in there. In this case it is not allowed to intern new
59/// strings during this time. This safety lock is used to detect misuses.
60string_bytes_lock: std.debug.SafetyLock = .{},
61/// Omitted when serializing linker state.
62string_table: String.Table,
63/// Symbol name of the entry function to export
64entry_name: OptionalString,
65/// When true, will allow undefined symbols
66import_symbols: bool,
67/// Set of *global* symbol names to export to the host environment.
68export_symbol_names: []const []const u8,
69/// When defined, sets the start of the data section.
70global_base: ?u64,
71/// When defined, sets the initial memory size of the memory.
72initial_memory: ?u64,
73/// When defined, sets the maximum memory size of the memory.
74max_memory: ?u64,
75/// When true, will export the function table to the host environment.
76export_table: bool,
77/// Output name of the file
78name: []const u8,
79/// List of relocatable files to be linked into the final binary.
80objects: std.ArrayList(Object) = .{},
81
82func_types: std.AutoArrayHashMapUnmanaged(FunctionType, void) = .empty,
83/// Provides a mapping of both imports and provided functions to symbol name.
84/// Local functions may be unnamed.
85/// Key is symbol name, however the `FunctionImport` may have an name override for the import name.
86object_function_imports: std.AutoArrayHashMapUnmanaged(String, FunctionImport) = .empty,
87/// All functions for all objects.
88object_functions: std.ArrayList(ObjectFunction) = .empty,
89
90/// Provides a mapping of both imports and provided globals to symbol name.
91/// Local globals may be unnamed.
92object_global_imports: std.AutoArrayHashMapUnmanaged(String, GlobalImport) = .empty,
93/// All globals for all objects.
94object_globals: std.ArrayList(ObjectGlobal) = .empty,
95
96/// All table imports for all objects.
97object_table_imports: std.AutoArrayHashMapUnmanaged(String, TableImport) = .empty,
98/// All parsed table sections for all objects.
99object_tables: std.ArrayList(Table) = .empty,
100
101/// All memory imports for all objects.
102object_memory_imports: std.AutoArrayHashMapUnmanaged(String, MemoryImport) = .empty,
103/// All parsed memory sections for all objects.
104object_memories: std.ArrayList(ObjectMemory) = .empty,
105
106/// All relocations from all objects concatenated. `relocs_start` marks the end
107/// point of object relocations and start point of Zcu relocations.
108object_relocations: std.MultiArrayList(ObjectRelocation) = .empty,
109
110/// List of initialization functions. These must be called in order of priority
111/// by the (synthetic) `__wasm_call_ctors` function.
112object_init_funcs: std.ArrayList(InitFunc) = .empty,
113
114/// The data section of an object has many segments. Each segment corresponds
115/// logically to an object file's .data section, or .rodata section. In
116/// the case of `-fdata-sections` there will be one segment per data symbol.
117object_data_segments: std.ArrayList(ObjectDataSegment) = .empty,
118/// Each segment has many data symbols, which correspond logically to global
119/// constants.
120object_datas: std.ArrayList(ObjectData) = .empty,
121object_data_imports: std.AutoArrayHashMapUnmanaged(String, ObjectDataImport) = .empty,
122/// Non-synthetic section that can essentially be mem-cpy'd into place after performing relocations.
123object_custom_segments: std.AutoArrayHashMapUnmanaged(ObjectSectionIndex, CustomSegment) = .empty,
124
125/// All comdat information for all objects.
126object_comdats: std.ArrayList(Comdat) = .empty,
127/// A table that maps the relocations to be performed where the key represents
128/// the section (across all objects) that the slice of relocations applies to.
129object_relocations_table: std.AutoArrayHashMapUnmanaged(ObjectSectionIndex, ObjectRelocation.Slice) = .empty,
130/// Incremented across all objects in order to enable calculation of `ObjectSectionIndex` values.
131object_total_sections: u32 = 0,
132/// All comdat symbols from all objects concatenated.
133object_comdat_symbols: std.MultiArrayList(Comdat.Symbol) = .empty,
134
135/// Relocations to be emitted into an object file. Remains empty when not
136/// emitting an object file.
137out_relocs: std.MultiArrayList(OutReloc) = .empty,
138/// List of locations within `string_bytes` that must be patched with the virtual
139/// memory address of a Uav during `flush`.
140/// When emitting an object file, `out_relocs` is used instead.
141uav_fixups: std.ArrayList(UavFixup) = .empty,
142/// List of locations within `string_bytes` that must be patched with the virtual
143/// memory address of a Nav during `flush`.
144/// When emitting an object file, `out_relocs` is used instead.
145/// No functions here only global variables.
146nav_fixups: std.ArrayList(NavFixup) = .empty,
147/// When a nav reference is a function pointer, this tracks the required function
148/// table entry index that needs to overwrite the code in the final output.
149func_table_fixups: std.ArrayList(FuncTableFixup) = .empty,
150/// Symbols to be emitted into an object file. Remains empty when not emitting
151/// an object file.
152symbol_table: std.AutoArrayHashMapUnmanaged(String, void) = .empty,
153
154/// When importing objects from the host environment, a name must be supplied.
155/// LLVM uses "env" by default when none is given.
156/// This value is passed to object files since wasm tooling conventions provides
157/// no way to specify the module name in the symbol table.
158object_host_name: OptionalString,
159
160/// Memory section
161memories: std.wasm.Memory = .{ .limits = .{
162 .min = 0,
163 .max = 0,
164 .flags = .{ .has_max = false, .is_shared = false },
165} },
166
167/// `--verbose-link` output.
168/// Initialized on creation, appended to as inputs are added, printed during `flush`.
169/// String data is allocated into Compilation arena.
170dump_argv_list: std.ArrayList([]const u8),
171
172preloaded_strings: PreloadedStrings,
173
174/// This field is used when emitting an object; `navs_exe` used otherwise.
175/// Does not include externs since that data lives elsewhere.
176navs_obj: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ZcuDataObj) = .empty,
177/// This field is unused when emitting an object; `navs_obj` used otherwise.
178/// Does not include externs since that data lives elsewhere.
179navs_exe: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, ZcuDataExe) = .empty,
180/// Tracks all InternPool values referenced by codegen. Needed for outputting
181/// the data segment. This one does not track ref count because object files
182/// require using max LEB encoding for these references anyway.
183uavs_obj: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuDataObj) = .empty,
184/// Tracks ref count to optimize LEB encodings for UAV references.
185uavs_exe: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuDataExe) = .empty,
186/// Sparse table of uavs that need to be emitted with greater alignment than
187/// the default for the type.
188overaligned_uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment) = .empty,
189/// When the key is an enum type, this represents a `@tagName` function.
190zcu_funcs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuFunc) = .empty,
191nav_exports: std.AutoArrayHashMapUnmanaged(NavExport, Zcu.Export.Index) = .empty,
192uav_exports: std.AutoArrayHashMapUnmanaged(UavExport, Zcu.Export.Index) = .empty,
193imports: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, void) = .empty,
194
195dwarf: ?Dwarf = null,
196
197flush_buffer: Flush = .{},
198
199/// Empty until `prelink`. There it is populated based on object files.
200/// Next, it is copied into `Flush.missing_exports` just before `flush`
201/// and that data is used during `flush`.
202missing_exports: std.AutoArrayHashMapUnmanaged(String, void) = .empty,
203entry_resolution: FunctionImport.Resolution = .unresolved,
204
205/// Empty when outputting an object.
206function_exports: std.AutoArrayHashMapUnmanaged(String, FunctionIndex) = .empty,
207hidden_function_exports: std.AutoArrayHashMapUnmanaged(String, FunctionIndex) = .empty,
208global_exports: std.ArrayList(GlobalExport) = .empty,
209/// Tracks the value at the end of prelink.
210global_exports_len: u32 = 0,
211
212/// Ordered list of non-import functions that will appear in the final binary.
213/// Empty until prelink.
214functions: std.AutoArrayHashMapUnmanaged(FunctionImport.Resolution, void) = .empty,
215/// Tracks the value at the end of prelink, at which point `functions`
216/// contains only object file functions, and nothing from the Zcu yet.
217functions_end_prelink: u32 = 0,
218
219function_imports_len_prelink: u32 = 0,
220data_imports_len_prelink: u32 = 0,
221/// At the end of prelink, this is populated with needed functions from
222/// objects.
223///
224/// During the Zcu phase, entries are not deleted from this table
225/// because doing so would be irreversible when a `deleteExport` call is
226/// handled. However, entries are added during the Zcu phase when extern
227/// functions are passed to `updateNav`.
228///
229/// `flush` gets a copy of this table, and then Zcu exports are applied to
230/// remove elements from the table, and the remainder are either undefined
231/// symbol errors, or import section entries depending on the output mode.
232function_imports: std.AutoArrayHashMapUnmanaged(String, FunctionImportId) = .empty,
233
234/// At the end of prelink, this is populated with data symbols needed by
235/// objects.
236///
237/// During the Zcu phase, entries are not deleted from this table
238/// because doing so would be irreversible when a `deleteExport` call is
239/// handled. However, entries are added during the Zcu phase when extern
240/// functions are passed to `updateNav`.
241///
242/// `flush` gets a copy of this table, and then Zcu exports are applied to
243/// remove elements from the table, and the remainder are either undefined
244/// symbol errors, or symbol table entries depending on the output mode.
245data_imports: std.AutoArrayHashMapUnmanaged(String, DataImportId) = .empty,
246/// Set of data symbols that will appear in the final binary. Used to populate
247/// `Flush.data_segments` before sorting.
248data_segments: std.AutoArrayHashMapUnmanaged(DataSegmentId, void) = .empty,
249
250/// Ordered list of non-import globals that will appear in the final binary.
251/// Empty until prelink.
252globals: std.AutoArrayHashMapUnmanaged(GlobalImport.Resolution, void) = .empty,
253/// Tracks the value at the end of prelink, at which point `globals`
254/// contains only object file globals, and nothing from the Zcu yet.
255globals_end_prelink: u32 = 0,
256global_imports: std.AutoArrayHashMapUnmanaged(String, GlobalImportId) = .empty,
257
258/// Ordered list of non-import tables that will appear in the final binary.
259/// Empty until prelink.
260tables: std.AutoArrayHashMapUnmanaged(TableImport.Resolution, void) = .empty,
261table_imports: std.AutoArrayHashMapUnmanaged(String, TableImport.Index) = .empty,
262
263/// All functions that have had their address taken and therefore might be
264/// called via a `call_indirect` function.
265zcu_indirect_function_set: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, void) = .empty,
266object_indirect_function_import_set: std.AutoArrayHashMapUnmanaged(String, void) = .empty,
267object_indirect_function_set: std.AutoArrayHashMapUnmanaged(ObjectFunctionIndex, void) = .empty,
268
269error_name_table_ref_count: u32 = 0,
270tag_name_table_ref_count: u32 = 0,
271
272/// Set to true if any `GLOBAL_INDEX` relocation is encountered with
273/// `SymbolFlags.tls` set to true. This is for objects only; final
274/// value must be this OR'd with the same logic for zig functions
275/// (set to true if any threadlocal global is used).
276any_tls_relocs: bool = false,
277any_passive_inits: bool = false,
278
279/// All MIR instructions for all Zcu functions.
280mir_instructions: std.MultiArrayList(Mir.Inst) = .{},
281/// Corresponds to `mir_instructions`.
282mir_extra: std.ArrayList(u32) = .empty,
283/// All local types for all Zcu functions.
284mir_locals: std.ArrayList(std.wasm.Valtype) = .empty,
285
286params_scratch: std.ArrayList(std.wasm.Valtype) = .empty,
287returns_scratch: std.ArrayList(std.wasm.Valtype) = .empty,
288
289/// All Zcu error names in order, null-terminated, concatenated. No need to
290/// serialize; trivially reconstructed.
291error_name_bytes: std.ArrayList(u8) = .empty,
292/// For each Zcu error, in order, offset into `error_name_bytes` where the name
293/// is stored. No need to serialize; trivially reconstructed.
294error_name_offs: std.ArrayList(u32) = .empty,
295
296tag_name_bytes: std.ArrayList(u8) = .empty,
297tag_name_offs: std.ArrayList(u32) = .empty,
298
299pub const TagNameOff = extern struct {
300 off: u32,
301 len: u32,
302};
303
304/// Index into `Wasm.zcu_indirect_function_set`.
305pub const ZcuIndirectFunctionSetIndex = enum(u32) {
306 _,
307};
308
309pub const UavFixup = extern struct {
310 uavs_exe_index: UavsExeIndex,
311 /// Index into `string_bytes`.
312 offset: u32,
313 addend: u32,
314};
315
316pub const NavFixup = extern struct {
317 navs_exe_index: NavsExeIndex,
318 /// Index into `string_bytes`.
319 offset: u32,
320 addend: u32,
321};
322
323pub const FuncTableFixup = extern struct {
324 table_index: ZcuIndirectFunctionSetIndex,
325 /// Index into `string_bytes`.
326 offset: u32,
327};
328
329/// Index into `objects`.
330pub const ObjectIndex = enum(u32) {
331 _,
332
333 pub fn ptr(index: ObjectIndex, wasm: *const Wasm) *Object {
334 return &wasm.objects.items[@intFromEnum(index)];
335 }
336};
337
338/// Index into `Wasm.functions`.
339pub const FunctionIndex = enum(u32) {
340 _,
341
342 pub fn ptr(index: FunctionIndex, wasm: *const Wasm) *FunctionImport.Resolution {
343 return &wasm.functions.keys()[@intFromEnum(index)];
344 }
345
346 pub fn fromIpNav(wasm: *const Wasm, nav_index: InternPool.Nav.Index) ?FunctionIndex {
347 return fromResolution(wasm, .fromIpNav(wasm, nav_index));
348 }
349
350 pub fn fromTagNameType(wasm: *const Wasm, tag_type: InternPool.Index) ?FunctionIndex {
351 const zcu_func: ZcuFunc.Index = @enumFromInt(wasm.zcu_funcs.getIndex(tag_type) orelse return null);
352 return fromResolution(wasm, .pack(wasm, .{ .zcu_func = zcu_func }));
353 }
354
355 pub fn fromSymbolName(wasm: *const Wasm, name: String) ?FunctionIndex {
356 if (wasm.object_function_imports.getPtr(name)) |import| {
357 return fromResolution(wasm, import.resolution);
358 }
359 if (wasm.function_exports.get(name)) |index| return index;
360 if (wasm.hidden_function_exports.get(name)) |index| return index;
361 return null;
362 }
363
364 pub fn fromResolution(wasm: *const Wasm, resolution: FunctionImport.Resolution) ?FunctionIndex {
365 const i = wasm.functions.getIndex(resolution) orelse return null;
366 return @enumFromInt(i);
367 }
368};
369
370pub const GlobalExport = extern struct {
371 name: String,
372 global_index: GlobalIndex,
373};
374
375/// 0. Index into `Flush.function_imports`
376/// 1. Index into `functions`.
377///
378/// Note that function_imports indexes are subject to swap removals during
379/// `flush`.
380pub const OutputFunctionIndex = enum(u32) {
381 _,
382
383 pub fn fromResolution(wasm: *const Wasm, resolution: FunctionImport.Resolution) ?OutputFunctionIndex {
384 return fromFunctionIndex(wasm, FunctionIndex.fromResolution(wasm, resolution) orelse return null);
385 }
386
387 pub fn fromFunctionIndex(wasm: *const Wasm, index: FunctionIndex) OutputFunctionIndex {
388 return @enumFromInt(wasm.flush_buffer.function_imports.entries.len + @intFromEnum(index));
389 }
390
391 pub fn fromObjectFunction(wasm: *const Wasm, index: ObjectFunctionIndex) OutputFunctionIndex {
392 return fromResolution(wasm, .fromObjectFunction(wasm, index)).?;
393 }
394
395 pub fn fromObjectFunctionHandlingWeak(wasm: *const Wasm, index: ObjectFunctionIndex) OutputFunctionIndex {
396 const ptr = index.ptr(wasm);
397 if (ptr.flags.binding == .weak) {
398 const name = ptr.name.unwrap().?;
399 const import = wasm.object_function_imports.getPtr(name).?;
400 assert(import.resolution != .unresolved);
401 return fromResolution(wasm, import.resolution).?;
402 }
403 return fromResolution(wasm, .fromObjectFunction(wasm, index)).?;
404 }
405
406 pub fn fromIpIndex(wasm: *const Wasm, ip_index: InternPool.Index) OutputFunctionIndex {
407 const zcu = wasm.base.comp.zcu.?;
408 const ip = &zcu.intern_pool;
409 return switch (ip.indexToKey(ip_index)) {
410 .@"extern" => |ext| {
411 const name = wasm.getExistingString(ext.name.toSlice(ip)).?;
412 return fromSymbolName(wasm, name);
413 },
414 else => fromResolution(wasm, .fromIpIndex(wasm, ip_index)).?,
415 };
416 }
417
418 pub fn fromIpNav(wasm: *const Wasm, nav_index: InternPool.Nav.Index) OutputFunctionIndex {
419 const zcu = wasm.base.comp.zcu.?;
420 const ip = &zcu.intern_pool;
421 const nav = ip.getNav(nav_index);
422 return fromIpIndex(wasm, nav.status.fully_resolved.val);
423 }
424
425 pub fn fromTagNameType(wasm: *const Wasm, tag_type: InternPool.Index) OutputFunctionIndex {
426 return fromFunctionIndex(wasm, FunctionIndex.fromTagNameType(wasm, tag_type).?);
427 }
428
429 pub fn fromSymbolName(wasm: *const Wasm, name: String) OutputFunctionIndex {
430 if (wasm.flush_buffer.function_imports.getIndex(name)) |i| return @enumFromInt(i);
431 return fromFunctionIndex(wasm, FunctionIndex.fromSymbolName(wasm, name).?);
432 }
433};
434
435/// Index into `Wasm.globals`.
436pub const GlobalIndex = enum(u32) {
437 _,
438
439 /// This is only accurate when not emitting an object and there is a Zcu.
440 pub const stack_pointer: GlobalIndex = @enumFromInt(0);
441
442 /// Same as `stack_pointer` but with a safety assertion.
443 pub fn stackPointer(wasm: *const Wasm) ObjectGlobal.Index {
444 const comp = wasm.base.comp;
445 assert(comp.config.output_mode != .Obj);
446 assert(comp.zcu != null);
447 return .stack_pointer;
448 }
449
450 pub fn ptr(index: GlobalIndex, f: *const Flush) *Wasm.GlobalImport.Resolution {
451 return &f.globals.items[@intFromEnum(index)];
452 }
453
454 pub fn fromIpNav(wasm: *const Wasm, nav_index: InternPool.Nav.Index) ?GlobalIndex {
455 const i = wasm.globals.getIndex(.fromIpNav(wasm, nav_index)) orelse return null;
456 return @enumFromInt(i);
457 }
458
459 pub fn fromObjectGlobal(wasm: *const Wasm, i: ObjectGlobalIndex) GlobalIndex {
460 return @enumFromInt(wasm.globals.getIndex(.fromObjectGlobal(wasm, i)).?);
461 }
462
463 pub fn fromObjectGlobalHandlingWeak(wasm: *const Wasm, index: ObjectGlobalIndex) GlobalIndex {
464 const global = index.ptr(wasm);
465 return if (global.flags.binding == .weak)
466 fromSymbolName(wasm, global.name.unwrap().?)
467 else
468 fromObjectGlobal(wasm, index);
469 }
470
471 pub fn fromSymbolName(wasm: *const Wasm, name: String) GlobalIndex {
472 const import = wasm.object_global_imports.getPtr(name).?;
473 return @enumFromInt(wasm.globals.getIndex(import.resolution).?);
474 }
475};
476
477/// Index into `tables`.
478pub const TableIndex = enum(u32) {
479 _,
480
481 pub fn ptr(index: TableIndex, f: *const Flush) *Wasm.TableImport.Resolution {
482 return &f.tables.items[@intFromEnum(index)];
483 }
484
485 pub fn fromObjectTable(wasm: *const Wasm, i: ObjectTableIndex) TableIndex {
486 return @enumFromInt(wasm.tables.getIndex(.fromObjectTable(i)).?);
487 }
488
489 pub fn fromSymbolName(wasm: *const Wasm, name: String) TableIndex {
490 const import = wasm.object_table_imports.getPtr(name).?;
491 return @enumFromInt(wasm.tables.getIndex(import.resolution).?);
492 }
493};
494
495/// The first N indexes correspond to input objects (`objects`) array.
496/// After that, the indexes correspond to the `source_locations` array,
497/// representing a location in a Zig source file that can be pinpointed
498/// precisely via AST node and token.
499pub const SourceLocation = enum(u32) {
500 /// From the Zig compilation unit but no precise source location.
501 zig_object_nofile = std.math.maxInt(u32) - 1,
502 none = std.math.maxInt(u32),
503 _,
504
505 /// Index into `source_locations`.
506 pub const Index = enum(u32) {
507 _,
508 };
509
510 pub const Unpacked = union(enum) {
511 none,
512 zig_object_nofile,
513 object_index: ObjectIndex,
514 source_location_index: Index,
515 };
516
517 pub fn pack(unpacked: Unpacked, wasm: *const Wasm) SourceLocation {
518 _ = wasm;
519 return switch (unpacked) {
520 .zig_object_nofile => .zig_object_nofile,
521 .none => .none,
522 .object_index => |object_index| @enumFromInt(@intFromEnum(object_index)),
523 .source_location_index => @panic("TODO"),
524 };
525 }
526
527 pub fn unpack(sl: SourceLocation, wasm: *const Wasm) Unpacked {
528 return switch (sl) {
529 .zig_object_nofile => .zig_object_nofile,
530 .none => .none,
531 _ => {
532 const i = @intFromEnum(sl);
533 if (i < wasm.objects.items.len) return .{ .object_index = @enumFromInt(i) };
534 const sl_index = i - wasm.objects.items.len;
535 _ = sl_index;
536 @panic("TODO");
537 },
538 };
539 }
540
541 pub fn fromObject(object_index: ObjectIndex, wasm: *const Wasm) SourceLocation {
542 return pack(.{ .object_index = object_index }, wasm);
543 }
544
545 pub fn addError(sl: SourceLocation, wasm: *Wasm, comptime f: []const u8, args: anytype) void {
546 const diags = &wasm.base.comp.link_diags;
547 switch (sl.unpack(wasm)) {
548 .none => unreachable,
549 .zig_object_nofile => diags.addError("zig compilation unit: " ++ f, args),
550 .object_index => |i| diags.addError("{f}: " ++ f, .{i.ptr(wasm).path} ++ args),
551 .source_location_index => @panic("TODO"),
552 }
553 }
554
555 pub fn addNote(
556 sl: SourceLocation,
557 err: *link.Diags.ErrorWithNotes,
558 comptime f: []const u8,
559 args: anytype,
560 ) void {
561 err.addNote(f, args);
562 const err_msg = &err.diags.msgs.items[err.index];
563 err_msg.notes[err.note_slot - 1].source_location = .{ .wasm = sl };
564 }
565
566 pub fn fail(sl: SourceLocation, diags: *link.Diags, comptime format: []const u8, args: anytype) error{LinkFailure} {
567 return diags.failSourceLocation(.{ .wasm = sl }, format, args);
568 }
569
570 pub fn string(
571 sl: SourceLocation,
572 msg: []const u8,
573 bundle: *std.zig.ErrorBundle.Wip,
574 wasm: *const Wasm,
575 ) Allocator.Error!std.zig.ErrorBundle.String {
576 return switch (sl.unpack(wasm)) {
577 .none => try bundle.addString(msg),
578 .zig_object_nofile => try bundle.printString("zig compilation unit: {s}", .{msg}),
579 .object_index => |i| {
580 const obj = i.ptr(wasm);
581 return if (obj.archive_member_name.slice(wasm)) |obj_name|
582 try bundle.printString("{f} ({s}): {s}", .{ obj.path, std.fs.path.basename(obj_name), msg })
583 else
584 try bundle.printString("{f}: {s}", .{ obj.path, msg });
585 },
586 .source_location_index => @panic("TODO"),
587 };
588 }
589};
590
591/// The lower bits of this ABI-match the flags here:
592/// https://github.com/WebAssembly/tool-conventions/blob/df8d737539eb8a8f446ba5eab9dc670c40dfb81e/Linking.md#symbol-table-subsection
593/// The upper bits are used for nefarious purposes.
594pub const SymbolFlags = packed struct(u32) {
595 binding: Binding = .strong,
596 /// Indicating that this is a hidden symbol. Hidden symbols are not to be
597 /// exported when performing the final link, but may be linked to other
598 /// modules.
599 visibility_hidden: bool = false,
600 padding0: u1 = 0,
601 /// For non-data symbols, this must match whether the symbol is an import
602 /// or is defined; for data symbols, determines whether a segment is
603 /// specified.
604 undefined: bool = false,
605 /// The symbol is intended to be exported from the wasm module to the host
606 /// environment. This differs from the visibility flags in that it affects
607 /// static linking.
608 exported: bool = false,
609 /// The symbol uses an explicit symbol name, rather than reusing the name
610 /// from a wasm import. This allows it to remap imports from foreign
611 /// WebAssembly modules into local symbols with different names.
612 explicit_name: bool = false,
613 /// The symbol is intended to be included in the linker output, regardless
614 /// of whether it is used by the program. Same meaning as `retain`.
615 no_strip: bool = false,
616 /// The symbol resides in thread local storage.
617 tls: bool = false,
618 /// The symbol represents an absolute address. This means its offset is
619 /// relative to the start of the wasm memory as opposed to being relative
620 /// to a data segment.
621 absolute: bool = false,
622
623 // Above here matches the tooling conventions ABI.
624
625 padding1: u13 = 0,
626 /// Zig-specific. Dead things are allowed to be garbage collected.
627 alive: bool = false,
628 /// Zig-specific. This symbol comes from an object that must be included in
629 /// the final link.
630 must_link: bool = false,
631 /// Zig-specific.
632 global_type: GlobalType4 = .zero,
633 /// Zig-specific.
634 limits_has_max: bool = false,
635 /// Zig-specific.
636 limits_is_shared: bool = false,
637 /// Zig-specific.
638 ref_type: RefType1 = .funcref,
639
640 pub const Binding = enum(u2) {
641 strong = 0,
642 /// Indicating that this is a weak symbol. When linking multiple modules
643 /// defining the same symbol, all weak definitions are discarded if any
644 /// strong definitions exist; then if multiple weak definitions exist all
645 /// but one (unspecified) are discarded; and finally it is an error if more
646 /// than one definition remains.
647 weak = 1,
648 /// Indicating that this is a local symbol. Local symbols are not to be
649 /// exported, or linked to other modules/sections. The names of all
650 /// non-local symbols must be unique, but the names of local symbols
651 /// are not considered for uniqueness. A local function or global
652 /// symbol cannot reference an import.
653 local = 2,
654 };
655
656 pub fn initZigSpecific(flags: *SymbolFlags, must_link: bool, no_strip: bool) void {
657 flags.no_strip = no_strip;
658 flags.alive = false;
659 flags.must_link = must_link;
660 flags.global_type = .zero;
661 flags.limits_has_max = false;
662 flags.limits_is_shared = false;
663 flags.ref_type = .funcref;
664 }
665
666 pub fn isIncluded(flags: SymbolFlags, is_dynamic: bool) bool {
667 return flags.exported or
668 (is_dynamic and !flags.visibility_hidden) or
669 (flags.no_strip and flags.must_link);
670 }
671
672 pub fn isExported(flags: SymbolFlags, is_dynamic: bool) bool {
673 if (flags.undefined or flags.binding == .local) return false;
674 if (is_dynamic and !flags.visibility_hidden) return true;
675 return flags.exported;
676 }
677
678 /// Returns the name as how it will be output into the final object
679 /// file or binary. When `merge` is true, this will return the
680 /// short name. i.e. ".rodata". When false, it returns the entire name instead.
681 pub fn outputName(flags: SymbolFlags, name: []const u8, merge: bool) []const u8 {
682 if (flags.tls) return ".tdata";
683 if (!merge) return name;
684 if (mem.startsWith(u8, name, ".rodata.")) return ".rodata";
685 if (mem.startsWith(u8, name, ".text.")) return ".text";
686 if (mem.startsWith(u8, name, ".data.")) return ".data";
687 if (mem.startsWith(u8, name, ".bss.")) return ".bss";
688 return name;
689 }
690
691 /// Masks off the Zig-specific stuff.
692 pub fn toAbiInteger(flags: SymbolFlags) u32 {
693 var copy = flags;
694 copy.initZigSpecific(false, false);
695 return @bitCast(copy);
696 }
697};
698
699pub const GlobalType4 = packed struct(u4) {
700 valtype: Valtype3,
701 mutable: bool,
702
703 pub const zero: GlobalType4 = @bitCast(@as(u4, 0));
704
705 pub fn to(gt: GlobalType4) ObjectGlobal.Type {
706 return .{
707 .valtype = gt.valtype.to(),
708 .mutable = gt.mutable,
709 };
710 }
711};
712
713pub const Valtype3 = enum(u3) {
714 i32,
715 i64,
716 f32,
717 f64,
718 v128,
719
720 pub fn from(v: std.wasm.Valtype) Valtype3 {
721 return switch (v) {
722 .i32 => .i32,
723 .i64 => .i64,
724 .f32 => .f32,
725 .f64 => .f64,
726 .v128 => .v128,
727 };
728 }
729
730 pub fn to(v: Valtype3) std.wasm.Valtype {
731 return switch (v) {
732 .i32 => .i32,
733 .i64 => .i64,
734 .f32 => .f32,
735 .f64 => .f64,
736 .v128 => .v128,
737 };
738 }
739};
740
741/// Index into `Wasm.navs_obj`.
742pub const NavsObjIndex = enum(u32) {
743 _,
744
745 pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Nav.Index {
746 return &wasm.navs_obj.keys()[@intFromEnum(i)];
747 }
748
749 pub fn value(i: @This(), wasm: *const Wasm) *ZcuDataObj {
750 return &wasm.navs_obj.values()[@intFromEnum(i)];
751 }
752
753 pub fn name(i: @This(), wasm: *const Wasm) [:0]const u8 {
754 const zcu = wasm.base.comp.zcu.?;
755 const ip = &zcu.intern_pool;
756 const nav = ip.getNav(i.key(wasm).*);
757 return nav.fqn.toSlice(ip);
758 }
759};
760
761/// Index into `Wasm.navs_exe`.
762pub const NavsExeIndex = enum(u32) {
763 _,
764
765 pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Nav.Index {
766 return &wasm.navs_exe.keys()[@intFromEnum(i)];
767 }
768
769 pub fn value(i: @This(), wasm: *const Wasm) *ZcuDataExe {
770 return &wasm.navs_exe.values()[@intFromEnum(i)];
771 }
772
773 pub fn name(i: @This(), wasm: *const Wasm) [:0]const u8 {
774 const zcu = wasm.base.comp.zcu.?;
775 const ip = &zcu.intern_pool;
776 const nav = ip.getNav(i.key(wasm).*);
777 return nav.fqn.toSlice(ip);
778 }
779};
780
781/// Index into `Wasm.uavs_obj`.
782pub const UavsObjIndex = enum(u32) {
783 _,
784
785 pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Index {
786 return &wasm.uavs_obj.keys()[@intFromEnum(i)];
787 }
788
789 pub fn value(i: @This(), wasm: *const Wasm) *ZcuDataObj {
790 return &wasm.uavs_obj.values()[@intFromEnum(i)];
791 }
792};
793
794/// Index into `Wasm.uavs_exe`.
795pub const UavsExeIndex = enum(u32) {
796 _,
797
798 pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Index {
799 return &wasm.uavs_exe.keys()[@intFromEnum(i)];
800 }
801
802 pub fn value(i: @This(), wasm: *const Wasm) *ZcuDataExe {
803 return &wasm.uavs_exe.values()[@intFromEnum(i)];
804 }
805};
806
807/// Used when emitting a relocatable object.
808pub const ZcuDataObj = extern struct {
809 code: DataPayload,
810 relocs: OutReloc.Slice,
811};
812
813/// Used when not emitting a relocatable object.
814pub const ZcuDataExe = extern struct {
815 code: DataPayload,
816 /// Tracks how many references there are for the purposes of sorting data segments.
817 count: u32,
818};
819
820/// An abstraction for calling `lowerZcuData` repeatedly until all data entries
821/// are populated.
822const ZcuDataStarts = struct {
823 uavs_i: u32,
824
825 fn init(wasm: *const Wasm) ZcuDataStarts {
826 const comp = wasm.base.comp;
827 const is_obj = comp.config.output_mode == .Obj;
828 return if (is_obj) initObj(wasm) else initExe(wasm);
829 }
830
831 fn initObj(wasm: *const Wasm) ZcuDataStarts {
832 return .{
833 .uavs_i = @intCast(wasm.uavs_obj.entries.len),
834 };
835 }
836
837 fn initExe(wasm: *const Wasm) ZcuDataStarts {
838 return .{
839 .uavs_i = @intCast(wasm.uavs_exe.entries.len),
840 };
841 }
842
843 fn finish(zds: ZcuDataStarts, wasm: *Wasm, pt: Zcu.PerThread) !void {
844 const comp = wasm.base.comp;
845 const is_obj = comp.config.output_mode == .Obj;
846 return if (is_obj) finishObj(zds, wasm, pt) else finishExe(zds, wasm, pt);
847 }
848
849 fn finishObj(zds: ZcuDataStarts, wasm: *Wasm, pt: Zcu.PerThread) !void {
850 var uavs_i = zds.uavs_i;
851 while (uavs_i < wasm.uavs_obj.entries.len) : (uavs_i += 1) {
852 // Call to `lowerZcuData` here possibly creates more entries in these tables.
853 wasm.uavs_obj.values()[uavs_i] = try lowerZcuData(wasm, pt, wasm.uavs_obj.keys()[uavs_i]);
854 }
855 }
856
857 fn finishExe(zds: ZcuDataStarts, wasm: *Wasm, pt: Zcu.PerThread) !void {
858 var uavs_i = zds.uavs_i;
859 while (uavs_i < wasm.uavs_exe.entries.len) : (uavs_i += 1) {
860 // Call to `lowerZcuData` here possibly creates more entries in these tables.
861 const zcu_data = try lowerZcuData(wasm, pt, wasm.uavs_exe.keys()[uavs_i]);
862 wasm.uavs_exe.values()[uavs_i].code = zcu_data.code;
863 }
864 }
865};
866
867pub const ZcuFunc = union {
868 function: Function,
869 tag_name: TagName,
870
871 pub const Function = extern struct {
872 /// Index into `Wasm.mir_instructions`.
873 instructions_off: u32,
874 /// This is unused except for as a safety slice bound and could be removed.
875 instructions_len: u32,
876 /// Index into `Wasm.mir_extra`.
877 extra_off: u32,
878 /// This is unused except for as a safety slice bound and could be removed.
879 extra_len: u32,
880 /// Index into `Wasm.mir_locals`.
881 locals_off: u32,
882 locals_len: u32,
883 prologue: Mir.Prologue,
884 };
885
886 pub const TagName = extern struct {
887 symbol_name: String,
888 type_index: FunctionType.Index,
889 /// Index into `Wasm.tag_name_offs`.
890 table_index: u32,
891 };
892
893 /// Index into `Wasm.zcu_funcs`.
894 /// Note that swapRemove is sometimes performed on `zcu_funcs`.
895 pub const Index = enum(u32) {
896 _,
897
898 pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Index {
899 return &wasm.zcu_funcs.keys()[@intFromEnum(i)];
900 }
901
902 pub fn value(i: @This(), wasm: *const Wasm) *ZcuFunc {
903 return &wasm.zcu_funcs.values()[@intFromEnum(i)];
904 }
905
906 pub fn name(i: @This(), wasm: *const Wasm) [:0]const u8 {
907 const zcu = wasm.base.comp.zcu.?;
908 const ip = &zcu.intern_pool;
909 const ip_index = i.key(wasm).*;
910 switch (ip.indexToKey(ip_index)) {
911 .func => |func| {
912 const nav = ip.getNav(func.owner_nav);
913 return nav.fqn.toSlice(ip);
914 },
915 .enum_type => {
916 return i.value(wasm).tag_name.symbol_name.slice(wasm);
917 },
918 else => unreachable,
919 }
920 }
921
922 pub fn typeIndex(i: @This(), wasm: *Wasm) FunctionType.Index {
923 const comp = wasm.base.comp;
924 const zcu = comp.zcu.?;
925 const target = &comp.root_mod.resolved_target.result;
926 const ip = &zcu.intern_pool;
927 switch (ip.indexToKey(i.key(wasm).*)) {
928 .func => |func| {
929 const fn_ty = zcu.navValue(func.owner_nav).typeOf(zcu);
930 const fn_info = zcu.typeToFunc(fn_ty).?;
931 return wasm.getExistingFunctionType(fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target).?;
932 },
933 .enum_type => {
934 return i.value(wasm).tag_name.type_index;
935 },
936 else => unreachable,
937 }
938 }
939 };
940};
941
942pub const NavExport = extern struct {
943 name: String,
944 nav_index: InternPool.Nav.Index,
945};
946
947pub const UavExport = extern struct {
948 name: String,
949 uav_index: InternPool.Index,
950};
951
952pub const FunctionImport = extern struct {
953 flags: SymbolFlags,
954 module_name: OptionalString,
955 /// May be different than the key which is a symbol name.
956 name: String,
957 source_location: SourceLocation,
958 resolution: Resolution,
959 type: FunctionType.Index,
960
961 /// Represents a synthetic function, a function from an object, or a
962 /// function from the Zcu.
963 pub const Resolution = enum(u32) {
964 unresolved,
965 __wasm_apply_global_tls_relocs,
966 __wasm_call_ctors,
967 __wasm_init_memory,
968 __wasm_init_tls,
969 // Next, index into `object_functions`.
970 // Next, index into `zcu_funcs`.
971 _,
972
973 const first_object_function = @intFromEnum(Resolution.__wasm_init_tls) + 1;
974
975 pub const Unpacked = union(enum) {
976 unresolved,
977 __wasm_apply_global_tls_relocs,
978 __wasm_call_ctors,
979 __wasm_init_memory,
980 __wasm_init_tls,
981 object_function: ObjectFunctionIndex,
982 zcu_func: ZcuFunc.Index,
983 };
984
985 pub fn unpack(r: Resolution, wasm: *const Wasm) Unpacked {
986 return switch (r) {
987 .unresolved => .unresolved,
988 .__wasm_apply_global_tls_relocs => .__wasm_apply_global_tls_relocs,
989 .__wasm_call_ctors => .__wasm_call_ctors,
990 .__wasm_init_memory => .__wasm_init_memory,
991 .__wasm_init_tls => .__wasm_init_tls,
992 _ => {
993 const object_function_index = @intFromEnum(r) - first_object_function;
994
995 const zcu_func_index = if (object_function_index < wasm.object_functions.items.len)
996 return .{ .object_function = @enumFromInt(object_function_index) }
997 else
998 object_function_index - wasm.object_functions.items.len;
999
1000 return .{ .zcu_func = @enumFromInt(zcu_func_index) };
1001 },
1002 };
1003 }
1004
1005 pub fn pack(wasm: *const Wasm, unpacked: Unpacked) Resolution {
1006 return switch (unpacked) {
1007 .unresolved => .unresolved,
1008 .__wasm_apply_global_tls_relocs => .__wasm_apply_global_tls_relocs,
1009 .__wasm_call_ctors => .__wasm_call_ctors,
1010 .__wasm_init_memory => .__wasm_init_memory,
1011 .__wasm_init_tls => .__wasm_init_tls,
1012 .object_function => |i| @enumFromInt(first_object_function + @intFromEnum(i)),
1013 .zcu_func => |i| @enumFromInt(first_object_function + wasm.object_functions.items.len + @intFromEnum(i)),
1014 };
1015 }
1016
1017 pub fn fromIpNav(wasm: *const Wasm, nav_index: InternPool.Nav.Index) Resolution {
1018 const zcu = wasm.base.comp.zcu.?;
1019 const ip = &zcu.intern_pool;
1020 return fromIpIndex(wasm, ip.getNav(nav_index).status.fully_resolved.val);
1021 }
1022
1023 pub fn fromZcuFunc(wasm: *const Wasm, i: ZcuFunc.Index) Resolution {
1024 return pack(wasm, .{ .zcu_func = i });
1025 }
1026
1027 pub fn fromIpIndex(wasm: *const Wasm, ip_index: InternPool.Index) Resolution {
1028 return fromZcuFunc(wasm, @enumFromInt(wasm.zcu_funcs.getIndex(ip_index).?));
1029 }
1030
1031 pub fn fromObjectFunction(wasm: *const Wasm, object_function: ObjectFunctionIndex) Resolution {
1032 return pack(wasm, .{ .object_function = object_function });
1033 }
1034
1035 pub fn isNavOrUnresolved(r: Resolution, wasm: *const Wasm) bool {
1036 return switch (r.unpack(wasm)) {
1037 .unresolved, .zcu_func => true,
1038 else => false,
1039 };
1040 }
1041
1042 pub fn typeIndex(r: Resolution, wasm: *Wasm) FunctionType.Index {
1043 return switch (unpack(r, wasm)) {
1044 .unresolved => unreachable,
1045 .__wasm_apply_global_tls_relocs,
1046 .__wasm_call_ctors,
1047 .__wasm_init_memory,
1048 => getExistingFuncType2(wasm, &.{}, &.{}),
1049 .__wasm_init_tls => getExistingFuncType2(wasm, &.{.i32}, &.{}),
1050 .object_function => |i| i.ptr(wasm).type_index,
1051 .zcu_func => |i| i.typeIndex(wasm),
1052 };
1053 }
1054
1055 pub fn name(r: Resolution, wasm: *const Wasm) ?[]const u8 {
1056 return switch (unpack(r, wasm)) {
1057 .unresolved => unreachable,
1058 .__wasm_apply_global_tls_relocs => @tagName(Unpacked.__wasm_apply_global_tls_relocs),
1059 .__wasm_call_ctors => @tagName(Unpacked.__wasm_call_ctors),
1060 .__wasm_init_memory => @tagName(Unpacked.__wasm_init_memory),
1061 .__wasm_init_tls => @tagName(Unpacked.__wasm_init_tls),
1062 .object_function => |i| i.ptr(wasm).name.slice(wasm),
1063 .zcu_func => |i| i.name(wasm),
1064 };
1065 }
1066 };
1067
1068 /// Index into `object_function_imports`.
1069 pub const Index = enum(u32) {
1070 _,
1071
1072 pub fn key(index: Index, wasm: *const Wasm) *String {
1073 return &wasm.object_function_imports.keys()[@intFromEnum(index)];
1074 }
1075
1076 pub fn value(index: Index, wasm: *const Wasm) *FunctionImport {
1077 return &wasm.object_function_imports.values()[@intFromEnum(index)];
1078 }
1079
1080 pub fn symbolName(index: Index, wasm: *const Wasm) String {
1081 return index.key(wasm).*;
1082 }
1083
1084 pub fn importName(index: Index, wasm: *const Wasm) String {
1085 return index.value(wasm).name;
1086 }
1087
1088 pub fn moduleName(index: Index, wasm: *const Wasm) OptionalString {
1089 return index.value(wasm).module_name;
1090 }
1091
1092 pub fn functionType(index: Index, wasm: *const Wasm) FunctionType.Index {
1093 return value(index, wasm).type;
1094 }
1095 };
1096};
1097
1098pub const ObjectFunction = extern struct {
1099 flags: SymbolFlags,
1100 /// `none` if this function has no symbol describing it.
1101 name: OptionalString,
1102 type_index: FunctionType.Index,
1103 code: Code,
1104 /// The offset within the code section where the data starts.
1105 offset: u32,
1106 /// The object file whose code section contains this function.
1107 object_index: ObjectIndex,
1108
1109 pub const Code = DataPayload;
1110
1111 pub fn relocations(of: *const ObjectFunction, wasm: *const Wasm) ObjectRelocation.IterableSlice {
1112 const code_section_index = of.object_index.ptr(wasm).code_section_index.?;
1113 const relocs = wasm.object_relocations_table.get(code_section_index) orelse return .empty;
1114 return .init(relocs, of.offset, of.code.len, wasm);
1115 }
1116};
1117
1118pub const GlobalImport = extern struct {
1119 flags: SymbolFlags,
1120 module_name: OptionalString,
1121 /// May be different than the key which is a symbol name.
1122 name: String,
1123 source_location: SourceLocation,
1124 resolution: Resolution,
1125
1126 /// Represents a synthetic global, a global from an object, or a global
1127 /// from the Zcu.
1128 pub const Resolution = enum(u32) {
1129 unresolved,
1130 __heap_base,
1131 __heap_end,
1132 __stack_pointer,
1133 __tls_align,
1134 __tls_base,
1135 __tls_size,
1136 // Next, index into `object_globals`.
1137 // Next, index into `navs_obj` or `navs_exe` depending on whether emitting an object.
1138 _,
1139
1140 const first_object_global = @intFromEnum(Resolution.__tls_size) + 1;
1141
1142 pub const Unpacked = union(enum) {
1143 unresolved,
1144 __heap_base,
1145 __heap_end,
1146 __stack_pointer,
1147 __tls_align,
1148 __tls_base,
1149 __tls_size,
1150 object_global: ObjectGlobalIndex,
1151 nav_exe: NavsExeIndex,
1152 nav_obj: NavsObjIndex,
1153 };
1154
1155 pub fn unpack(r: Resolution, wasm: *const Wasm) Unpacked {
1156 return switch (r) {
1157 .unresolved => .unresolved,
1158 .__heap_base => .__heap_base,
1159 .__heap_end => .__heap_end,
1160 .__stack_pointer => .__stack_pointer,
1161 .__tls_align => .__tls_align,
1162 .__tls_base => .__tls_base,
1163 .__tls_size => .__tls_size,
1164 _ => {
1165 const i: u32 = @intFromEnum(r);
1166 const object_global_index = i - first_object_global;
1167 if (object_global_index < wasm.object_globals.items.len)
1168 return .{ .object_global = @enumFromInt(object_global_index) };
1169 const comp = wasm.base.comp;
1170 const is_obj = comp.config.output_mode == .Obj;
1171 const nav_index = object_global_index - wasm.object_globals.items.len;
1172 return if (is_obj) .{
1173 .nav_obj = @enumFromInt(nav_index),
1174 } else .{
1175 .nav_exe = @enumFromInt(nav_index),
1176 };
1177 },
1178 };
1179 }
1180
1181 pub fn pack(wasm: *const Wasm, unpacked: Unpacked) Resolution {
1182 return switch (unpacked) {
1183 .unresolved => .unresolved,
1184 .__heap_base => .__heap_base,
1185 .__heap_end => .__heap_end,
1186 .__stack_pointer => .__stack_pointer,
1187 .__tls_align => .__tls_align,
1188 .__tls_base => .__tls_base,
1189 .__tls_size => .__tls_size,
1190 .object_global => |i| @enumFromInt(first_object_global + @intFromEnum(i)),
1191 .nav_obj => |i| @enumFromInt(first_object_global + wasm.object_globals.items.len + @intFromEnum(i)),
1192 .nav_exe => |i| @enumFromInt(first_object_global + wasm.object_globals.items.len + @intFromEnum(i)),
1193 };
1194 }
1195
1196 pub fn fromIpNav(wasm: *const Wasm, ip_nav: InternPool.Nav.Index) Resolution {
1197 const comp = wasm.base.comp;
1198 const is_obj = comp.config.output_mode == .Obj;
1199 return pack(wasm, if (is_obj) .{
1200 .nav_obj = @enumFromInt(wasm.navs_obj.getIndex(ip_nav).?),
1201 } else .{
1202 .nav_exe = @enumFromInt(wasm.navs_exe.getIndex(ip_nav).?),
1203 });
1204 }
1205
1206 pub fn fromObjectGlobal(wasm: *const Wasm, object_global: ObjectGlobalIndex) Resolution {
1207 return pack(wasm, .{ .object_global = object_global });
1208 }
1209
1210 pub fn name(r: Resolution, wasm: *const Wasm) ?[]const u8 {
1211 return switch (unpack(r, wasm)) {
1212 .unresolved => unreachable,
1213 .__heap_base => @tagName(Unpacked.__heap_base),
1214 .__heap_end => @tagName(Unpacked.__heap_end),
1215 .__stack_pointer => @tagName(Unpacked.__stack_pointer),
1216 .__tls_align => @tagName(Unpacked.__tls_align),
1217 .__tls_base => @tagName(Unpacked.__tls_base),
1218 .__tls_size => @tagName(Unpacked.__tls_size),
1219 .object_global => |i| i.name(wasm).slice(wasm),
1220 .nav_obj => |i| i.name(wasm),
1221 .nav_exe => |i| i.name(wasm),
1222 };
1223 }
1224 };
1225
1226 /// Index into `Wasm.object_global_imports`.
1227 pub const Index = enum(u32) {
1228 _,
1229
1230 pub fn key(index: Index, wasm: *const Wasm) *String {
1231 return &wasm.object_global_imports.keys()[@intFromEnum(index)];
1232 }
1233
1234 pub fn value(index: Index, wasm: *const Wasm) *GlobalImport {
1235 return &wasm.object_global_imports.values()[@intFromEnum(index)];
1236 }
1237
1238 pub fn symbolName(index: Index, wasm: *const Wasm) String {
1239 return index.key(wasm).*;
1240 }
1241
1242 pub fn importName(index: Index, wasm: *const Wasm) String {
1243 return index.value(wasm).name;
1244 }
1245
1246 pub fn moduleName(index: Index, wasm: *const Wasm) OptionalString {
1247 return index.value(wasm).module_name;
1248 }
1249
1250 pub fn globalType(index: Index, wasm: *const Wasm) ObjectGlobal.Type {
1251 return value(index, wasm).type();
1252 }
1253 };
1254
1255 pub fn @"type"(gi: *const GlobalImport) ObjectGlobal.Type {
1256 return gi.flags.global_type.to();
1257 }
1258};
1259
1260pub const ObjectGlobal = extern struct {
1261 /// `none` if this function has no symbol describing it.
1262 name: OptionalString,
1263 flags: SymbolFlags,
1264 expr: Expr,
1265 /// The object file whose global section contains this global.
1266 object_index: ObjectIndex,
1267 offset: u32,
1268 size: u32,
1269
1270 pub fn @"type"(og: *const ObjectGlobal) Type {
1271 return og.flags.global_type.to();
1272 }
1273
1274 pub const Type = struct {
1275 valtype: std.wasm.Valtype,
1276 mutable: bool,
1277 };
1278
1279 pub fn relocations(og: *const ObjectGlobal, wasm: *const Wasm) ObjectRelocation.IterableSlice {
1280 const global_section_index = og.object_index.ptr(wasm).global_section_index.?;
1281 const relocs = wasm.object_relocations_table.get(global_section_index) orelse return .empty;
1282 return .init(relocs, og.offset, og.size, wasm);
1283 }
1284};
1285
1286pub const RefType1 = enum(u1) {
1287 funcref,
1288 externref,
1289
1290 pub fn from(rt: std.wasm.RefType) RefType1 {
1291 return switch (rt) {
1292 .funcref => .funcref,
1293 .externref => .externref,
1294 };
1295 }
1296
1297 pub fn to(rt: RefType1) std.wasm.RefType {
1298 return switch (rt) {
1299 .funcref => .funcref,
1300 .externref => .externref,
1301 };
1302 }
1303};
1304
1305pub const TableImport = extern struct {
1306 flags: SymbolFlags,
1307 module_name: String,
1308 /// May be different than the key which is a symbol name.
1309 name: String,
1310 source_location: SourceLocation,
1311 resolution: Resolution,
1312 limits_min: u32,
1313 limits_max: u32,
1314
1315 /// Represents a synthetic table, or a table from an object.
1316 pub const Resolution = enum(u32) {
1317 unresolved,
1318 __indirect_function_table,
1319 // Next, index into `object_tables`.
1320 _,
1321
1322 const first_object_table = @intFromEnum(Resolution.__indirect_function_table) + 1;
1323
1324 pub const Unpacked = union(enum) {
1325 unresolved,
1326 __indirect_function_table,
1327 object_table: ObjectTableIndex,
1328 };
1329
1330 pub fn unpack(r: Resolution) Unpacked {
1331 return switch (r) {
1332 .unresolved => .unresolved,
1333 .__indirect_function_table => .__indirect_function_table,
1334 _ => .{ .object_table = @enumFromInt(@intFromEnum(r) - first_object_table) },
1335 };
1336 }
1337
1338 fn pack(unpacked: Unpacked) Resolution {
1339 return switch (unpacked) {
1340 .unresolved => .unresolved,
1341 .__indirect_function_table => .__indirect_function_table,
1342 .object_table => |i| @enumFromInt(first_object_table + @intFromEnum(i)),
1343 };
1344 }
1345
1346 fn fromObjectTable(object_table: ObjectTableIndex) Resolution {
1347 return pack(.{ .object_table = object_table });
1348 }
1349
1350 pub fn refType(r: Resolution, wasm: *const Wasm) std.wasm.RefType {
1351 return switch (unpack(r)) {
1352 .unresolved => unreachable,
1353 .__indirect_function_table => .funcref,
1354 .object_table => |i| i.ptr(wasm).flags.ref_type.to(),
1355 };
1356 }
1357
1358 pub fn limits(r: Resolution, wasm: *const Wasm) std.wasm.Limits {
1359 return switch (unpack(r)) {
1360 .unresolved => unreachable,
1361 .__indirect_function_table => .{
1362 .flags = .{ .has_max = true, .is_shared = false },
1363 .min = @intCast(wasm.flush_buffer.indirect_function_table.entries.len + 1),
1364 .max = @intCast(wasm.flush_buffer.indirect_function_table.entries.len + 1),
1365 },
1366 .object_table => |i| i.ptr(wasm).limits(),
1367 };
1368 }
1369 };
1370
1371 /// Index into `object_table_imports`.
1372 pub const Index = enum(u32) {
1373 _,
1374
1375 pub fn key(index: Index, wasm: *const Wasm) *String {
1376 return &wasm.object_table_imports.keys()[@intFromEnum(index)];
1377 }
1378
1379 pub fn value(index: Index, wasm: *const Wasm) *TableImport {
1380 return &wasm.object_table_imports.values()[@intFromEnum(index)];
1381 }
1382
1383 pub fn name(index: Index, wasm: *const Wasm) String {
1384 return index.key(wasm).*;
1385 }
1386
1387 pub fn moduleName(index: Index, wasm: *const Wasm) OptionalString {
1388 return index.value(wasm).module_name;
1389 }
1390 };
1391
1392 pub fn limits(ti: *const TableImport) std.wasm.Limits {
1393 return .{
1394 .flags = .{
1395 .has_max = ti.flags.limits_has_max,
1396 .is_shared = ti.flags.limits_is_shared,
1397 },
1398 .min = ti.limits_min,
1399 .max = ti.limits_max,
1400 };
1401 }
1402};
1403
1404pub const Table = extern struct {
1405 module_name: OptionalString,
1406 name: OptionalString,
1407 flags: SymbolFlags,
1408 limits_min: u32,
1409 limits_max: u32,
1410
1411 pub fn limits(t: *const Table) std.wasm.Limits {
1412 return .{
1413 .flags = .{
1414 .has_max = t.flags.limits_has_max,
1415 .is_shared = t.flags.limits_is_shared,
1416 },
1417 .min = t.limits_min,
1418 .max = t.limits_max,
1419 };
1420 }
1421};
1422
1423/// Uniquely identifies a section across all objects. By subtracting
1424/// `Object.local_section_index_base` from this one, the Object section index
1425/// is obtained.
1426pub const ObjectSectionIndex = enum(u32) {
1427 _,
1428};
1429
1430/// Index into `object_tables`.
1431pub const ObjectTableIndex = enum(u32) {
1432 _,
1433
1434 pub fn ptr(index: ObjectTableIndex, wasm: *const Wasm) *Table {
1435 return &wasm.object_tables.items[@intFromEnum(index)];
1436 }
1437
1438 pub fn chaseWeak(i: ObjectTableIndex, wasm: *const Wasm) ObjectTableIndex {
1439 const table = ptr(i, wasm);
1440 if (table.flags.binding != .weak) return i;
1441 const name = table.name.unwrap().?;
1442 const import = wasm.object_table_imports.getPtr(name).?;
1443 assert(import.resolution != .unresolved); // otherwise it should resolve to this one.
1444 return import.resolution.unpack().object_table;
1445 }
1446};
1447
1448/// Index into `Wasm.object_globals`.
1449pub const ObjectGlobalIndex = enum(u32) {
1450 _,
1451
1452 pub fn ptr(index: ObjectGlobalIndex, wasm: *const Wasm) *ObjectGlobal {
1453 return &wasm.object_globals.items[@intFromEnum(index)];
1454 }
1455
1456 pub fn name(index: ObjectGlobalIndex, wasm: *const Wasm) OptionalString {
1457 return index.ptr(wasm).name;
1458 }
1459
1460 pub fn chaseWeak(i: ObjectGlobalIndex, wasm: *const Wasm) ObjectGlobalIndex {
1461 const global = ptr(i, wasm);
1462 if (global.flags.binding != .weak) return i;
1463 const import_name = global.name.unwrap().?;
1464 const import = wasm.object_global_imports.getPtr(import_name).?;
1465 assert(import.resolution != .unresolved); // otherwise it should resolve to this one.
1466 return import.resolution.unpack(wasm).object_global;
1467 }
1468};
1469
1470pub const ObjectMemory = extern struct {
1471 flags: SymbolFlags,
1472 name: OptionalString,
1473 limits_min: u32,
1474 limits_max: u32,
1475
1476 /// Index into `Wasm.object_memories`.
1477 pub const Index = enum(u32) {
1478 _,
1479
1480 pub fn ptr(index: Index, wasm: *const Wasm) *ObjectMemory {
1481 return &wasm.object_memories.items[@intFromEnum(index)];
1482 }
1483 };
1484
1485 pub fn limits(om: *const ObjectMemory) std.wasm.Limits {
1486 return .{
1487 .flags = .{
1488 .has_max = om.limits_has_max,
1489 .is_shared = om.limits_is_shared,
1490 },
1491 .min = om.limits_min,
1492 .max = om.limits_max,
1493 };
1494 }
1495};
1496
1497/// Index into `Wasm.object_functions`.
1498pub const ObjectFunctionIndex = enum(u32) {
1499 _,
1500
1501 pub fn ptr(index: ObjectFunctionIndex, wasm: *const Wasm) *ObjectFunction {
1502 return &wasm.object_functions.items[@intFromEnum(index)];
1503 }
1504
1505 pub fn toOptional(i: ObjectFunctionIndex) OptionalObjectFunctionIndex {
1506 const result: OptionalObjectFunctionIndex = @enumFromInt(@intFromEnum(i));
1507 assert(result != .none);
1508 return result;
1509 }
1510
1511 pub fn chaseWeak(i: ObjectFunctionIndex, wasm: *const Wasm) ObjectFunctionIndex {
1512 const func = ptr(i, wasm);
1513 if (func.flags.binding != .weak) return i;
1514 const name = func.name.unwrap().?;
1515 const import = wasm.object_function_imports.getPtr(name).?;
1516 assert(import.resolution != .unresolved); // otherwise it should resolve to this one.
1517 return import.resolution.unpack(wasm).object_function;
1518 }
1519};
1520
1521/// Index into `object_functions`, or null.
1522pub const OptionalObjectFunctionIndex = enum(u32) {
1523 none = std.math.maxInt(u32),
1524 _,
1525
1526 pub fn unwrap(i: OptionalObjectFunctionIndex) ?ObjectFunctionIndex {
1527 if (i == .none) return null;
1528 return @enumFromInt(@intFromEnum(i));
1529 }
1530};
1531
1532pub const ObjectDataSegment = extern struct {
1533 /// `none` if segment info custom subsection is missing.
1534 name: OptionalString,
1535 flags: Flags,
1536 payload: DataPayload,
1537 offset: u32,
1538 object_index: ObjectIndex,
1539
1540 pub const Flags = packed struct(u32) {
1541 alive: bool = false,
1542 is_passive: bool = false,
1543 alignment: Alignment = .none,
1544 /// Signals that the segment contains only null terminated strings allowing
1545 /// the linker to perform merging.
1546 strings: bool = false,
1547 /// The segment contains thread-local data. This means that a unique copy
1548 /// of this segment will be created for each thread.
1549 tls: bool = false,
1550 /// If the object file is included in the final link, the segment should be
1551 /// retained in the final output regardless of whether it is used by the
1552 /// program.
1553 retain: bool = false,
1554
1555 _: u21 = 0,
1556 };
1557
1558 /// Index into `Wasm.object_data_segments`.
1559 pub const Index = enum(u32) {
1560 _,
1561
1562 pub fn ptr(i: Index, wasm: *const Wasm) *ObjectDataSegment {
1563 return &wasm.object_data_segments.items[@intFromEnum(i)];
1564 }
1565 };
1566
1567 pub fn relocations(ods: *const ObjectDataSegment, wasm: *const Wasm) ObjectRelocation.IterableSlice {
1568 const data_section_index = ods.object_index.ptr(wasm).data_section_index.?;
1569 const relocs = wasm.object_relocations_table.get(data_section_index) orelse return .empty;
1570 return .init(relocs, ods.offset, ods.payload.len, wasm);
1571 }
1572};
1573
1574/// A local or exported global const from an object file.
1575pub const ObjectData = extern struct {
1576 segment: ObjectDataSegment.Index,
1577 /// Index into the object segment payload. Must be <= the segment's size.
1578 offset: u32,
1579 /// May be zero. `offset + size` must be <= the segment's size.
1580 size: u32,
1581 name: String,
1582 flags: SymbolFlags,
1583
1584 /// Index into `Wasm.object_datas`.
1585 pub const Index = enum(u32) {
1586 _,
1587
1588 pub fn ptr(i: Index, wasm: *const Wasm) *ObjectData {
1589 return &wasm.object_datas.items[@intFromEnum(i)];
1590 }
1591 };
1592};
1593
1594pub const ObjectDataImport = extern struct {
1595 resolution: Resolution,
1596 flags: SymbolFlags,
1597 source_location: SourceLocation,
1598
1599 pub const Resolution = enum(u32) {
1600 unresolved,
1601 __zig_error_names,
1602 __zig_error_name_table,
1603 __heap_base,
1604 __heap_end,
1605 /// Next, an `ObjectData.Index`.
1606 /// Next, index into `uavs_obj` or `uavs_exe` depending on whether emitting an object.
1607 /// Next, index into `navs_obj` or `navs_exe` depending on whether emitting an object.
1608 _,
1609
1610 const first_object = @intFromEnum(Resolution.__heap_end) + 1;
1611
1612 pub const Unpacked = union(enum) {
1613 unresolved,
1614 __zig_error_names,
1615 __zig_error_name_table,
1616 __heap_base,
1617 __heap_end,
1618 object: ObjectData.Index,
1619 uav_exe: UavsExeIndex,
1620 uav_obj: UavsObjIndex,
1621 nav_exe: NavsExeIndex,
1622 nav_obj: NavsObjIndex,
1623 };
1624
1625 pub fn unpack(r: Resolution, wasm: *const Wasm) Unpacked {
1626 return switch (r) {
1627 .unresolved => .unresolved,
1628 .__zig_error_names => .__zig_error_names,
1629 .__zig_error_name_table => .__zig_error_name_table,
1630 .__heap_base => .__heap_base,
1631 .__heap_end => .__heap_end,
1632 _ => {
1633 const object_index = @intFromEnum(r) - first_object;
1634
1635 const uav_index = if (object_index < wasm.object_datas.items.len)
1636 return .{ .object = @enumFromInt(object_index) }
1637 else
1638 object_index - wasm.object_datas.items.len;
1639
1640 const comp = wasm.base.comp;
1641 const is_obj = comp.config.output_mode == .Obj;
1642 if (is_obj) {
1643 const nav_index = if (uav_index < wasm.uavs_obj.entries.len)
1644 return .{ .uav_obj = @enumFromInt(uav_index) }
1645 else
1646 uav_index - wasm.uavs_obj.entries.len;
1647
1648 return .{ .nav_obj = @enumFromInt(nav_index) };
1649 } else {
1650 const nav_index = if (uav_index < wasm.uavs_exe.entries.len)
1651 return .{ .uav_exe = @enumFromInt(uav_index) }
1652 else
1653 uav_index - wasm.uavs_exe.entries.len;
1654
1655 return .{ .nav_exe = @enumFromInt(nav_index) };
1656 }
1657 },
1658 };
1659 }
1660
1661 pub fn pack(wasm: *const Wasm, unpacked: Unpacked) Resolution {
1662 return switch (unpacked) {
1663 .unresolved => .unresolved,
1664 .__zig_error_names => .__zig_error_names,
1665 .__zig_error_name_table => .__zig_error_name_table,
1666 .__heap_base => .__heap_base,
1667 .__heap_end => .__heap_end,
1668 .object => |i| @enumFromInt(first_object + @intFromEnum(i)),
1669 inline .uav_exe, .uav_obj => |i| @enumFromInt(first_object + wasm.object_datas.items.len + @intFromEnum(i)),
1670 .nav_exe => |i| @enumFromInt(first_object + wasm.object_datas.items.len + wasm.uavs_exe.entries.len + @intFromEnum(i)),
1671 .nav_obj => |i| @enumFromInt(first_object + wasm.object_datas.items.len + wasm.uavs_obj.entries.len + @intFromEnum(i)),
1672 };
1673 }
1674
1675 pub fn fromObjectDataIndex(wasm: *const Wasm, object_data_index: ObjectData.Index) Resolution {
1676 return pack(wasm, .{ .object = object_data_index });
1677 }
1678
1679 pub fn objectDataSegment(r: Resolution, wasm: *const Wasm) ?ObjectDataSegment.Index {
1680 return switch (unpack(r, wasm)) {
1681 .unresolved => unreachable,
1682 .object => |i| i.ptr(wasm).segment,
1683 .__zig_error_names,
1684 .__zig_error_name_table,
1685 .__heap_base,
1686 .__heap_end,
1687 .uav_exe,
1688 .uav_obj,
1689 .nav_exe,
1690 .nav_obj,
1691 => null,
1692 };
1693 }
1694
1695 pub fn dataLoc(r: Resolution, wasm: *const Wasm) DataLoc {
1696 return switch (unpack(r, wasm)) {
1697 .unresolved => unreachable,
1698 .object => |i| {
1699 const ptr = i.ptr(wasm);
1700 return .{
1701 .segment = .fromObjectDataSegment(wasm, ptr.segment),
1702 .offset = ptr.offset,
1703 };
1704 },
1705 .__zig_error_names => .{ .segment = .__zig_error_names, .offset = 0 },
1706 .__zig_error_name_table => .{ .segment = .__zig_error_name_table, .offset = 0 },
1707 .__heap_base => .{ .segment = .__heap_base, .offset = 0 },
1708 .__heap_end => .{ .segment = .__heap_end, .offset = 0 },
1709 .uav_exe => @panic("TODO"),
1710 .uav_obj => @panic("TODO"),
1711 .nav_exe => @panic("TODO"),
1712 .nav_obj => @panic("TODO"),
1713 };
1714 }
1715 };
1716
1717 /// Points into `Wasm.object_data_imports`.
1718 pub const Index = enum(u32) {
1719 _,
1720
1721 pub fn value(i: @This(), wasm: *const Wasm) *ObjectDataImport {
1722 return &wasm.object_data_imports.values()[@intFromEnum(i)];
1723 }
1724
1725 pub fn fromSymbolName(wasm: *const Wasm, name: String) ?Index {
1726 return @enumFromInt(wasm.object_data_imports.getIndex(name) orelse return null);
1727 }
1728 };
1729};
1730
1731pub const DataPayload = extern struct {
1732 off: Off,
1733 /// The size in bytes of the data representing the segment within the section.
1734 len: u32,
1735
1736 pub const Off = enum(u32) {
1737 /// The payload is all zeroes (bss section).
1738 none = std.math.maxInt(u32),
1739 /// Points into string_bytes. No corresponding string_table entry.
1740 _,
1741
1742 pub fn unwrap(off: Off) ?u32 {
1743 return if (off == .none) null else @intFromEnum(off);
1744 }
1745 };
1746
1747 pub fn slice(p: DataPayload, wasm: *const Wasm) []const u8 {
1748 return wasm.string_bytes.items[p.off.unwrap().?..][0..p.len];
1749 }
1750};
1751
1752/// A reference to a local or exported global const.
1753pub const DataSegmentId = enum(u32) {
1754 __zig_error_names,
1755 __zig_error_name_table,
1756 /// All name string bytes for all `@tagName` implementations, concatenated together.
1757 __zig_tag_names,
1758 /// All tag name slices for all `@tagName` implementations, concatenated together.
1759 __zig_tag_name_table,
1760 /// This and `__heap_end` are better retrieved via a global, but there is
1761 /// some suboptimal code out there (wasi libc) that additionally needs them
1762 /// as data symbols.
1763 __heap_base,
1764 __heap_end,
1765 /// First, an `ObjectDataSegment.Index`.
1766 /// Next, index into `uavs_obj` or `uavs_exe` depending on whether emitting an object.
1767 /// Next, index into `navs_obj` or `navs_exe` depending on whether emitting an object.
1768 _,
1769
1770 const first_object = @intFromEnum(DataSegmentId.__heap_end) + 1;
1771
1772 pub const Category = enum {
1773 /// Thread-local variables.
1774 tls,
1775 /// Data that is not zero initialized and not threadlocal.
1776 data,
1777 /// Zero-initialized. Does not require corresponding bytes in the
1778 /// output file.
1779 zero,
1780 };
1781
1782 pub const Unpacked = union(enum) {
1783 __zig_error_names,
1784 __zig_error_name_table,
1785 __zig_tag_names,
1786 __zig_tag_name_table,
1787 __heap_base,
1788 __heap_end,
1789 object: ObjectDataSegment.Index,
1790 uav_exe: UavsExeIndex,
1791 uav_obj: UavsObjIndex,
1792 nav_exe: NavsExeIndex,
1793 nav_obj: NavsObjIndex,
1794 };
1795
1796 pub fn pack(wasm: *const Wasm, unpacked: Unpacked) DataSegmentId {
1797 return switch (unpacked) {
1798 .__zig_error_names => .__zig_error_names,
1799 .__zig_error_name_table => .__zig_error_name_table,
1800 .__zig_tag_names => .__zig_tag_names,
1801 .__zig_tag_name_table => .__zig_tag_name_table,
1802 .__heap_base => .__heap_base,
1803 .__heap_end => .__heap_end,
1804 .object => |i| @enumFromInt(first_object + @intFromEnum(i)),
1805 inline .uav_exe, .uav_obj => |i| @enumFromInt(first_object + wasm.object_data_segments.items.len + @intFromEnum(i)),
1806 .nav_exe => |i| @enumFromInt(first_object + wasm.object_data_segments.items.len + wasm.uavs_exe.entries.len + @intFromEnum(i)),
1807 .nav_obj => |i| @enumFromInt(first_object + wasm.object_data_segments.items.len + wasm.uavs_obj.entries.len + @intFromEnum(i)),
1808 };
1809 }
1810
1811 pub fn unpack(id: DataSegmentId, wasm: *const Wasm) Unpacked {
1812 return switch (id) {
1813 .__zig_error_names => .__zig_error_names,
1814 .__zig_error_name_table => .__zig_error_name_table,
1815 .__zig_tag_names => .__zig_tag_names,
1816 .__zig_tag_name_table => .__zig_tag_name_table,
1817 .__heap_base => .__heap_base,
1818 .__heap_end => .__heap_end,
1819 _ => {
1820 const object_index = @intFromEnum(id) - first_object;
1821
1822 const uav_index = if (object_index < wasm.object_data_segments.items.len)
1823 return .{ .object = @enumFromInt(object_index) }
1824 else
1825 object_index - wasm.object_data_segments.items.len;
1826
1827 const comp = wasm.base.comp;
1828 const is_obj = comp.config.output_mode == .Obj;
1829 if (is_obj) {
1830 const nav_index = if (uav_index < wasm.uavs_obj.entries.len)
1831 return .{ .uav_obj = @enumFromInt(uav_index) }
1832 else
1833 uav_index - wasm.uavs_obj.entries.len;
1834
1835 return .{ .nav_obj = @enumFromInt(nav_index) };
1836 } else {
1837 const nav_index = if (uav_index < wasm.uavs_exe.entries.len)
1838 return .{ .uav_exe = @enumFromInt(uav_index) }
1839 else
1840 uav_index - wasm.uavs_exe.entries.len;
1841
1842 return .{ .nav_exe = @enumFromInt(nav_index) };
1843 }
1844 },
1845 };
1846 }
1847
1848 pub fn fromNav(wasm: *const Wasm, nav_index: InternPool.Nav.Index) DataSegmentId {
1849 const comp = wasm.base.comp;
1850 const is_obj = comp.config.output_mode == .Obj;
1851 return pack(wasm, if (is_obj) .{
1852 .nav_obj = @enumFromInt(wasm.navs_obj.getIndex(nav_index).?),
1853 } else .{
1854 .nav_exe = @enumFromInt(wasm.navs_exe.getIndex(nav_index).?),
1855 });
1856 }
1857
1858 pub fn fromObjectDataSegment(wasm: *const Wasm, object_data_segment: ObjectDataSegment.Index) DataSegmentId {
1859 return pack(wasm, .{ .object = object_data_segment });
1860 }
1861
1862 pub fn category(id: DataSegmentId, wasm: *const Wasm) Category {
1863 return switch (unpack(id, wasm)) {
1864 .__zig_error_names,
1865 .__zig_error_name_table,
1866 .__zig_tag_names,
1867 .__zig_tag_name_table,
1868 .__heap_base,
1869 .__heap_end,
1870 => .data,
1871
1872 .object => |i| {
1873 const ptr = i.ptr(wasm);
1874 if (ptr.flags.tls) return .tls;
1875 if (wasm.isBss(ptr.name)) return .zero;
1876 return .data;
1877 },
1878 inline .uav_exe, .uav_obj => |i| if (i.value(wasm).code.off == .none) .zero else .data,
1879 inline .nav_exe, .nav_obj => |i| {
1880 const zcu = wasm.base.comp.zcu.?;
1881 const ip = &zcu.intern_pool;
1882 const nav = ip.getNav(i.key(wasm).*);
1883 if (nav.isThreadlocal(ip)) return .tls;
1884 const code = i.value(wasm).code;
1885 return if (code.off == .none) .zero else .data;
1886 },
1887 };
1888 }
1889
1890 pub fn isTls(id: DataSegmentId, wasm: *const Wasm) bool {
1891 return switch (unpack(id, wasm)) {
1892 .__zig_error_names,
1893 .__zig_error_name_table,
1894 .__zig_tag_names,
1895 .__zig_tag_name_table,
1896 .__heap_base,
1897 .__heap_end,
1898 => false,
1899
1900 .object => |i| i.ptr(wasm).flags.tls,
1901 .uav_exe, .uav_obj => false,
1902 inline .nav_exe, .nav_obj => |i| {
1903 const zcu = wasm.base.comp.zcu.?;
1904 const ip = &zcu.intern_pool;
1905 const nav = ip.getNav(i.key(wasm).*);
1906 return nav.isThreadlocal(ip);
1907 },
1908 };
1909 }
1910
1911 pub fn isBss(id: DataSegmentId, wasm: *const Wasm) bool {
1912 return id.category(wasm) == .zero;
1913 }
1914
1915 pub fn name(id: DataSegmentId, wasm: *const Wasm) []const u8 {
1916 return switch (unpack(id, wasm)) {
1917 .__zig_error_names,
1918 .__zig_error_name_table,
1919 .__zig_tag_names,
1920 .__zig_tag_name_table,
1921 .uav_exe,
1922 .uav_obj,
1923 .__heap_base,
1924 .__heap_end,
1925 => ".data",
1926
1927 .object => |i| i.ptr(wasm).name.unwrap().?.slice(wasm),
1928 inline .nav_exe, .nav_obj => |i| {
1929 const zcu = wasm.base.comp.zcu.?;
1930 const ip = &zcu.intern_pool;
1931 const nav = ip.getNav(i.key(wasm).*);
1932 return nav.getLinkSection().toSlice(ip) orelse switch (category(id, wasm)) {
1933 .tls => ".tdata",
1934 .data => ".data",
1935 .zero => ".bss",
1936 };
1937 },
1938 };
1939 }
1940
1941 pub fn alignment(id: DataSegmentId, wasm: *const Wasm) Alignment {
1942 return switch (unpack(id, wasm)) {
1943 .__zig_error_names, .__zig_tag_names => .@"1",
1944 .__zig_error_name_table, .__zig_tag_name_table, .__heap_base, .__heap_end => wasm.pointerAlignment(),
1945 .object => |i| i.ptr(wasm).flags.alignment,
1946 inline .uav_exe, .uav_obj => |i| {
1947 const zcu = wasm.base.comp.zcu.?;
1948 const ip = &zcu.intern_pool;
1949 const ip_index = i.key(wasm).*;
1950 if (wasm.overaligned_uavs.get(ip_index)) |a| return a;
1951 const ty: Zcu.Type = .fromInterned(ip.typeOf(ip_index));
1952 const result = ty.abiAlignment(zcu);
1953 assert(result != .none);
1954 return result;
1955 },
1956 inline .nav_exe, .nav_obj => |i| {
1957 const zcu = wasm.base.comp.zcu.?;
1958 const ip = &zcu.intern_pool;
1959 const nav = ip.getNav(i.key(wasm).*);
1960 const explicit = nav.getAlignment();
1961 if (explicit != .none) return explicit;
1962 const ty: Zcu.Type = .fromInterned(nav.typeOf(ip));
1963 const result = ty.abiAlignment(zcu);
1964 assert(result != .none);
1965 return result;
1966 },
1967 };
1968 }
1969
1970 pub fn refCount(id: DataSegmentId, wasm: *const Wasm) u32 {
1971 return switch (unpack(id, wasm)) {
1972 .__zig_error_names => @intCast(wasm.error_name_offs.items.len),
1973 .__zig_error_name_table => wasm.error_name_table_ref_count,
1974 .__zig_tag_names => @intCast(wasm.tag_name_offs.items.len),
1975 .__zig_tag_name_table => wasm.tag_name_table_ref_count,
1976 .object, .uav_obj, .nav_obj, .__heap_base, .__heap_end => 0,
1977 inline .uav_exe, .nav_exe => |i| i.value(wasm).count,
1978 };
1979 }
1980
1981 pub fn isPassive(id: DataSegmentId, wasm: *const Wasm) bool {
1982 const comp = wasm.base.comp;
1983 if (comp.config.import_memory) return true;
1984 return switch (unpack(id, wasm)) {
1985 .__zig_error_names,
1986 .__zig_error_name_table,
1987 .__zig_tag_names,
1988 .__zig_tag_name_table,
1989 .__heap_base,
1990 .__heap_end,
1991 => false,
1992
1993 .object => |i| i.ptr(wasm).flags.is_passive,
1994 .uav_exe, .uav_obj, .nav_exe, .nav_obj => false,
1995 };
1996 }
1997
1998 pub fn isEmpty(id: DataSegmentId, wasm: *const Wasm) bool {
1999 return switch (unpack(id, wasm)) {
2000 .__zig_error_names,
2001 .__zig_error_name_table,
2002 .__zig_tag_names,
2003 .__zig_tag_name_table,
2004 .__heap_base,
2005 .__heap_end,
2006 => false,
2007
2008 .object => |i| i.ptr(wasm).payload.off == .none,
2009 inline .uav_exe, .uav_obj, .nav_exe, .nav_obj => |i| i.value(wasm).code.off == .none,
2010 };
2011 }
2012
2013 pub fn size(id: DataSegmentId, wasm: *const Wasm) u32 {
2014 return switch (unpack(id, wasm)) {
2015 .__zig_error_names => @intCast(wasm.error_name_bytes.items.len),
2016 .__zig_error_name_table => {
2017 const comp = wasm.base.comp;
2018 const zcu = comp.zcu.?;
2019 const errors_len = wasm.error_name_offs.items.len;
2020 const elem_size = Zcu.Type.slice_const_u8_sentinel_0.abiSize(zcu);
2021 return @intCast(errors_len * elem_size);
2022 },
2023 .__zig_tag_names => @intCast(wasm.tag_name_bytes.items.len),
2024 .__zig_tag_name_table => {
2025 const comp = wasm.base.comp;
2026 const zcu = comp.zcu.?;
2027 const table_len = wasm.tag_name_offs.items.len;
2028 const elem_size = Zcu.Type.slice_const_u8_sentinel_0.abiSize(zcu);
2029 return @intCast(table_len * elem_size);
2030 },
2031 .__heap_base, .__heap_end => wasm.pointerSize(),
2032 .object => |i| i.ptr(wasm).payload.len,
2033 inline .uav_exe, .uav_obj, .nav_exe, .nav_obj => |i| i.value(wasm).code.len,
2034 };
2035 }
2036};
2037
2038pub const DataLoc = struct {
2039 segment: Wasm.DataSegmentId,
2040 offset: u32,
2041
2042 pub fn fromObjectDataIndex(wasm: *const Wasm, i: Wasm.ObjectData.Index) DataLoc {
2043 const ptr = i.ptr(wasm);
2044 return .{
2045 .segment = .fromObjectDataSegment(wasm, ptr.segment),
2046 .offset = ptr.offset,
2047 };
2048 }
2049
2050 pub fn fromDataImportId(wasm: *const Wasm, id: Wasm.DataImportId) DataLoc {
2051 return switch (id.unpack(wasm)) {
2052 .object_data_import => |i| .fromObjectDataImportIndex(wasm, i),
2053 .zcu_import => |i| .fromZcuImport(wasm, i),
2054 };
2055 }
2056
2057 pub fn fromObjectDataImportIndex(wasm: *const Wasm, i: Wasm.ObjectDataImport.Index) DataLoc {
2058 return i.value(wasm).resolution.dataLoc(wasm);
2059 }
2060
2061 pub fn fromZcuImport(wasm: *const Wasm, zcu_import: ZcuImportIndex) DataLoc {
2062 const nav_index = zcu_import.ptr(wasm).*;
2063 return .{
2064 .segment = .fromNav(wasm, nav_index),
2065 .offset = 0,
2066 };
2067 }
2068};
2069
2070/// Index into `Wasm.uavs`.
2071pub const UavIndex = enum(u32) {
2072 _,
2073};
2074
2075pub const CustomSegment = extern struct {
2076 payload: Payload,
2077 flags: SymbolFlags,
2078 section_name: String,
2079
2080 pub const Payload = DataPayload;
2081};
2082
2083/// An index into string_bytes where a wasm expression is found.
2084pub const Expr = enum(u32) {
2085 _,
2086
2087 pub const end = @intFromEnum(std.wasm.Opcode.end);
2088
2089 pub fn slice(index: Expr, wasm: *const Wasm) [:end]const u8 {
2090 const start_slice = wasm.string_bytes.items[@intFromEnum(index)..];
2091 const end_pos = Object.exprEndPos(start_slice, 0) catch |err| switch (err) {
2092 error.InvalidInitOpcode => unreachable,
2093 };
2094 return start_slice[0..end_pos :end];
2095 }
2096};
2097
2098pub const FunctionType = extern struct {
2099 params: ValtypeList,
2100 returns: ValtypeList,
2101
2102 /// Index into func_types
2103 pub const Index = enum(u32) {
2104 _,
2105
2106 pub fn ptr(i: Index, wasm: *const Wasm) *FunctionType {
2107 return &wasm.func_types.keys()[@intFromEnum(i)];
2108 }
2109
2110 pub fn fmt(i: Index, wasm: *const Wasm) Formatter {
2111 return i.ptr(wasm).fmt(wasm);
2112 }
2113 };
2114
2115 pub const format = @compileError("can't format without *Wasm reference");
2116
2117 pub fn eql(a: FunctionType, b: FunctionType) bool {
2118 return a.params == b.params and a.returns == b.returns;
2119 }
2120
2121 pub fn fmt(ft: FunctionType, wasm: *const Wasm) Formatter {
2122 return .{ .wasm = wasm, .ft = ft };
2123 }
2124
2125 const Formatter = struct {
2126 wasm: *const Wasm,
2127 ft: FunctionType,
2128
2129 pub fn format(self: Formatter, writer: *std.Io.Writer) std.Io.Writer.Error!void {
2130 const params = self.ft.params.slice(self.wasm);
2131 const returns = self.ft.returns.slice(self.wasm);
2132
2133 try writer.writeByte('(');
2134 for (params, 0..) |param, i| {
2135 try writer.print("{s}", .{@tagName(param)});
2136 if (i + 1 != params.len) {
2137 try writer.writeAll(", ");
2138 }
2139 }
2140 try writer.writeAll(") -> ");
2141 if (returns.len == 0) {
2142 try writer.writeAll("nil");
2143 } else {
2144 for (returns, 0..) |return_ty, i| {
2145 try writer.print("{s}", .{@tagName(return_ty)});
2146 if (i + 1 != returns.len) {
2147 try writer.writeAll(", ");
2148 }
2149 }
2150 }
2151 }
2152 };
2153};
2154
2155/// Represents a function entry, holding the index to its type
2156pub const Func = extern struct {
2157 type_index: FunctionType.Index,
2158};
2159
2160/// Type reflection is used on the field names to autopopulate each field
2161/// during initialization.
2162const PreloadedStrings = struct {
2163 __heap_base: String,
2164 __heap_end: String,
2165 __indirect_function_table: String,
2166 __linear_memory: String,
2167 __stack_pointer: String,
2168 __tls_align: String,
2169 __tls_base: String,
2170 __tls_size: String,
2171 __wasm_apply_global_tls_relocs: String,
2172 __wasm_call_ctors: String,
2173 __wasm_init_memory: String,
2174 __wasm_init_memory_flag: String,
2175 __wasm_init_tls: String,
2176 __zig_error_names: String,
2177 __zig_error_name_table: String,
2178 __zig_errors_len: String,
2179 _initialize: String,
2180 _start: String,
2181 memory: String,
2182};
2183
2184/// Index into string_bytes
2185pub const String = enum(u32) {
2186 _,
2187
2188 const Table = std.HashMapUnmanaged(String, void, TableContext, std.hash_map.default_max_load_percentage);
2189
2190 const TableContext = struct {
2191 bytes: []const u8,
2192
2193 pub fn eql(_: @This(), a: String, b: String) bool {
2194 return a == b;
2195 }
2196
2197 pub fn hash(ctx: @This(), key: String) u64 {
2198 return std.hash_map.hashString(mem.sliceTo(ctx.bytes[@intFromEnum(key)..], 0));
2199 }
2200 };
2201
2202 const TableIndexAdapter = struct {
2203 bytes: []const u8,
2204
2205 pub fn eql(ctx: @This(), a: []const u8, b: String) bool {
2206 return mem.eql(u8, a, mem.sliceTo(ctx.bytes[@intFromEnum(b)..], 0));
2207 }
2208
2209 pub fn hash(_: @This(), adapted_key: []const u8) u64 {
2210 assert(mem.indexOfScalar(u8, adapted_key, 0) == null);
2211 return std.hash_map.hashString(adapted_key);
2212 }
2213 };
2214
2215 pub fn slice(index: String, wasm: *const Wasm) [:0]const u8 {
2216 const start_slice = wasm.string_bytes.items[@intFromEnum(index)..];
2217 return start_slice[0..mem.indexOfScalar(u8, start_slice, 0).? :0];
2218 }
2219
2220 pub fn toOptional(i: String) OptionalString {
2221 const result: OptionalString = @enumFromInt(@intFromEnum(i));
2222 assert(result != .none);
2223 return result;
2224 }
2225};
2226
2227pub const OptionalString = enum(u32) {
2228 none = std.math.maxInt(u32),
2229 _,
2230
2231 pub fn unwrap(i: OptionalString) ?String {
2232 if (i == .none) return null;
2233 return @enumFromInt(@intFromEnum(i));
2234 }
2235
2236 pub fn slice(index: OptionalString, wasm: *const Wasm) ?[:0]const u8 {
2237 return (index.unwrap() orelse return null).slice(wasm);
2238 }
2239};
2240
2241/// Stored identically to `String`. The bytes are reinterpreted as
2242/// `std.wasm.Valtype` elements.
2243pub const ValtypeList = enum(u32) {
2244 _,
2245
2246 pub fn fromString(s: String) ValtypeList {
2247 return @enumFromInt(@intFromEnum(s));
2248 }
2249
2250 pub fn slice(index: ValtypeList, wasm: *const Wasm) []const std.wasm.Valtype {
2251 return @ptrCast(String.slice(@enumFromInt(@intFromEnum(index)), wasm));
2252 }
2253};
2254
2255/// Index into `Wasm.imports`.
2256pub const ZcuImportIndex = enum(u32) {
2257 _,
2258
2259 pub fn ptr(index: ZcuImportIndex, wasm: *const Wasm) *InternPool.Nav.Index {
2260 return &wasm.imports.keys()[@intFromEnum(index)];
2261 }
2262
2263 pub fn importName(index: ZcuImportIndex, wasm: *const Wasm) String {
2264 const zcu = wasm.base.comp.zcu.?;
2265 const ip = &zcu.intern_pool;
2266 const nav_index = index.ptr(wasm).*;
2267 const ext = ip.getNav(nav_index).getResolvedExtern(ip).?;
2268 const name_slice = ext.name.toSlice(ip);
2269 return wasm.getExistingString(name_slice).?;
2270 }
2271
2272 pub fn moduleName(index: ZcuImportIndex, wasm: *const Wasm) OptionalString {
2273 const zcu = wasm.base.comp.zcu.?;
2274 const ip = &zcu.intern_pool;
2275 const nav_index = index.ptr(wasm).*;
2276 const ext = ip.getNav(nav_index).getResolvedExtern(ip).?;
2277 const lib_name = ext.lib_name.toSlice(ip) orelse return .none;
2278 return wasm.getExistingString(lib_name).?.toOptional();
2279 }
2280
2281 pub fn functionType(index: ZcuImportIndex, wasm: *Wasm) FunctionType.Index {
2282 const comp = wasm.base.comp;
2283 const target = &comp.root_mod.resolved_target.result;
2284 const zcu = comp.zcu.?;
2285 const ip = &zcu.intern_pool;
2286 const nav_index = index.ptr(wasm).*;
2287 const ext = ip.getNav(nav_index).getResolvedExtern(ip).?;
2288 const fn_info = zcu.typeToFunc(.fromInterned(ext.ty)).?;
2289 return getExistingFunctionType(wasm, fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target).?;
2290 }
2291
2292 pub fn globalType(index: ZcuImportIndex, wasm: *const Wasm) ObjectGlobal.Type {
2293 _ = index;
2294 _ = wasm;
2295 unreachable; // Zig has no way to create Wasm globals yet.
2296 }
2297};
2298
2299/// 0. Index into `Wasm.object_function_imports`.
2300/// 1. Index into `Wasm.imports`.
2301pub const FunctionImportId = enum(u32) {
2302 _,
2303
2304 pub const Unpacked = union(enum) {
2305 object_function_import: FunctionImport.Index,
2306 zcu_import: ZcuImportIndex,
2307 };
2308
2309 pub fn pack(unpacked: Unpacked, wasm: *const Wasm) FunctionImportId {
2310 return switch (unpacked) {
2311 .object_function_import => |i| @enumFromInt(@intFromEnum(i)),
2312 .zcu_import => |i| @enumFromInt(@intFromEnum(i) + wasm.object_function_imports.entries.len),
2313 };
2314 }
2315
2316 pub fn unpack(id: FunctionImportId, wasm: *const Wasm) Unpacked {
2317 const i = @intFromEnum(id);
2318 if (i < wasm.object_function_imports.entries.len) return .{ .object_function_import = @enumFromInt(i) };
2319 const zcu_import_i = i - wasm.object_function_imports.entries.len;
2320 return .{ .zcu_import = @enumFromInt(zcu_import_i) };
2321 }
2322
2323 pub fn fromObject(function_import_index: FunctionImport.Index, wasm: *const Wasm) FunctionImportId {
2324 return pack(.{ .object_function_import = function_import_index }, wasm);
2325 }
2326
2327 pub fn fromZcuImport(zcu_import: ZcuImportIndex, wasm: *const Wasm) FunctionImportId {
2328 return pack(.{ .zcu_import = zcu_import }, wasm);
2329 }
2330
2331 /// This function is allowed O(N) lookup because it is only called during
2332 /// diagnostic generation.
2333 pub fn sourceLocation(id: FunctionImportId, wasm: *const Wasm) SourceLocation {
2334 switch (id.unpack(wasm)) {
2335 .object_function_import => |obj_func_index| {
2336 // TODO binary search
2337 for (wasm.objects.items, 0..) |o, i| {
2338 if (o.function_imports.off <= @intFromEnum(obj_func_index) and
2339 o.function_imports.off + o.function_imports.len > @intFromEnum(obj_func_index))
2340 {
2341 return .pack(.{ .object_index = @enumFromInt(i) }, wasm);
2342 }
2343 } else unreachable;
2344 },
2345 .zcu_import => return .zig_object_nofile, // TODO give a better source location
2346 }
2347 }
2348
2349 pub fn importName(id: FunctionImportId, wasm: *const Wasm) String {
2350 return switch (unpack(id, wasm)) {
2351 inline .object_function_import, .zcu_import => |i| i.importName(wasm),
2352 };
2353 }
2354
2355 pub fn moduleName(id: FunctionImportId, wasm: *const Wasm) OptionalString {
2356 return switch (unpack(id, wasm)) {
2357 inline .object_function_import, .zcu_import => |i| i.moduleName(wasm),
2358 };
2359 }
2360
2361 pub fn functionType(id: FunctionImportId, wasm: *Wasm) FunctionType.Index {
2362 return switch (unpack(id, wasm)) {
2363 inline .object_function_import, .zcu_import => |i| i.functionType(wasm),
2364 };
2365 }
2366
2367 /// Asserts not emitting an object, and `Wasm.import_symbols` is false.
2368 pub fn undefinedAllowed(id: FunctionImportId, wasm: *const Wasm) bool {
2369 assert(!wasm.import_symbols);
2370 assert(wasm.base.comp.config.output_mode != .Obj);
2371 return switch (unpack(id, wasm)) {
2372 .object_function_import => |i| {
2373 const import = i.value(wasm);
2374 return import.flags.binding == .strong and import.module_name != .none;
2375 },
2376 .zcu_import => |i| {
2377 const zcu = wasm.base.comp.zcu.?;
2378 const ip = &zcu.intern_pool;
2379 const ext = ip.getNav(i.ptr(wasm).*).getResolvedExtern(ip).?;
2380 return ext.linkage != .weak and ext.lib_name != .none;
2381 },
2382 };
2383 }
2384};
2385
2386/// 0. Index into `object_global_imports`.
2387/// 1. Index into `imports`.
2388pub const GlobalImportId = enum(u32) {
2389 _,
2390
2391 pub const Unpacked = union(enum) {
2392 object_global_import: GlobalImport.Index,
2393 zcu_import: ZcuImportIndex,
2394 };
2395
2396 pub fn pack(unpacked: Unpacked, wasm: *const Wasm) GlobalImportId {
2397 return switch (unpacked) {
2398 .object_global_import => |i| @enumFromInt(@intFromEnum(i)),
2399 .zcu_import => |i| @enumFromInt(@intFromEnum(i) + wasm.object_global_imports.entries.len),
2400 };
2401 }
2402
2403 pub fn unpack(id: GlobalImportId, wasm: *const Wasm) Unpacked {
2404 const i = @intFromEnum(id);
2405 if (i < wasm.object_global_imports.entries.len) return .{ .object_global_import = @enumFromInt(i) };
2406 const zcu_import_i = i - wasm.object_global_imports.entries.len;
2407 return .{ .zcu_import = @enumFromInt(zcu_import_i) };
2408 }
2409
2410 pub fn fromObject(object_global_import: GlobalImport.Index, wasm: *const Wasm) GlobalImportId {
2411 return pack(.{ .object_global_import = object_global_import }, wasm);
2412 }
2413
2414 /// This function is allowed O(N) lookup because it is only called during
2415 /// diagnostic generation.
2416 pub fn sourceLocation(id: GlobalImportId, wasm: *const Wasm) SourceLocation {
2417 switch (id.unpack(wasm)) {
2418 .object_global_import => |obj_global_index| {
2419 // TODO binary search
2420 for (wasm.objects.items, 0..) |o, i| {
2421 if (o.global_imports.off <= @intFromEnum(obj_global_index) and
2422 o.global_imports.off + o.global_imports.len > @intFromEnum(obj_global_index))
2423 {
2424 return .pack(.{ .object_index = @enumFromInt(i) }, wasm);
2425 }
2426 } else unreachable;
2427 },
2428 .zcu_import => return .zig_object_nofile, // TODO give a better source location
2429 }
2430 }
2431
2432 pub fn importName(id: GlobalImportId, wasm: *const Wasm) String {
2433 return switch (unpack(id, wasm)) {
2434 inline .object_global_import, .zcu_import => |i| i.importName(wasm),
2435 };
2436 }
2437
2438 pub fn moduleName(id: GlobalImportId, wasm: *const Wasm) OptionalString {
2439 return switch (unpack(id, wasm)) {
2440 inline .object_global_import, .zcu_import => |i| i.moduleName(wasm),
2441 };
2442 }
2443
2444 pub fn globalType(id: GlobalImportId, wasm: *Wasm) ObjectGlobal.Type {
2445 return switch (unpack(id, wasm)) {
2446 inline .object_global_import, .zcu_import => |i| i.globalType(wasm),
2447 };
2448 }
2449};
2450
2451/// 0. Index into `Wasm.object_data_imports`.
2452/// 1. Index into `Wasm.imports`.
2453pub const DataImportId = enum(u32) {
2454 _,
2455
2456 pub const Unpacked = union(enum) {
2457 object_data_import: ObjectDataImport.Index,
2458 zcu_import: ZcuImportIndex,
2459 };
2460
2461 pub fn pack(unpacked: Unpacked, wasm: *const Wasm) DataImportId {
2462 return switch (unpacked) {
2463 .object_data_import => |i| @enumFromInt(@intFromEnum(i)),
2464 .zcu_import => |i| @enumFromInt(@intFromEnum(i) + wasm.object_data_imports.entries.len),
2465 };
2466 }
2467
2468 pub fn unpack(id: DataImportId, wasm: *const Wasm) Unpacked {
2469 const i = @intFromEnum(id);
2470 if (i < wasm.object_data_imports.entries.len) return .{ .object_data_import = @enumFromInt(i) };
2471 const zcu_import_i = i - wasm.object_data_imports.entries.len;
2472 return .{ .zcu_import = @enumFromInt(zcu_import_i) };
2473 }
2474
2475 pub fn fromZcuImport(zcu_import: ZcuImportIndex, wasm: *const Wasm) DataImportId {
2476 return pack(.{ .zcu_import = zcu_import }, wasm);
2477 }
2478
2479 pub fn fromObject(object_data_import: ObjectDataImport.Index, wasm: *const Wasm) DataImportId {
2480 return pack(.{ .object_data_import = object_data_import }, wasm);
2481 }
2482
2483 pub fn sourceLocation(id: DataImportId, wasm: *const Wasm) SourceLocation {
2484 switch (id.unpack(wasm)) {
2485 .object_data_import => |obj_data_index| {
2486 // TODO binary search
2487 for (wasm.objects.items, 0..) |o, i| {
2488 if (o.data_imports.off <= @intFromEnum(obj_data_index) and
2489 o.data_imports.off + o.data_imports.len > @intFromEnum(obj_data_index))
2490 {
2491 return .pack(.{ .object_index = @enumFromInt(i) }, wasm);
2492 }
2493 } else unreachable;
2494 },
2495 .zcu_import => return .zig_object_nofile, // TODO give a better source location
2496 }
2497 }
2498};
2499
2500/// Index into `Wasm.symbol_table`.
2501pub const SymbolTableIndex = enum(u32) {
2502 _,
2503
2504 pub fn key(i: @This(), wasm: *const Wasm) *String {
2505 return &wasm.symbol_table.keys()[@intFromEnum(i)];
2506 }
2507};
2508
2509pub const OutReloc = struct {
2510 tag: Object.RelocationType,
2511 offset: u32,
2512 pointee: Pointee,
2513 addend: i32,
2514
2515 pub const Pointee = union {
2516 symbol_index: SymbolTableIndex,
2517 type_index: FunctionType.Index,
2518 };
2519
2520 pub const Slice = extern struct {
2521 /// Index into `out_relocs`.
2522 off: u32,
2523 len: u32,
2524
2525 pub fn slice(s: Slice, wasm: *const Wasm) []OutReloc {
2526 return wasm.relocations.items[s.off..][0..s.len];
2527 }
2528 };
2529};
2530
2531pub const ObjectRelocation = struct {
2532 tag: Tag,
2533 /// Offset of the value to rewrite relative to the relevant section's contents.
2534 /// When `offset` is zero, its position is immediately after the id and size of the section.
2535 offset: u32,
2536 pointee: Pointee,
2537 /// Populated only for `memory_addr_*`, `function_offset_i32` and `section_offset_i32`.
2538 addend: i32,
2539
2540 pub const Tag = enum(u8) {
2541 // These use `Pointee.function`.
2542 function_index_i32,
2543 function_index_leb,
2544 function_offset_i32,
2545 function_offset_i64,
2546 table_index_i32,
2547 table_index_i64,
2548 table_index_rel_sleb,
2549 table_index_rel_sleb64,
2550 table_index_sleb,
2551 table_index_sleb64,
2552 // These use `Pointee.symbol_name`.
2553 function_import_index_i32,
2554 function_import_index_leb,
2555 function_import_offset_i32,
2556 function_import_offset_i64,
2557 table_import_index_i32,
2558 table_import_index_i64,
2559 table_import_index_rel_sleb,
2560 table_import_index_rel_sleb64,
2561 table_import_index_sleb,
2562 table_import_index_sleb64,
2563 // These use `Pointee.global`.
2564 global_index_i32,
2565 global_index_leb,
2566 // These use `Pointee.symbol_name`.
2567 global_import_index_i32,
2568 global_import_index_leb,
2569 // These use `Pointee.data`.
2570 memory_addr_i32,
2571 memory_addr_i64,
2572 memory_addr_leb,
2573 memory_addr_leb64,
2574 memory_addr_locrel_i32,
2575 memory_addr_rel_sleb,
2576 memory_addr_rel_sleb64,
2577 memory_addr_sleb,
2578 memory_addr_sleb64,
2579 memory_addr_tls_sleb,
2580 memory_addr_tls_sleb64,
2581 // These use `Pointee.symbol_name`.
2582 memory_addr_import_i32,
2583 memory_addr_import_i64,
2584 memory_addr_import_leb,
2585 memory_addr_import_leb64,
2586 memory_addr_import_locrel_i32,
2587 memory_addr_import_rel_sleb,
2588 memory_addr_import_rel_sleb64,
2589 memory_addr_import_sleb,
2590 memory_addr_import_sleb64,
2591 memory_addr_import_tls_sleb,
2592 memory_addr_import_tls_sleb64,
2593 /// Uses `Pointee.section`.
2594 section_offset_i32,
2595 /// Uses `Pointee.table`.
2596 table_number_leb,
2597 /// Uses `Pointee.symbol_name`.
2598 table_import_number_leb,
2599 /// Uses `Pointee.type_index`.
2600 type_index_leb,
2601
2602 pub fn fromType(t: Object.RelocationType) Tag {
2603 return switch (t) {
2604 .event_index_leb => unreachable,
2605 .function_index_i32 => .function_index_i32,
2606 .function_index_leb => .function_index_leb,
2607 .function_offset_i32 => .function_offset_i32,
2608 .function_offset_i64 => .function_offset_i64,
2609 .global_index_i32 => .global_index_i32,
2610 .global_index_leb => .global_index_leb,
2611 .memory_addr_i32 => .memory_addr_i32,
2612 .memory_addr_i64 => .memory_addr_i64,
2613 .memory_addr_leb => .memory_addr_leb,
2614 .memory_addr_leb64 => .memory_addr_leb64,
2615 .memory_addr_locrel_i32 => .memory_addr_locrel_i32,
2616 .memory_addr_rel_sleb => .memory_addr_rel_sleb,
2617 .memory_addr_rel_sleb64 => .memory_addr_rel_sleb64,
2618 .memory_addr_sleb => .memory_addr_sleb,
2619 .memory_addr_sleb64 => .memory_addr_sleb64,
2620 .memory_addr_tls_sleb => .memory_addr_tls_sleb,
2621 .memory_addr_tls_sleb64 => .memory_addr_tls_sleb64,
2622 .section_offset_i32 => .section_offset_i32,
2623 .table_index_i32 => .table_index_i32,
2624 .table_index_i64 => .table_index_i64,
2625 .table_index_rel_sleb => .table_index_rel_sleb,
2626 .table_index_rel_sleb64 => .table_index_rel_sleb64,
2627 .table_index_sleb => .table_index_sleb,
2628 .table_index_sleb64 => .table_index_sleb64,
2629 .table_number_leb => .table_number_leb,
2630 .type_index_leb => .type_index_leb,
2631 };
2632 }
2633
2634 pub fn fromTypeImport(t: Object.RelocationType) Tag {
2635 return switch (t) {
2636 .event_index_leb => unreachable,
2637 .function_index_i32 => .function_import_index_i32,
2638 .function_index_leb => .function_import_index_leb,
2639 .function_offset_i32 => .function_import_offset_i32,
2640 .function_offset_i64 => .function_import_offset_i64,
2641 .global_index_i32 => .global_import_index_i32,
2642 .global_index_leb => .global_import_index_leb,
2643 .memory_addr_i32 => .memory_addr_import_i32,
2644 .memory_addr_i64 => .memory_addr_import_i64,
2645 .memory_addr_leb => .memory_addr_import_leb,
2646 .memory_addr_leb64 => .memory_addr_import_leb64,
2647 .memory_addr_locrel_i32 => .memory_addr_import_locrel_i32,
2648 .memory_addr_rel_sleb => .memory_addr_import_rel_sleb,
2649 .memory_addr_rel_sleb64 => .memory_addr_import_rel_sleb64,
2650 .memory_addr_sleb => .memory_addr_import_sleb,
2651 .memory_addr_sleb64 => .memory_addr_import_sleb64,
2652 .memory_addr_tls_sleb => .memory_addr_import_tls_sleb,
2653 .memory_addr_tls_sleb64 => .memory_addr_import_tls_sleb64,
2654 .section_offset_i32 => unreachable,
2655 .table_index_i32 => .table_import_index_i32,
2656 .table_index_i64 => .table_import_index_i64,
2657 .table_index_rel_sleb => .table_import_index_rel_sleb,
2658 .table_index_rel_sleb64 => .table_import_index_rel_sleb64,
2659 .table_index_sleb => .table_import_index_sleb,
2660 .table_index_sleb64 => .table_import_index_sleb64,
2661 .table_number_leb => .table_import_number_leb,
2662 .type_index_leb => unreachable,
2663 };
2664 }
2665 };
2666
2667 pub const Pointee = union {
2668 symbol_name: String,
2669 data: ObjectData.Index,
2670 type_index: FunctionType.Index,
2671 section: ObjectSectionIndex,
2672 function: ObjectFunctionIndex,
2673 global: ObjectGlobalIndex,
2674 table: ObjectTableIndex,
2675 };
2676
2677 pub const Slice = extern struct {
2678 /// Index into `relocations`.
2679 off: u32,
2680 len: u32,
2681
2682 const empty: Slice = .{ .off = 0, .len = 0 };
2683
2684 pub fn tags(s: Slice, wasm: *const Wasm) []const ObjectRelocation.Tag {
2685 return wasm.object_relocations.items(.tag)[s.off..][0..s.len];
2686 }
2687
2688 pub fn offsets(s: Slice, wasm: *const Wasm) []const u32 {
2689 return wasm.object_relocations.items(.offset)[s.off..][0..s.len];
2690 }
2691
2692 pub fn pointees(s: Slice, wasm: *const Wasm) []const Pointee {
2693 return wasm.object_relocations.items(.pointee)[s.off..][0..s.len];
2694 }
2695
2696 pub fn addends(s: Slice, wasm: *const Wasm) []const i32 {
2697 return wasm.object_relocations.items(.addend)[s.off..][0..s.len];
2698 }
2699 };
2700
2701 pub const IterableSlice = struct {
2702 slice: Slice,
2703 /// Offset at which point to stop iterating.
2704 end: u32,
2705
2706 const empty: IterableSlice = .{ .slice = .empty, .end = 0 };
2707
2708 fn init(relocs: Slice, offset: u32, size: u32, wasm: *const Wasm) IterableSlice {
2709 const offsets = relocs.offsets(wasm);
2710 const start = std.sort.lowerBound(u32, offsets, offset, order);
2711 return .{
2712 .slice = .{
2713 .off = @intCast(relocs.off + start),
2714 .len = @intCast(relocs.len - start),
2715 },
2716 .end = offset + size,
2717 };
2718 }
2719
2720 fn order(lhs: u32, rhs: u32) std.math.Order {
2721 return std.math.order(lhs, rhs);
2722 }
2723 };
2724};
2725
2726pub const MemoryImport = extern struct {
2727 module_name: String,
2728 limits_min: u32,
2729 limits_max: u32,
2730 source_location: SourceLocation,
2731 limits_has_max: bool,
2732 limits_is_shared: bool,
2733 padding: [2]u8 = .{ 0, 0 },
2734
2735 pub fn limits(mi: *const MemoryImport) std.wasm.Limits {
2736 return .{
2737 .flags = .{
2738 .has_max = mi.limits_has_max,
2739 .is_shared = mi.limits_is_shared,
2740 },
2741 .min = mi.limits_min,
2742 .max = mi.limits_max,
2743 };
2744 }
2745};
2746
2747pub const Alignment = InternPool.Alignment;
2748
2749pub const InitFunc = extern struct {
2750 priority: u32,
2751 function_index: ObjectFunctionIndex,
2752
2753 pub fn lessThan(ctx: void, lhs: InitFunc, rhs: InitFunc) bool {
2754 _ = ctx;
2755 if (lhs.priority == rhs.priority) {
2756 return @intFromEnum(lhs.function_index) < @intFromEnum(rhs.function_index);
2757 } else {
2758 return lhs.priority < rhs.priority;
2759 }
2760 }
2761};
2762
2763pub const Comdat = struct {
2764 name: String,
2765 /// Must be zero, no flags are currently defined by the tool-convention.
2766 flags: u32,
2767 symbols: Comdat.Symbol.Slice,
2768
2769 pub const Symbol = struct {
2770 kind: Comdat.Symbol.Type,
2771 /// Index of the data segment/function/global/event/table within a WASM module.
2772 /// The object must not be an import.
2773 index: u32,
2774
2775 pub const Slice = struct {
2776 /// Index into Wasm object_comdat_symbols
2777 off: u32,
2778 len: u32,
2779 };
2780
2781 pub const Type = enum(u8) {
2782 data = 0,
2783 function = 1,
2784 global = 2,
2785 event = 3,
2786 table = 4,
2787 section = 5,
2788 };
2789 };
2790};
2791
2792/// Stored as a u8 so it can reuse the string table mechanism.
2793pub const Feature = packed struct(u8) {
2794 prefix: Prefix,
2795 /// Type of the feature, must be unique in the sequence of features.
2796 tag: Tag,
2797
2798 pub const sentinel: Feature = @bitCast(@as(u8, 0));
2799
2800 /// Stored identically to `String`. The bytes are reinterpreted as `Feature`
2801 /// elements. Elements must be sorted before string-interning.
2802 pub const Set = enum(u32) {
2803 _,
2804
2805 pub fn fromString(s: String) Set {
2806 return @enumFromInt(@intFromEnum(s));
2807 }
2808
2809 pub fn string(s: Set) String {
2810 return @enumFromInt(@intFromEnum(s));
2811 }
2812
2813 pub fn slice(s: Set, wasm: *const Wasm) [:sentinel]const Feature {
2814 return @ptrCast(string(s).slice(wasm));
2815 }
2816 };
2817
2818 /// Unlike `std.Target.wasm.Feature` this also contains linker-features such as shared-mem.
2819 /// Additionally the name uses convention matching the wasm binary format.
2820 pub const Tag = enum(u6) {
2821 atomics,
2822 @"bulk-memory",
2823 @"bulk-memory-opt",
2824 @"call-indirect-overlong",
2825 @"exception-handling",
2826 @"extended-const",
2827 fp16,
2828 memory64,
2829 multimemory,
2830 multivalue,
2831 @"mutable-globals",
2832 @"nontrapping-bulk-memory-len0",
2833 @"nontrapping-fptoint",
2834 @"reference-types",
2835 @"relaxed-simd",
2836 @"sign-ext",
2837 simd128,
2838 @"tail-call",
2839 @"shared-mem",
2840 @"wide-arithmetic",
2841
2842 pub fn fromCpuFeature(feature: std.Target.wasm.Feature) Tag {
2843 return switch (feature) {
2844 .atomics => .atomics,
2845 .bulk_memory => .@"bulk-memory",
2846 .bulk_memory_opt => .@"bulk-memory-opt",
2847 .call_indirect_overlong => .@"call-indirect-overlong",
2848 .exception_handling => .@"exception-handling",
2849 .extended_const => .@"extended-const",
2850 .fp16 => .fp16,
2851 .multimemory => .multimemory,
2852 .multivalue => .multivalue,
2853 .mutable_globals => .@"mutable-globals",
2854 .nontrapping_bulk_memory_len0 => .@"nontrapping-bulk-memory-len0", // Zig extension.
2855 .nontrapping_fptoint => .@"nontrapping-fptoint",
2856 .reference_types => .@"reference-types",
2857 .relaxed_simd => .@"relaxed-simd",
2858 .sign_ext => .@"sign-ext",
2859 .simd128 => .simd128,
2860 .tail_call => .@"tail-call",
2861 .wide_arithmetic => .@"wide-arithmetic",
2862 };
2863 }
2864
2865 pub fn toCpuFeature(tag: Tag) ?std.Target.wasm.Feature {
2866 return switch (tag) {
2867 .atomics => .atomics,
2868 .@"bulk-memory" => .bulk_memory,
2869 .@"bulk-memory-opt" => .bulk_memory_opt,
2870 .@"call-indirect-overlong" => .call_indirect_overlong,
2871 .@"exception-handling" => .exception_handling,
2872 .@"extended-const" => .extended_const,
2873 .fp16 => .fp16,
2874 .memory64 => null, // Linker-only feature.
2875 .multimemory => .multimemory,
2876 .multivalue => .multivalue,
2877 .@"mutable-globals" => .mutable_globals,
2878 .@"nontrapping-bulk-memory-len0" => .nontrapping_bulk_memory_len0, // Zig extension.
2879 .@"nontrapping-fptoint" => .nontrapping_fptoint,
2880 .@"reference-types" => .reference_types,
2881 .@"relaxed-simd" => .relaxed_simd,
2882 .@"sign-ext" => .sign_ext,
2883 .simd128 => .simd128,
2884 .@"tail-call" => .tail_call,
2885 .@"shared-mem" => null, // Linker-only feature.
2886 .@"wide-arithmetic" => .wide_arithmetic,
2887 };
2888 }
2889
2890 pub const format = @compileError("use @tagName instead");
2891 };
2892
2893 /// Provides information about the usage of the feature.
2894 pub const Prefix = enum(u2) {
2895 /// Reserved so that a 0-byte Feature is invalid and therefore can be a sentinel.
2896 invalid,
2897 /// Object uses this feature, and the link fails if feature is not in
2898 /// the allowed set.
2899 @"+",
2900 /// Object does not use this feature, and the link fails if this
2901 /// feature is in the allowed set.
2902 @"-",
2903 /// Object uses this feature, and the link fails if this feature is not
2904 /// in the allowed set, or if any object does not use this feature.
2905 @"=",
2906 };
2907
2908 pub fn format(feature: Feature, writer: *std.Io.Writer) std.Io.Writer.Error!void {
2909 try writer.print("{s} {s}", .{ @tagName(feature.prefix), @tagName(feature.tag) });
2910 }
2911
2912 pub fn lessThan(_: void, a: Feature, b: Feature) bool {
2913 assert(a != b);
2914 const a_int: u8 = @bitCast(a);
2915 const b_int: u8 = @bitCast(b);
2916 return a_int < b_int;
2917 }
2918};
2919
2920pub fn open(
2921 arena: Allocator,
2922 comp: *Compilation,
2923 emit: Path,
2924 options: link.File.OpenOptions,
2925) !*Wasm {
2926 // TODO: restore saved linker state, don't truncate the file, and
2927 // participate in incremental compilation.
2928 return createEmpty(arena, comp, emit, options);
2929}
2930
2931pub fn createEmpty(
2932 arena: Allocator,
2933 comp: *Compilation,
2934 emit: Path,
2935 options: link.File.OpenOptions,
2936) !*Wasm {
2937 const target = &comp.root_mod.resolved_target.result;
2938 assert(target.ofmt == .wasm);
2939
2940 const use_llvm = comp.config.use_llvm;
2941 const output_mode = comp.config.output_mode;
2942 const wasi_exec_model = comp.config.wasi_exec_model;
2943
2944 const wasm = try arena.create(Wasm);
2945 wasm.* = .{
2946 .base = .{
2947 .tag = .wasm,
2948 .comp = comp,
2949 .emit = emit,
2950 .zcu_object_basename = if (use_llvm)
2951 try std.fmt.allocPrint(arena, "{s}_zcu.o", .{fs.path.stem(emit.sub_path)})
2952 else
2953 null,
2954 // Garbage collection is so crucial to WebAssembly that we design
2955 // the linker around the assumption that it will be on in the vast
2956 // majority of cases, and therefore express "no garbage collection"
2957 // in terms of setting the no_strip and must_link flags on all
2958 // symbols.
2959 .gc_sections = options.gc_sections orelse (output_mode != .Obj),
2960 .print_gc_sections = options.print_gc_sections,
2961 .stack_size = options.stack_size orelse switch (target.os.tag) {
2962 .freestanding => 1 * 1024 * 1024, // 1 MiB
2963 else => 16 * 1024 * 1024, // 16 MiB
2964 },
2965 .allow_shlib_undefined = options.allow_shlib_undefined orelse false,
2966 .file = null,
2967 .build_id = options.build_id,
2968 },
2969 .name = undefined,
2970 .string_table = .empty,
2971 .string_bytes = .empty,
2972 .export_table = options.export_table,
2973 .import_symbols = options.import_symbols,
2974 .export_symbol_names = options.export_symbol_names,
2975 .global_base = options.global_base,
2976 .initial_memory = options.initial_memory,
2977 .max_memory = options.max_memory,
2978
2979 .entry_name = undefined,
2980 .dump_argv_list = .empty,
2981 .object_host_name = .none,
2982 .preloaded_strings = undefined,
2983 };
2984 errdefer wasm.base.destroy();
2985
2986 if (options.object_host_name) |name| wasm.object_host_name = (try wasm.internString(name)).toOptional();
2987
2988 inline for (@typeInfo(PreloadedStrings).@"struct".fields) |field| {
2989 @field(wasm.preloaded_strings, field.name) = try wasm.internString(field.name);
2990 }
2991
2992 wasm.entry_name = switch (options.entry) {
2993 .disabled => .none,
2994 .default => if (output_mode != .Exe) .none else defaultEntrySymbolName(&wasm.preloaded_strings, wasi_exec_model).toOptional(),
2995 .enabled => defaultEntrySymbolName(&wasm.preloaded_strings, wasi_exec_model).toOptional(),
2996 .named => |name| (try wasm.internString(name)).toOptional(),
2997 };
2998
2999 wasm.base.file = try emit.root_dir.handle.createFile(emit.sub_path, .{
3000 .truncate = true,
3001 .read = true,
3002 .mode = if (fs.has_executable_bit)
3003 if (target.os.tag == .wasi and output_mode == .Exe)
3004 fs.File.default_mode | 0b001_000_000
3005 else
3006 fs.File.default_mode
3007 else
3008 0,
3009 });
3010 wasm.name = emit.sub_path;
3011
3012 return wasm;
3013}
3014
3015fn openParseObjectReportingFailure(wasm: *Wasm, path: Path) void {
3016 const diags = &wasm.base.comp.link_diags;
3017 const obj = link.openObject(path, false, false) catch |err| {
3018 switch (diags.failParse(path, "failed to open object: {s}", .{@errorName(err)})) {
3019 error.LinkFailure => return,
3020 }
3021 };
3022 wasm.parseObject(obj) catch |err| {
3023 switch (diags.failParse(path, "failed to parse object: {s}", .{@errorName(err)})) {
3024 error.LinkFailure => return,
3025 }
3026 };
3027}
3028
3029fn parseObject(wasm: *Wasm, obj: link.Input.Object) !void {
3030 log.debug("parseObject {f}", .{obj.path});
3031 const gpa = wasm.base.comp.gpa;
3032 const io = wasm.base.comp.io;
3033 const gc_sections = wasm.base.gc_sections;
3034
3035 defer obj.file.close();
3036
3037 var file_reader = obj.file.reader(io, &.{});
3038
3039 try wasm.objects.ensureUnusedCapacity(gpa, 1);
3040 const size = std.math.cast(usize, try file_reader.getSize()) orelse return error.FileTooBig;
3041
3042 const file_contents = try gpa.alloc(u8, size);
3043 defer gpa.free(file_contents);
3044
3045 const n = file_reader.interface.readSliceShort(file_contents) catch |err| switch (err) {
3046 error.ReadFailed => return file_reader.err.?,
3047 };
3048 if (n != file_contents.len) return error.UnexpectedEndOfFile;
3049
3050 var ss: Object.ScratchSpace = .{};
3051 defer ss.deinit(gpa);
3052
3053 const object = try Object.parse(wasm, file_contents, obj.path, null, wasm.object_host_name, &ss, obj.must_link, gc_sections);
3054 wasm.objects.appendAssumeCapacity(object);
3055}
3056
3057fn parseArchive(wasm: *Wasm, obj: link.Input.Object) !void {
3058 log.debug("parseArchive {f}", .{obj.path});
3059 const gpa = wasm.base.comp.gpa;
3060 const io = wasm.base.comp.io;
3061 const gc_sections = wasm.base.gc_sections;
3062
3063 defer obj.file.close();
3064
3065 var file_reader = obj.file.reader(io, &.{});
3066
3067 const size = std.math.cast(usize, try file_reader.getSize()) orelse return error.FileTooBig;
3068
3069 const file_contents = try gpa.alloc(u8, size);
3070 defer gpa.free(file_contents);
3071
3072 const n = file_reader.interface.readSliceShort(file_contents) catch |err| switch (err) {
3073 error.ReadFailed => return file_reader.err.?,
3074 };
3075 if (n != file_contents.len) return error.UnexpectedEndOfFile;
3076
3077 var archive = try Archive.parse(gpa, file_contents);
3078 defer archive.deinit(gpa);
3079
3080 // In this case we must force link all embedded object files within the archive
3081 // We loop over all symbols, and then group them by offset as the offset
3082 // notates where the object file starts.
3083 var offsets = std.AutoArrayHashMap(u32, void).init(gpa);
3084 defer offsets.deinit();
3085 for (archive.toc.values()) |symbol_offsets| {
3086 for (symbol_offsets.items) |sym_offset| {
3087 try offsets.put(sym_offset, {});
3088 }
3089 }
3090
3091 var ss: Object.ScratchSpace = .{};
3092 defer ss.deinit(gpa);
3093
3094 try wasm.objects.ensureUnusedCapacity(gpa, offsets.count());
3095 for (offsets.keys()) |file_offset| {
3096 const object = try archive.parseObject(wasm, file_contents, file_offset, obj.path, wasm.object_host_name, &ss, obj.must_link, gc_sections);
3097 wasm.objects.appendAssumeCapacity(object);
3098 }
3099}
3100
3101pub fn deinit(wasm: *Wasm) void {
3102 const gpa = wasm.base.comp.gpa;
3103
3104 wasm.navs_exe.deinit(gpa);
3105 wasm.navs_obj.deinit(gpa);
3106 wasm.uavs_exe.deinit(gpa);
3107 wasm.uavs_obj.deinit(gpa);
3108 wasm.overaligned_uavs.deinit(gpa);
3109 wasm.zcu_funcs.deinit(gpa);
3110 wasm.nav_exports.deinit(gpa);
3111 wasm.uav_exports.deinit(gpa);
3112 wasm.imports.deinit(gpa);
3113
3114 wasm.flush_buffer.deinit(gpa);
3115
3116 wasm.mir_instructions.deinit(gpa);
3117 wasm.mir_extra.deinit(gpa);
3118 wasm.mir_locals.deinit(gpa);
3119
3120 if (wasm.dwarf) |*dwarf| dwarf.deinit();
3121
3122 wasm.object_function_imports.deinit(gpa);
3123 wasm.object_functions.deinit(gpa);
3124 wasm.object_global_imports.deinit(gpa);
3125 wasm.object_globals.deinit(gpa);
3126 wasm.object_table_imports.deinit(gpa);
3127 wasm.object_tables.deinit(gpa);
3128 wasm.object_memory_imports.deinit(gpa);
3129 wasm.object_memories.deinit(gpa);
3130 wasm.object_relocations.deinit(gpa);
3131 wasm.object_data_imports.deinit(gpa);
3132 wasm.object_data_segments.deinit(gpa);
3133 wasm.object_datas.deinit(gpa);
3134 wasm.object_custom_segments.deinit(gpa);
3135 wasm.object_init_funcs.deinit(gpa);
3136 wasm.object_comdats.deinit(gpa);
3137 wasm.object_relocations_table.deinit(gpa);
3138 wasm.object_comdat_symbols.deinit(gpa);
3139 wasm.objects.deinit(gpa);
3140
3141 wasm.func_types.deinit(gpa);
3142 wasm.function_exports.deinit(gpa);
3143 wasm.hidden_function_exports.deinit(gpa);
3144 wasm.function_imports.deinit(gpa);
3145 wasm.functions.deinit(gpa);
3146 wasm.globals.deinit(gpa);
3147 wasm.global_exports.deinit(gpa);
3148 wasm.global_imports.deinit(gpa);
3149 wasm.table_imports.deinit(gpa);
3150 wasm.tables.deinit(gpa);
3151 wasm.data_imports.deinit(gpa);
3152 wasm.data_segments.deinit(gpa);
3153 wasm.symbol_table.deinit(gpa);
3154 wasm.out_relocs.deinit(gpa);
3155 wasm.uav_fixups.deinit(gpa);
3156 wasm.nav_fixups.deinit(gpa);
3157 wasm.func_table_fixups.deinit(gpa);
3158
3159 wasm.zcu_indirect_function_set.deinit(gpa);
3160 wasm.object_indirect_function_import_set.deinit(gpa);
3161 wasm.object_indirect_function_set.deinit(gpa);
3162
3163 wasm.string_bytes.deinit(gpa);
3164 wasm.string_table.deinit(gpa);
3165 wasm.dump_argv_list.deinit(gpa);
3166
3167 wasm.params_scratch.deinit(gpa);
3168 wasm.returns_scratch.deinit(gpa);
3169
3170 wasm.error_name_bytes.deinit(gpa);
3171 wasm.error_name_offs.deinit(gpa);
3172 wasm.tag_name_bytes.deinit(gpa);
3173 wasm.tag_name_offs.deinit(gpa);
3174
3175 wasm.missing_exports.deinit(gpa);
3176}
3177
3178pub fn updateFunc(
3179 wasm: *Wasm,
3180 pt: Zcu.PerThread,
3181 func_index: InternPool.Index,
3182 any_mir: *const codegen.AnyMir,
3183) !void {
3184 if (build_options.skip_non_native and builtin.object_format != .wasm) {
3185 @panic("Attempted to compile for object format that was disabled by build configuration");
3186 }
3187
3188 dev.check(.wasm_backend);
3189
3190 // This linker implementation only works with codegen backend `.stage2_wasm`.
3191 const mir = &any_mir.wasm;
3192 const zcu = pt.zcu;
3193 const gpa = zcu.gpa;
3194 const ip = &zcu.intern_pool;
3195 const is_obj = zcu.comp.config.output_mode == .Obj;
3196 const target = &zcu.comp.root_mod.resolved_target.result;
3197 const owner_nav = zcu.funcInfo(func_index).owner_nav;
3198 log.debug("updateFunc {f}", .{ip.getNav(owner_nav).fqn.fmt(ip)});
3199
3200 // For Wasm, we do not lower the MIR to code just yet. That lowering happens during `flush`,
3201 // after garbage collection, which can affect function and global indexes, which affects the
3202 // LEB integer encoding, which affects the output binary size.
3203
3204 // However, we do move the MIR into a more efficient in-memory representation, where the arrays
3205 // for all functions are packed together rather than keeping them each in their own `Mir`.
3206 const mir_instructions_off: u32 = @intCast(wasm.mir_instructions.len);
3207 const mir_extra_off: u32 = @intCast(wasm.mir_extra.items.len);
3208 const mir_locals_off: u32 = @intCast(wasm.mir_locals.items.len);
3209 {
3210 // Copying MultiArrayList data is a little non-trivial. Resize, then memcpy both slices.
3211 const old_len = wasm.mir_instructions.len;
3212 try wasm.mir_instructions.resize(gpa, old_len + mir.instructions.len);
3213 const dest_slice = wasm.mir_instructions.slice().subslice(old_len, mir.instructions.len);
3214 const src_slice = mir.instructions;
3215 @memcpy(dest_slice.items(.tag), src_slice.items(.tag));
3216 @memcpy(dest_slice.items(.data), src_slice.items(.data));
3217 }
3218 try wasm.mir_extra.appendSlice(gpa, mir.extra);
3219 try wasm.mir_locals.appendSlice(gpa, mir.locals);
3220
3221 // We also need to populate some global state from `mir`.
3222 try wasm.zcu_indirect_function_set.ensureUnusedCapacity(gpa, mir.indirect_function_set.count());
3223 for (mir.indirect_function_set.keys()) |nav| wasm.zcu_indirect_function_set.putAssumeCapacity(nav, {});
3224 for (mir.func_tys.keys()) |func_ty| {
3225 const fn_info = zcu.typeToFunc(.fromInterned(func_ty)).?;
3226 _ = try wasm.internFunctionType(fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target);
3227 }
3228 wasm.error_name_table_ref_count += mir.error_name_table_ref_count;
3229 // We need to populate UAV data. In theory, we can lower the UAV values while we fill `mir.uavs`.
3230 // However, lowering the data might cause *more* UAVs to be created, and mixing them up would be
3231 // a headache. So instead, just write `undefined` placeholder code and use the `ZcuDataStarts`.
3232 const zds: ZcuDataStarts = .init(wasm);
3233 for (mir.uavs.keys(), mir.uavs.values()) |uav_val, uav_align| {
3234 if (uav_align != .none) {
3235 const gop = try wasm.overaligned_uavs.getOrPut(gpa, uav_val);
3236 gop.value_ptr.* = if (gop.found_existing) gop.value_ptr.maxStrict(uav_align) else uav_align;
3237 }
3238 if (is_obj) {
3239 const gop = try wasm.uavs_obj.getOrPut(gpa, uav_val);
3240 if (!gop.found_existing) gop.value_ptr.* = undefined; // `zds` handles lowering
3241 } else {
3242 const gop = try wasm.uavs_exe.getOrPut(gpa, uav_val);
3243 if (!gop.found_existing) gop.value_ptr.* = .{
3244 .code = undefined, // `zds` handles lowering
3245 .count = 0,
3246 };
3247 gop.value_ptr.count += 1;
3248 }
3249 }
3250 try zds.finish(wasm, pt); // actually generates the UAVs
3251
3252 try wasm.functions.ensureUnusedCapacity(gpa, 1);
3253 try wasm.zcu_funcs.ensureUnusedCapacity(gpa, 1);
3254
3255 // This converts AIR to MIR but does not yet lower to wasm code.
3256 wasm.zcu_funcs.putAssumeCapacity(func_index, .{ .function = .{
3257 .instructions_off = mir_instructions_off,
3258 .instructions_len = @intCast(mir.instructions.len),
3259 .extra_off = mir_extra_off,
3260 .extra_len = @intCast(mir.extra.len),
3261 .locals_off = mir_locals_off,
3262 .locals_len = @intCast(mir.locals.len),
3263 .prologue = mir.prologue,
3264 } });
3265 wasm.functions.putAssumeCapacity(.pack(wasm, .{ .zcu_func = @enumFromInt(wasm.zcu_funcs.entries.len - 1) }), {});
3266}
3267
3268// Generate code for the "Nav", storing it in memory to be later written to
3269// the file on flush().
3270pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) !void {
3271 if (build_options.skip_non_native and builtin.object_format != .wasm) {
3272 @panic("Attempted to compile for object format that was disabled by build configuration");
3273 }
3274 const zcu = pt.zcu;
3275 const ip = &zcu.intern_pool;
3276 const nav = ip.getNav(nav_index);
3277 const comp = wasm.base.comp;
3278 const gpa = comp.gpa;
3279 const is_obj = comp.config.output_mode == .Obj;
3280 const target = &comp.root_mod.resolved_target.result;
3281
3282 const nav_init, const chased_nav_index = switch (ip.indexToKey(nav.status.fully_resolved.val)) {
3283 .func => return, // global const which is a function alias
3284 .@"extern" => |ext| {
3285 if (is_obj) {
3286 assert(!wasm.navs_obj.contains(ext.owner_nav));
3287 } else {
3288 assert(!wasm.navs_exe.contains(ext.owner_nav));
3289 }
3290 const name = try wasm.internString(ext.name.toSlice(ip));
3291 if (ext.lib_name.toSlice(ip)) |ext_name| _ = try wasm.internString(ext_name);
3292 try wasm.imports.ensureUnusedCapacity(gpa, 1);
3293 try wasm.function_imports.ensureUnusedCapacity(gpa, 1);
3294 try wasm.data_imports.ensureUnusedCapacity(gpa, 1);
3295 const zcu_import = wasm.addZcuImportReserved(ext.owner_nav);
3296 if (ip.isFunctionType(nav.typeOf(ip))) {
3297 wasm.function_imports.putAssumeCapacity(name, .fromZcuImport(zcu_import, wasm));
3298 // Ensure there is a corresponding function type table entry.
3299 const fn_info = zcu.typeToFunc(.fromInterned(ext.ty)).?;
3300 _ = try internFunctionType(wasm, fn_info.cc, fn_info.param_types.get(ip), .fromInterned(fn_info.return_type), target);
3301 } else {
3302 wasm.data_imports.putAssumeCapacity(name, .fromZcuImport(zcu_import, wasm));
3303 }
3304 return;
3305 },
3306 .variable => |variable| .{ variable.init, variable.owner_nav },
3307 else => .{ nav.status.fully_resolved.val, nav_index },
3308 };
3309 //log.debug("updateNav {f} {d}", .{ nav.fqn.fmt(ip), chased_nav_index });
3310 assert(!wasm.imports.contains(chased_nav_index));
3311
3312 if (nav_init != .none and !Value.fromInterned(nav_init).typeOf(zcu).hasRuntimeBits(zcu)) {
3313 if (is_obj) {
3314 assert(!wasm.navs_obj.contains(chased_nav_index));
3315 } else {
3316 assert(!wasm.navs_exe.contains(chased_nav_index));
3317 }
3318 return;
3319 }
3320
3321 if (is_obj) {
3322 const zcu_data_starts: ZcuDataStarts = .initObj(wasm);
3323 const navs_i = try refNavObj(wasm, chased_nav_index);
3324 const zcu_data = try lowerZcuData(wasm, pt, nav_init);
3325 navs_i.value(wasm).* = zcu_data;
3326 try zcu_data_starts.finishObj(wasm, pt);
3327 } else {
3328 const zcu_data_starts: ZcuDataStarts = .initExe(wasm);
3329 const navs_i = try refNavExe(wasm, chased_nav_index);
3330 const zcu_data = try lowerZcuData(wasm, pt, nav_init);
3331 navs_i.value(wasm).code = zcu_data.code;
3332 try zcu_data_starts.finishExe(wasm, pt);
3333 }
3334}
3335
3336pub fn updateLineNumber(wasm: *Wasm, pt: Zcu.PerThread, ti_id: InternPool.TrackedInst.Index) !void {
3337 const comp = wasm.base.comp;
3338 const diags = &comp.link_diags;
3339 if (wasm.dwarf) |*dw| {
3340 dw.updateLineNumber(pt.zcu, ti_id) catch |err| switch (err) {
3341 error.Overflow => return error.Overflow,
3342 error.OutOfMemory => return error.OutOfMemory,
3343 else => |e| return diags.fail("failed to update dwarf line numbers: {s}", .{@errorName(e)}),
3344 };
3345 }
3346}
3347
3348pub fn deleteExport(
3349 wasm: *Wasm,
3350 exported: Zcu.Exported,
3351 name: InternPool.NullTerminatedString,
3352) void {
3353 const zcu = wasm.base.comp.zcu.?;
3354 const ip = &zcu.intern_pool;
3355 const name_slice = name.toSlice(ip);
3356 const export_name = wasm.getExistingString(name_slice).?;
3357 switch (exported) {
3358 .nav => |nav_index| {
3359 log.debug("deleteExport '{s}' nav={d}", .{ name_slice, @intFromEnum(nav_index) });
3360 assert(wasm.nav_exports.swapRemove(.{ .nav_index = nav_index, .name = export_name }));
3361 },
3362 .uav => |uav_index| assert(wasm.uav_exports.swapRemove(.{ .uav_index = uav_index, .name = export_name })),
3363 }
3364}
3365
3366pub fn updateExports(
3367 wasm: *Wasm,
3368 pt: Zcu.PerThread,
3369 exported: Zcu.Exported,
3370 export_indices: []const Zcu.Export.Index,
3371) !void {
3372 if (build_options.skip_non_native and builtin.object_format != .wasm) {
3373 @panic("Attempted to compile for object format that was disabled by build configuration");
3374 }
3375
3376 const zcu = pt.zcu;
3377 const gpa = zcu.gpa;
3378 const ip = &zcu.intern_pool;
3379 for (export_indices) |export_idx| {
3380 const exp = export_idx.ptr(zcu);
3381 const name_slice = exp.opts.name.toSlice(ip);
3382 const name = try wasm.internString(name_slice);
3383 switch (exported) {
3384 .nav => |nav_index| {
3385 log.debug("updateExports '{s}' nav={d}", .{ name_slice, @intFromEnum(nav_index) });
3386 try wasm.nav_exports.put(gpa, .{ .nav_index = nav_index, .name = name }, export_idx);
3387 },
3388 .uav => |uav_index| try wasm.uav_exports.put(gpa, .{ .uav_index = uav_index, .name = name }, export_idx),
3389 }
3390 }
3391}
3392
3393pub fn loadInput(wasm: *Wasm, input: link.Input) !void {
3394 const comp = wasm.base.comp;
3395 const gpa = comp.gpa;
3396
3397 if (comp.verbose_link) {
3398 comp.mutex.lock(); // protect comp.arena
3399 defer comp.mutex.unlock();
3400
3401 const argv = &wasm.dump_argv_list;
3402 switch (input) {
3403 .res => unreachable,
3404 .dso_exact => unreachable,
3405 .dso => unreachable,
3406 .object, .archive => |obj| {
3407 try argv.append(gpa, try obj.path.toString(comp.arena));
3408 },
3409 }
3410 }
3411
3412 switch (input) {
3413 .res => unreachable,
3414 .dso_exact => unreachable,
3415 .dso => unreachable,
3416 .object => |obj| try parseObject(wasm, obj),
3417 .archive => |obj| try parseArchive(wasm, obj),
3418 }
3419}
3420
3421pub fn prelink(wasm: *Wasm, prog_node: std.Progress.Node) link.File.FlushError!void {
3422 const tracy = trace(@src());
3423 defer tracy.end();
3424
3425 const sub_prog_node = prog_node.start("Wasm Prelink", 0);
3426 defer sub_prog_node.end();
3427
3428 const comp = wasm.base.comp;
3429 const gpa = comp.gpa;
3430 const rdynamic = comp.config.rdynamic;
3431 const is_obj = comp.config.output_mode == .Obj;
3432
3433 assert(wasm.missing_exports.entries.len == 0);
3434 for (wasm.export_symbol_names) |exp_name| {
3435 const exp_name_interned = try wasm.internString(exp_name);
3436 if (wasm.object_function_imports.getPtr(exp_name_interned)) |import| {
3437 if (import.resolution != .unresolved) {
3438 import.flags.exported = true;
3439 continue;
3440 }
3441 }
3442 if (wasm.object_global_imports.getPtr(exp_name_interned)) |import| {
3443 if (import.resolution != .unresolved) {
3444 import.flags.exported = true;
3445 continue;
3446 }
3447 }
3448 if (wasm.object_table_imports.getPtr(exp_name_interned)) |import| {
3449 if (import.resolution != .unresolved) {
3450 import.flags.exported = true;
3451 continue;
3452 }
3453 }
3454 try wasm.missing_exports.put(gpa, exp_name_interned, {});
3455 }
3456
3457 if (wasm.entry_name.unwrap()) |entry_name| {
3458 if (wasm.object_function_imports.getPtr(entry_name)) |import| {
3459 if (import.resolution != .unresolved) {
3460 import.flags.exported = true;
3461 wasm.entry_resolution = import.resolution;
3462 }
3463 }
3464 }
3465
3466 if (comp.zcu != null) {
3467 // Zig always depends on a stack pointer global.
3468 // If emitting an object, it's an import. Otherwise, the linker synthesizes it.
3469 if (is_obj) {
3470 @panic("TODO");
3471 } else {
3472 try wasm.globals.put(gpa, .__stack_pointer, {});
3473 assert(wasm.globals.entries.len - 1 == @intFromEnum(GlobalIndex.stack_pointer));
3474 }
3475 }
3476
3477 // These loops do both recursive marking of alive symbols well as checking for undefined symbols.
3478 // At the end, output functions and globals will be populated.
3479 for (wasm.object_function_imports.keys(), wasm.object_function_imports.values(), 0..) |name, *import, i| {
3480 if (import.flags.isIncluded(rdynamic)) {
3481 try markFunctionImport(wasm, name, import, @enumFromInt(i));
3482 }
3483 }
3484 // Also treat init functions as roots.
3485 for (wasm.object_init_funcs.items) |init_func| {
3486 const func = init_func.function_index.ptr(wasm);
3487 if (func.object_index.ptr(wasm).is_included) {
3488 try markFunction(wasm, init_func.function_index, false);
3489 }
3490 }
3491 wasm.functions_end_prelink = @intCast(wasm.functions.entries.len);
3492
3493 for (wasm.object_global_imports.keys(), wasm.object_global_imports.values(), 0..) |name, *import, i| {
3494 if (import.flags.isIncluded(rdynamic)) {
3495 try markGlobalImport(wasm, name, import, @enumFromInt(i));
3496 }
3497 }
3498 wasm.globals_end_prelink = @intCast(wasm.globals.entries.len);
3499 wasm.global_exports_len = @intCast(wasm.global_exports.items.len);
3500
3501 for (wasm.object_table_imports.keys(), wasm.object_table_imports.values(), 0..) |name, *import, i| {
3502 if (import.flags.isIncluded(rdynamic)) {
3503 try markTableImport(wasm, name, import, @enumFromInt(i));
3504 }
3505 }
3506
3507 for (wasm.object_data_imports.keys(), wasm.object_data_imports.values(), 0..) |name, *import, i| {
3508 if (import.flags.isIncluded(rdynamic)) {
3509 try markDataImport(wasm, name, import, @enumFromInt(i));
3510 }
3511 }
3512
3513 // This is a wild ass guess at how to merge memories, haven't checked yet
3514 // what the proper way to do this is.
3515 for (wasm.object_memory_imports.values()) |*memory_import| {
3516 wasm.memories.limits.min = @min(wasm.memories.limits.min, memory_import.limits_min);
3517 wasm.memories.limits.max = @max(wasm.memories.limits.max, memory_import.limits_max);
3518 wasm.memories.limits.flags.has_max = wasm.memories.limits.flags.has_max or memory_import.limits_has_max;
3519 }
3520
3521 wasm.function_imports_len_prelink = @intCast(wasm.function_imports.entries.len);
3522 wasm.data_imports_len_prelink = @intCast(wasm.data_imports.entries.len);
3523}
3524
3525pub fn markFunctionImport(
3526 wasm: *Wasm,
3527 name: String,
3528 import: *FunctionImport,
3529 func_index: FunctionImport.Index,
3530) link.File.FlushError!void {
3531 if (import.flags.alive) return;
3532 import.flags.alive = true;
3533
3534 const comp = wasm.base.comp;
3535 const gpa = comp.gpa;
3536
3537 try wasm.functions.ensureUnusedCapacity(gpa, 1);
3538
3539 if (import.resolution == .unresolved) {
3540 if (name == wasm.preloaded_strings.__wasm_init_memory) {
3541 try wasm.resolveFunctionSynthetic(import, .__wasm_init_memory, &.{}, &.{});
3542 } else if (name == wasm.preloaded_strings.__wasm_apply_global_tls_relocs) {
3543 try wasm.resolveFunctionSynthetic(import, .__wasm_apply_global_tls_relocs, &.{}, &.{});
3544 } else if (name == wasm.preloaded_strings.__wasm_call_ctors) {
3545 try wasm.resolveFunctionSynthetic(import, .__wasm_call_ctors, &.{}, &.{});
3546 } else if (name == wasm.preloaded_strings.__wasm_init_tls) {
3547 try wasm.resolveFunctionSynthetic(import, .__wasm_init_tls, &.{.i32}, &.{});
3548 } else {
3549 try wasm.function_imports.put(gpa, name, .fromObject(func_index, wasm));
3550 }
3551 } else {
3552 try markFunction(wasm, import.resolution.unpack(wasm).object_function, import.flags.exported);
3553 }
3554}
3555
3556/// Recursively mark alive everything referenced by the function.
3557fn markFunction(wasm: *Wasm, i: ObjectFunctionIndex, override_export: bool) link.File.FlushError!void {
3558 const comp = wasm.base.comp;
3559 const gpa = comp.gpa;
3560 const gop = try wasm.functions.getOrPut(gpa, .fromObjectFunction(wasm, i));
3561 if (gop.found_existing) return;
3562
3563 const rdynamic = comp.config.rdynamic;
3564 const is_obj = comp.config.output_mode == .Obj;
3565 const function = i.ptr(wasm);
3566 markObject(wasm, function.object_index);
3567
3568 if (!is_obj and (override_export or function.flags.isExported(rdynamic))) {
3569 const symbol_name = function.name.unwrap().?;
3570 if (!override_export and function.flags.visibility_hidden) {
3571 try wasm.hidden_function_exports.put(gpa, symbol_name, @enumFromInt(gop.index));
3572 } else {
3573 try wasm.function_exports.put(gpa, symbol_name, @enumFromInt(gop.index));
3574 }
3575 }
3576
3577 try wasm.markRelocations(function.relocations(wasm));
3578}
3579
3580fn markObject(wasm: *Wasm, i: ObjectIndex) void {
3581 i.ptr(wasm).is_included = true;
3582}
3583
3584/// Recursively mark alive everything referenced by the global.
3585fn markGlobalImport(
3586 wasm: *Wasm,
3587 name: String,
3588 import: *GlobalImport,
3589 global_index: GlobalImport.Index,
3590) link.File.FlushError!void {
3591 if (import.flags.alive) return;
3592 import.flags.alive = true;
3593
3594 const comp = wasm.base.comp;
3595 const gpa = comp.gpa;
3596
3597 try wasm.globals.ensureUnusedCapacity(gpa, 1);
3598
3599 if (import.resolution == .unresolved) {
3600 if (name == wasm.preloaded_strings.__heap_base) {
3601 import.resolution = .__heap_base;
3602 wasm.globals.putAssumeCapacity(.__heap_base, {});
3603 } else if (name == wasm.preloaded_strings.__heap_end) {
3604 import.resolution = .__heap_end;
3605 wasm.globals.putAssumeCapacity(.__heap_end, {});
3606 } else if (name == wasm.preloaded_strings.__stack_pointer) {
3607 import.resolution = .__stack_pointer;
3608 wasm.globals.putAssumeCapacity(.__stack_pointer, {});
3609 } else if (name == wasm.preloaded_strings.__tls_align) {
3610 import.resolution = .__tls_align;
3611 wasm.globals.putAssumeCapacity(.__tls_align, {});
3612 } else if (name == wasm.preloaded_strings.__tls_base) {
3613 import.resolution = .__tls_base;
3614 wasm.globals.putAssumeCapacity(.__tls_base, {});
3615 } else if (name == wasm.preloaded_strings.__tls_size) {
3616 import.resolution = .__tls_size;
3617 wasm.globals.putAssumeCapacity(.__tls_size, {});
3618 } else {
3619 try wasm.global_imports.put(gpa, name, .fromObject(global_index, wasm));
3620 }
3621 } else {
3622 try markGlobal(wasm, import.resolution.unpack(wasm).object_global, import.flags.exported);
3623 }
3624}
3625
3626fn markGlobal(wasm: *Wasm, i: ObjectGlobalIndex, override_export: bool) link.File.FlushError!void {
3627 const comp = wasm.base.comp;
3628 const gpa = comp.gpa;
3629 const gop = try wasm.globals.getOrPut(gpa, .fromObjectGlobal(wasm, i));
3630 if (gop.found_existing) return;
3631
3632 const rdynamic = comp.config.rdynamic;
3633 const is_obj = comp.config.output_mode == .Obj;
3634 const global = i.ptr(wasm);
3635
3636 if (!is_obj and (override_export or global.flags.isExported(rdynamic))) try wasm.global_exports.append(gpa, .{
3637 .name = global.name.unwrap().?,
3638 .global_index = @enumFromInt(gop.index),
3639 });
3640
3641 try wasm.markRelocations(global.relocations(wasm));
3642}
3643
3644fn markTableImport(
3645 wasm: *Wasm,
3646 name: String,
3647 import: *TableImport,
3648 table_index: TableImport.Index,
3649) link.File.FlushError!void {
3650 if (import.flags.alive) return;
3651 import.flags.alive = true;
3652
3653 const comp = wasm.base.comp;
3654 const gpa = comp.gpa;
3655
3656 try wasm.tables.ensureUnusedCapacity(gpa, 1);
3657
3658 if (import.resolution == .unresolved) {
3659 if (name == wasm.preloaded_strings.__indirect_function_table) {
3660 import.resolution = .__indirect_function_table;
3661 wasm.tables.putAssumeCapacity(.__indirect_function_table, {});
3662 } else {
3663 try wasm.table_imports.put(gpa, name, table_index);
3664 }
3665 } else {
3666 wasm.tables.putAssumeCapacity(import.resolution, {});
3667 // Tables have no relocations.
3668 }
3669}
3670
3671fn markDataSegment(wasm: *Wasm, segment_index: ObjectDataSegment.Index) link.File.FlushError!void {
3672 const comp = wasm.base.comp;
3673 const segment = segment_index.ptr(wasm);
3674 if (segment.flags.alive) return;
3675 segment.flags.alive = true;
3676
3677 wasm.any_passive_inits = wasm.any_passive_inits or segment.flags.is_passive or
3678 (comp.config.import_memory and !wasm.isBss(segment.name));
3679
3680 try wasm.data_segments.put(comp.gpa, .pack(wasm, .{ .object = segment_index }), {});
3681 try wasm.markRelocations(segment.relocations(wasm));
3682}
3683
3684pub fn markDataImport(
3685 wasm: *Wasm,
3686 name: String,
3687 import: *ObjectDataImport,
3688 data_index: ObjectDataImport.Index,
3689) link.File.FlushError!void {
3690 if (import.flags.alive) return;
3691 import.flags.alive = true;
3692
3693 const comp = wasm.base.comp;
3694 const gpa = comp.gpa;
3695
3696 if (import.resolution == .unresolved) {
3697 if (name == wasm.preloaded_strings.__heap_base) {
3698 import.resolution = .__heap_base;
3699 wasm.data_segments.putAssumeCapacity(.__heap_base, {});
3700 } else if (name == wasm.preloaded_strings.__heap_end) {
3701 import.resolution = .__heap_end;
3702 wasm.data_segments.putAssumeCapacity(.__heap_end, {});
3703 } else {
3704 try wasm.data_imports.put(gpa, name, .fromObject(data_index, wasm));
3705 }
3706 } else if (import.resolution.objectDataSegment(wasm)) |segment_index| {
3707 try markDataSegment(wasm, segment_index);
3708 }
3709}
3710
3711fn markRelocations(wasm: *Wasm, relocs: ObjectRelocation.IterableSlice) link.File.FlushError!void {
3712 const gpa = wasm.base.comp.gpa;
3713 for (relocs.slice.tags(wasm), relocs.slice.pointees(wasm), relocs.slice.offsets(wasm)) |tag, pointee, offset| {
3714 if (offset >= relocs.end) break;
3715 switch (tag) {
3716 .function_import_index_leb,
3717 .function_import_index_i32,
3718 .function_import_offset_i32,
3719 .function_import_offset_i64,
3720 => {
3721 const name = pointee.symbol_name;
3722 const i: FunctionImport.Index = @enumFromInt(wasm.object_function_imports.getIndex(name).?);
3723 try markFunctionImport(wasm, name, i.value(wasm), i);
3724 },
3725 .table_import_index_sleb,
3726 .table_import_index_i32,
3727 .table_import_index_sleb64,
3728 .table_import_index_i64,
3729 .table_import_index_rel_sleb,
3730 .table_import_index_rel_sleb64,
3731 => {
3732 const name = pointee.symbol_name;
3733 try wasm.object_indirect_function_import_set.put(gpa, name, {});
3734 const i: FunctionImport.Index = @enumFromInt(wasm.object_function_imports.getIndex(name).?);
3735 try markFunctionImport(wasm, name, i.value(wasm), i);
3736 },
3737 .global_import_index_leb, .global_import_index_i32 => {
3738 const name = pointee.symbol_name;
3739 const i: GlobalImport.Index = @enumFromInt(wasm.object_global_imports.getIndex(name).?);
3740 try markGlobalImport(wasm, name, i.value(wasm), i);
3741 },
3742 .table_import_number_leb => {
3743 const name = pointee.symbol_name;
3744 const i: TableImport.Index = @enumFromInt(wasm.object_table_imports.getIndex(name).?);
3745 try markTableImport(wasm, name, i.value(wasm), i);
3746 },
3747 .memory_addr_import_leb,
3748 .memory_addr_import_sleb,
3749 .memory_addr_import_i32,
3750 .memory_addr_import_rel_sleb,
3751 .memory_addr_import_leb64,
3752 .memory_addr_import_sleb64,
3753 .memory_addr_import_i64,
3754 .memory_addr_import_rel_sleb64,
3755 .memory_addr_import_tls_sleb,
3756 .memory_addr_import_locrel_i32,
3757 .memory_addr_import_tls_sleb64,
3758 => {
3759 const name = pointee.symbol_name;
3760 const i = ObjectDataImport.Index.fromSymbolName(wasm, name).?;
3761 try markDataImport(wasm, name, i.value(wasm), i);
3762 },
3763
3764 .function_index_leb,
3765 .function_index_i32,
3766 .function_offset_i32,
3767 .function_offset_i64,
3768 => try markFunction(wasm, pointee.function.chaseWeak(wasm), false),
3769 .table_index_sleb,
3770 .table_index_i32,
3771 .table_index_sleb64,
3772 .table_index_i64,
3773 .table_index_rel_sleb,
3774 .table_index_rel_sleb64,
3775 => {
3776 const function = pointee.function;
3777 try wasm.object_indirect_function_set.put(gpa, function, {});
3778 try markFunction(wasm, function.chaseWeak(wasm), false);
3779 },
3780 .global_index_leb,
3781 .global_index_i32,
3782 => try markGlobal(wasm, pointee.global.chaseWeak(wasm), false),
3783 .table_number_leb,
3784 => try markTable(wasm, pointee.table.chaseWeak(wasm)),
3785
3786 .section_offset_i32 => {
3787 log.warn("TODO: ensure section {d} is included in output", .{pointee.section});
3788 },
3789
3790 .memory_addr_leb,
3791 .memory_addr_sleb,
3792 .memory_addr_i32,
3793 .memory_addr_rel_sleb,
3794 .memory_addr_leb64,
3795 .memory_addr_sleb64,
3796 .memory_addr_i64,
3797 .memory_addr_rel_sleb64,
3798 .memory_addr_tls_sleb,
3799 .memory_addr_locrel_i32,
3800 .memory_addr_tls_sleb64,
3801 => try markDataSegment(wasm, pointee.data.ptr(wasm).segment),
3802
3803 .type_index_leb => continue,
3804 }
3805 }
3806}
3807
3808fn markTable(wasm: *Wasm, i: ObjectTableIndex) link.File.FlushError!void {
3809 try wasm.tables.put(wasm.base.comp.gpa, .fromObjectTable(i), {});
3810}
3811
3812pub fn flush(
3813 wasm: *Wasm,
3814 arena: Allocator,
3815 tid: Zcu.PerThread.Id,
3816 prog_node: std.Progress.Node,
3817) link.File.FlushError!void {
3818 // The goal is to never use this because it's only needed if we need to
3819 // write to InternPool, but flush is too late to be writing to the
3820 // InternPool.
3821 _ = tid;
3822 const comp = wasm.base.comp;
3823 const diags = &comp.link_diags;
3824 const gpa = comp.gpa;
3825
3826 if (comp.verbose_link) Compilation.dump_argv(wasm.dump_argv_list.items);
3827
3828 if (wasm.base.zcu_object_basename) |raw| {
3829 const zcu_obj_path: Path = try comp.resolveEmitPathFlush(arena, .temp, raw);
3830 openParseObjectReportingFailure(wasm, zcu_obj_path);
3831 try prelink(wasm, prog_node);
3832 }
3833
3834 const tracy = trace(@src());
3835 defer tracy.end();
3836
3837 const sub_prog_node = prog_node.start("Wasm Flush", 0);
3838 defer sub_prog_node.end();
3839
3840 const functions_end_zcu: u32 = @intCast(wasm.functions.entries.len);
3841 defer wasm.functions.shrinkRetainingCapacity(functions_end_zcu);
3842
3843 const globals_end_zcu: u32 = @intCast(wasm.globals.entries.len);
3844 defer wasm.globals.shrinkRetainingCapacity(globals_end_zcu);
3845
3846 const function_exports_end_zcu: u32 = @intCast(wasm.function_exports.entries.len);
3847 defer wasm.function_exports.shrinkRetainingCapacity(function_exports_end_zcu);
3848
3849 const hidden_function_exports_end_zcu: u32 = @intCast(wasm.hidden_function_exports.entries.len);
3850 defer wasm.hidden_function_exports.shrinkRetainingCapacity(hidden_function_exports_end_zcu);
3851
3852 wasm.flush_buffer.clear();
3853 try wasm.flush_buffer.missing_exports.reinit(gpa, wasm.missing_exports.keys(), &.{});
3854 try wasm.flush_buffer.function_imports.reinit(gpa, wasm.function_imports.keys(), wasm.function_imports.values());
3855 try wasm.flush_buffer.global_imports.reinit(gpa, wasm.global_imports.keys(), wasm.global_imports.values());
3856 try wasm.flush_buffer.data_imports.reinit(gpa, wasm.data_imports.keys(), wasm.data_imports.values());
3857
3858 return wasm.flush_buffer.finish(wasm) catch |err| switch (err) {
3859 error.OutOfMemory => return error.OutOfMemory,
3860 error.LinkFailure => return error.LinkFailure,
3861 else => |e| return diags.fail("failed to flush wasm: {s}", .{@errorName(e)}),
3862 };
3863}
3864
3865fn defaultEntrySymbolName(
3866 preloaded_strings: *const PreloadedStrings,
3867 wasi_exec_model: std.builtin.WasiExecModel,
3868) String {
3869 return switch (wasi_exec_model) {
3870 .reactor => preloaded_strings._initialize,
3871 .command => preloaded_strings._start,
3872 };
3873}
3874
3875pub fn internOptionalString(wasm: *Wasm, optional_bytes: ?[]const u8) Allocator.Error!OptionalString {
3876 const bytes = optional_bytes orelse return .none;
3877 const string = try internString(wasm, bytes);
3878 return string.toOptional();
3879}
3880
3881pub fn internString(wasm: *Wasm, bytes: []const u8) Allocator.Error!String {
3882 assert(mem.indexOfScalar(u8, bytes, 0) == null);
3883 wasm.string_bytes_lock.lock();
3884 defer wasm.string_bytes_lock.unlock();
3885 const gpa = wasm.base.comp.gpa;
3886 const gop = try wasm.string_table.getOrPutContextAdapted(
3887 gpa,
3888 @as([]const u8, bytes),
3889 @as(String.TableIndexAdapter, .{ .bytes = wasm.string_bytes.items }),
3890 @as(String.TableContext, .{ .bytes = wasm.string_bytes.items }),
3891 );
3892 if (gop.found_existing) return gop.key_ptr.*;
3893
3894 try wasm.string_bytes.ensureUnusedCapacity(gpa, bytes.len + 1);
3895 const new_off: String = @enumFromInt(wasm.string_bytes.items.len);
3896
3897 wasm.string_bytes.appendSliceAssumeCapacity(bytes);
3898 wasm.string_bytes.appendAssumeCapacity(0);
3899
3900 gop.key_ptr.* = new_off;
3901
3902 return new_off;
3903}
3904
3905// TODO implement instead by appending to string_bytes
3906pub fn internStringFmt(wasm: *Wasm, comptime format: []const u8, args: anytype) Allocator.Error!String {
3907 var buffer: [32]u8 = undefined;
3908 const slice = std.fmt.bufPrint(&buffer, format, args) catch unreachable;
3909 return internString(wasm, slice);
3910}
3911
3912pub fn getExistingString(wasm: *const Wasm, bytes: []const u8) ?String {
3913 assert(mem.indexOfScalar(u8, bytes, 0) == null);
3914 return wasm.string_table.getKeyAdapted(bytes, @as(String.TableIndexAdapter, .{
3915 .bytes = wasm.string_bytes.items,
3916 }));
3917}
3918
3919pub fn internValtypeList(wasm: *Wasm, valtype_list: []const std.wasm.Valtype) Allocator.Error!ValtypeList {
3920 return .fromString(try internString(wasm, @ptrCast(valtype_list)));
3921}
3922
3923pub fn getExistingValtypeList(wasm: *const Wasm, valtype_list: []const std.wasm.Valtype) ?ValtypeList {
3924 return .fromString(getExistingString(wasm, @ptrCast(valtype_list)) orelse return null);
3925}
3926
3927pub fn addFuncType(wasm: *Wasm, ft: FunctionType) Allocator.Error!FunctionType.Index {
3928 const gpa = wasm.base.comp.gpa;
3929 const gop = try wasm.func_types.getOrPut(gpa, ft);
3930 return @enumFromInt(gop.index);
3931}
3932
3933pub fn getExistingFuncType(wasm: *const Wasm, ft: FunctionType) ?FunctionType.Index {
3934 const index = wasm.func_types.getIndex(ft) orelse return null;
3935 return @enumFromInt(index);
3936}
3937
3938pub fn getExistingFuncType2(wasm: *const Wasm, params: []const std.wasm.Valtype, returns: []const std.wasm.Valtype) FunctionType.Index {
3939 return getExistingFuncType(wasm, .{
3940 .params = getExistingValtypeList(wasm, params).?,
3941 .returns = getExistingValtypeList(wasm, returns).?,
3942 }).?;
3943}
3944
3945pub fn internFunctionType(
3946 wasm: *Wasm,
3947 cc: std.builtin.CallingConvention,
3948 params: []const InternPool.Index,
3949 return_type: Zcu.Type,
3950 target: *const std.Target,
3951) Allocator.Error!FunctionType.Index {
3952 try convertZcuFnType(wasm.base.comp, cc, params, return_type, target, &wasm.params_scratch, &wasm.returns_scratch);
3953 return wasm.addFuncType(.{
3954 .params = try wasm.internValtypeList(wasm.params_scratch.items),
3955 .returns = try wasm.internValtypeList(wasm.returns_scratch.items),
3956 });
3957}
3958
3959pub fn getExistingFunctionType(
3960 wasm: *Wasm,
3961 cc: std.builtin.CallingConvention,
3962 params: []const InternPool.Index,
3963 return_type: Zcu.Type,
3964 target: *const std.Target,
3965) ?FunctionType.Index {
3966 convertZcuFnType(wasm.base.comp, cc, params, return_type, target, &wasm.params_scratch, &wasm.returns_scratch) catch |err| switch (err) {
3967 error.OutOfMemory => return null,
3968 };
3969 return wasm.getExistingFuncType(.{
3970 .params = wasm.getExistingValtypeList(wasm.params_scratch.items) orelse return null,
3971 .returns = wasm.getExistingValtypeList(wasm.returns_scratch.items) orelse return null,
3972 });
3973}
3974
3975pub fn addExpr(wasm: *Wasm, bytes: []const u8) Allocator.Error!Expr {
3976 const gpa = wasm.base.comp.gpa;
3977 // We can't use string table deduplication here since these expressions can
3978 // have null bytes in them however it may be interesting to explore since
3979 // it is likely for globals to share initialization values. Then again
3980 // there may not be very many globals in total.
3981 try wasm.string_bytes.appendSlice(gpa, bytes);
3982 return @enumFromInt(wasm.string_bytes.items.len - bytes.len);
3983}
3984
3985pub fn addRelocatableDataPayload(wasm: *Wasm, bytes: []const u8) Allocator.Error!DataPayload {
3986 const gpa = wasm.base.comp.gpa;
3987 try wasm.string_bytes.appendSlice(gpa, bytes);
3988 return .{
3989 .off = @enumFromInt(wasm.string_bytes.items.len - bytes.len),
3990 .len = @intCast(bytes.len),
3991 };
3992}
3993
3994pub fn uavSymbolIndex(wasm: *Wasm, ip_index: InternPool.Index) Allocator.Error!SymbolTableIndex {
3995 const comp = wasm.base.comp;
3996 assert(comp.config.output_mode == .Obj);
3997 const gpa = comp.gpa;
3998 const name = try wasm.internStringFmt("__anon_{d}", .{@intFromEnum(ip_index)});
3999 const gop = try wasm.symbol_table.getOrPut(gpa, name);
4000 gop.value_ptr.* = {};
4001 return @enumFromInt(gop.index);
4002}
4003
4004pub fn navSymbolIndex(wasm: *Wasm, nav_index: InternPool.Nav.Index) Allocator.Error!SymbolTableIndex {
4005 const comp = wasm.base.comp;
4006 assert(comp.config.output_mode == .Obj);
4007 const zcu = comp.zcu.?;
4008 const ip = &zcu.intern_pool;
4009 const gpa = comp.gpa;
4010 const nav = ip.getNav(nav_index);
4011 const name = try wasm.internString(nav.fqn.toSlice(ip));
4012 const gop = try wasm.symbol_table.getOrPut(gpa, name);
4013 gop.value_ptr.* = {};
4014 return @enumFromInt(gop.index);
4015}
4016
4017pub fn errorNameTableSymbolIndex(wasm: *Wasm) Allocator.Error!SymbolTableIndex {
4018 const comp = wasm.base.comp;
4019 assert(comp.config.output_mode == .Obj);
4020 const gpa = comp.gpa;
4021 const gop = try wasm.symbol_table.getOrPut(gpa, wasm.preloaded_strings.__zig_error_name_table);
4022 gop.value_ptr.* = {};
4023 return @enumFromInt(gop.index);
4024}
4025
4026pub fn stackPointerSymbolIndex(wasm: *Wasm) Allocator.Error!SymbolTableIndex {
4027 const comp = wasm.base.comp;
4028 assert(comp.config.output_mode == .Obj);
4029 const gpa = comp.gpa;
4030 const gop = try wasm.symbol_table.getOrPut(gpa, wasm.preloaded_strings.__stack_pointer);
4031 gop.value_ptr.* = {};
4032 return @enumFromInt(gop.index);
4033}
4034
4035pub fn tagNameSymbolIndex(wasm: *Wasm, ip_index: InternPool.Index) Allocator.Error!SymbolTableIndex {
4036 const comp = wasm.base.comp;
4037 assert(comp.config.output_mode == .Obj);
4038 const gpa = comp.gpa;
4039 const name = try wasm.internStringFmt("__zig_tag_name_{d}", .{@intFromEnum(ip_index)});
4040 const gop = try wasm.symbol_table.getOrPut(gpa, name);
4041 gop.value_ptr.* = {};
4042 return @enumFromInt(gop.index);
4043}
4044
4045pub fn symbolNameIndex(wasm: *Wasm, name: String) Allocator.Error!SymbolTableIndex {
4046 const comp = wasm.base.comp;
4047 assert(comp.config.output_mode == .Obj);
4048 const gpa = comp.gpa;
4049 const gop = try wasm.symbol_table.getOrPut(gpa, name);
4050 gop.value_ptr.* = {};
4051 return @enumFromInt(gop.index);
4052}
4053
4054pub fn addUavReloc(
4055 wasm: *Wasm,
4056 reloc_offset: usize,
4057 uav_val: InternPool.Index,
4058 orig_ptr_ty: InternPool.Index,
4059 addend: u32,
4060) !void {
4061 const comp = wasm.base.comp;
4062 const zcu = comp.zcu.?;
4063 const ip = &zcu.intern_pool;
4064 const gpa = comp.gpa;
4065
4066 @"align": {
4067 const ptr_type = ip.indexToKey(orig_ptr_ty).ptr_type;
4068 const this_align = ptr_type.flags.alignment;
4069 if (this_align == .none) break :@"align";
4070 const abi_align = Zcu.Type.fromInterned(ptr_type.child).abiAlignment(zcu);
4071 if (this_align.compare(.lte, abi_align)) break :@"align";
4072 const gop = try wasm.overaligned_uavs.getOrPut(gpa, uav_val);
4073 gop.value_ptr.* = if (gop.found_existing) gop.value_ptr.maxStrict(this_align) else this_align;
4074 }
4075
4076 if (comp.config.output_mode == .Obj) {
4077 const gop = try wasm.uavs_obj.getOrPut(gpa, uav_val);
4078 if (!gop.found_existing) gop.value_ptr.* = undefined; // to avoid recursion, `ZcuDataStarts` will lower the value later
4079 try wasm.out_relocs.append(gpa, .{
4080 .offset = @intCast(reloc_offset),
4081 .pointee = .{ .symbol_index = try wasm.uavSymbolIndex(uav_val) },
4082 .tag = switch (wasm.pointerSize()) {
4083 32 => .memory_addr_i32,
4084 64 => .memory_addr_i64,
4085 else => unreachable,
4086 },
4087 .addend = @intCast(addend),
4088 });
4089 } else {
4090 const gop = try wasm.uavs_exe.getOrPut(gpa, uav_val);
4091 if (!gop.found_existing) gop.value_ptr.* = .{
4092 .code = undefined, // to avoid recursion, `ZcuDataStarts` will lower the value later
4093 .count = 0,
4094 };
4095 gop.value_ptr.count += 1;
4096 try wasm.uav_fixups.append(gpa, .{
4097 .uavs_exe_index = @enumFromInt(gop.index),
4098 .offset = @intCast(reloc_offset),
4099 .addend = addend,
4100 });
4101 }
4102}
4103
4104pub fn refNavObj(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsObjIndex {
4105 const comp = wasm.base.comp;
4106 const gpa = comp.gpa;
4107 assert(comp.config.output_mode != .Obj);
4108 const gop = try wasm.navs_obj.getOrPut(gpa, nav_index);
4109 if (!gop.found_existing) gop.value_ptr.* = .{
4110 // Lowering the value is delayed to avoid recursion.
4111 .code = undefined,
4112 .relocs = undefined,
4113 };
4114 return @enumFromInt(gop.index);
4115}
4116
4117pub fn refNavExe(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsExeIndex {
4118 const comp = wasm.base.comp;
4119 const gpa = comp.gpa;
4120 assert(comp.config.output_mode != .Obj);
4121 const gop = try wasm.navs_exe.getOrPut(gpa, nav_index);
4122 if (gop.found_existing) {
4123 gop.value_ptr.count += 1;
4124 } else {
4125 gop.value_ptr.* = .{
4126 // Lowering the value is delayed to avoid recursion.
4127 .code = undefined,
4128 .count = 0,
4129 };
4130 }
4131 return @enumFromInt(gop.index);
4132}
4133
4134/// Asserts it is called after `Flush.data_segments` is fully populated and sorted.
4135pub fn uavAddr(wasm: *Wasm, ip_index: InternPool.Index) u32 {
4136 assert(wasm.flush_buffer.memory_layout_finished);
4137 const comp = wasm.base.comp;
4138 assert(comp.config.output_mode != .Obj);
4139 const uav_index: UavsExeIndex = @enumFromInt(wasm.uavs_exe.getIndex(ip_index).?);
4140 const ds_id: DataSegmentId = .pack(wasm, .{ .uav_exe = uav_index });
4141 return wasm.flush_buffer.data_segments.get(ds_id).?;
4142}
4143
4144/// Asserts it is called after `Flush.data_segments` is fully populated and sorted.
4145pub fn navAddr(wasm: *Wasm, nav_index: InternPool.Nav.Index) u32 {
4146 assert(wasm.flush_buffer.memory_layout_finished);
4147 const comp = wasm.base.comp;
4148 assert(comp.config.output_mode != .Obj);
4149 if (wasm.navs_exe.getIndex(nav_index)) |i| {
4150 const navs_exe_index: NavsExeIndex = @enumFromInt(i);
4151 log.debug("navAddr {s} {}", .{ navs_exe_index.name(wasm), nav_index });
4152 const ds_id: DataSegmentId = .pack(wasm, .{ .nav_exe = navs_exe_index });
4153 return wasm.flush_buffer.data_segments.get(ds_id).?;
4154 }
4155 const zcu = comp.zcu.?;
4156 const ip = &zcu.intern_pool;
4157 const nav = ip.getNav(nav_index);
4158 if (nav.getResolvedExtern(ip)) |ext| {
4159 if (wasm.getExistingString(ext.name.toSlice(ip))) |symbol_name| {
4160 if (wasm.object_data_imports.getPtr(symbol_name)) |import| {
4161 switch (import.resolution.unpack(wasm)) {
4162 .unresolved => unreachable,
4163 .object => |object_data_index| {
4164 const object_data = object_data_index.ptr(wasm);
4165 const ds_id: DataSegmentId = .fromObjectDataSegment(wasm, object_data.segment);
4166 const segment_base_addr = wasm.flush_buffer.data_segments.get(ds_id).?;
4167 return segment_base_addr + object_data.offset;
4168 },
4169 .__zig_error_names => @panic("TODO"),
4170 .__zig_error_name_table => @panic("TODO"),
4171 .__heap_base => @panic("TODO"),
4172 .__heap_end => @panic("TODO"),
4173 .uav_exe => @panic("TODO"),
4174 .uav_obj => @panic("TODO"),
4175 .nav_exe => @panic("TODO"),
4176 .nav_obj => @panic("TODO"),
4177 }
4178 }
4179 }
4180 }
4181 // Otherwise it's a zero bit type; any address will do.
4182 return 0;
4183}
4184
4185/// Asserts it is called after `Flush.data_segments` is fully populated and sorted.
4186pub fn errorNameTableAddr(wasm: *Wasm) u32 {
4187 assert(wasm.flush_buffer.memory_layout_finished);
4188 const comp = wasm.base.comp;
4189 assert(comp.config.output_mode != .Obj);
4190 return wasm.flush_buffer.data_segments.get(.__zig_error_name_table).?;
4191}
4192
4193fn convertZcuFnType(
4194 comp: *Compilation,
4195 cc: std.builtin.CallingConvention,
4196 params: []const InternPool.Index,
4197 return_type: Zcu.Type,
4198 target: *const std.Target,
4199 params_buffer: *std.ArrayList(std.wasm.Valtype),
4200 returns_buffer: *std.ArrayList(std.wasm.Valtype),
4201) Allocator.Error!void {
4202 params_buffer.clearRetainingCapacity();
4203 returns_buffer.clearRetainingCapacity();
4204
4205 const gpa = comp.gpa;
4206 const zcu = comp.zcu.?;
4207
4208 if (CodeGen.firstParamSRet(cc, return_type, zcu, target)) {
4209 try params_buffer.append(gpa, .i32); // memory address is always a 32-bit handle
4210 } else if (return_type.hasRuntimeBitsIgnoreComptime(zcu)) {
4211 if (cc == .wasm_mvp) {
4212 switch (abi.classifyType(return_type, zcu)) {
4213 .direct => |scalar_ty| {
4214 assert(!abi.lowerAsDoubleI64(scalar_ty, zcu));
4215 try returns_buffer.append(gpa, CodeGen.typeToValtype(scalar_ty, zcu, target));
4216 },
4217 .indirect => unreachable,
4218 }
4219 } else {
4220 try returns_buffer.append(gpa, CodeGen.typeToValtype(return_type, zcu, target));
4221 }
4222 } else if (return_type.isError(zcu)) {
4223 try returns_buffer.append(gpa, .i32);
4224 }
4225
4226 // param types
4227 for (params) |param_type_ip| {
4228 const param_type = Zcu.Type.fromInterned(param_type_ip);
4229 if (!param_type.hasRuntimeBitsIgnoreComptime(zcu)) continue;
4230
4231 switch (cc) {
4232 .wasm_mvp => {
4233 switch (abi.classifyType(param_type, zcu)) {
4234 .direct => |scalar_ty| {
4235 if (!abi.lowerAsDoubleI64(scalar_ty, zcu)) {
4236 try params_buffer.append(gpa, CodeGen.typeToValtype(scalar_ty, zcu, target));
4237 } else {
4238 try params_buffer.append(gpa, .i64);
4239 try params_buffer.append(gpa, .i64);
4240 }
4241 },
4242 .indirect => try params_buffer.append(gpa, CodeGen.typeToValtype(param_type, zcu, target)),
4243 }
4244 },
4245 else => try params_buffer.append(gpa, CodeGen.typeToValtype(param_type, zcu, target)),
4246 }
4247 }
4248}
4249
4250pub fn isBss(wasm: *const Wasm, optional_name: OptionalString) bool {
4251 const s = optional_name.slice(wasm) orelse return false;
4252 return mem.eql(u8, s, ".bss") or mem.startsWith(u8, s, ".bss.");
4253}
4254
4255/// After this function is called, there may be additional entries in
4256/// `Wasm.uavs_obj`, `Wasm.uavs_exe`, `Wasm.navs_obj`, and `Wasm.navs_exe`
4257/// which have uninitialized code and relocations. This function is
4258/// non-recursive, so callers must coordinate additional calls to populate
4259/// those entries.
4260fn lowerZcuData(wasm: *Wasm, pt: Zcu.PerThread, ip_index: InternPool.Index) !ZcuDataObj {
4261 const code_start: u32 = @intCast(wasm.string_bytes.items.len);
4262 const relocs_start: u32 = @intCast(wasm.out_relocs.len);
4263 const uav_fixups_start: u32 = @intCast(wasm.uav_fixups.items.len);
4264 const nav_fixups_start: u32 = @intCast(wasm.nav_fixups.items.len);
4265 const func_table_fixups_start: u32 = @intCast(wasm.func_table_fixups.items.len);
4266 wasm.string_bytes_lock.lock();
4267
4268 {
4269 var aw: std.Io.Writer.Allocating = .fromArrayList(wasm.base.comp.gpa, &wasm.string_bytes);
4270 defer wasm.string_bytes = aw.toArrayList();
4271 codegen.generateSymbol(&wasm.base, pt, .unneeded, .fromInterned(ip_index), &aw.writer, .none) catch |err| switch (err) {
4272 error.WriteFailed => return error.OutOfMemory,
4273 else => |e| return e,
4274 };
4275 }
4276
4277 const code_len: u32 = @intCast(wasm.string_bytes.items.len - code_start);
4278 const relocs_len: u32 = @intCast(wasm.out_relocs.len - relocs_start);
4279 const any_fixups =
4280 uav_fixups_start != wasm.uav_fixups.items.len or
4281 nav_fixups_start != wasm.nav_fixups.items.len or
4282 func_table_fixups_start != wasm.func_table_fixups.items.len;
4283 wasm.string_bytes_lock.unlock();
4284
4285 const naive_code: DataPayload = .{
4286 .off = @enumFromInt(code_start),
4287 .len = code_len,
4288 };
4289
4290 // Only nonzero init values need to take up space in the output.
4291 // If any fixups are present, we still need the string bytes allocated since
4292 // that is the staging area for the fixups.
4293 const code: DataPayload = if (!any_fixups and std.mem.allEqual(u8, naive_code.slice(wasm), 0)) c: {
4294 wasm.string_bytes.shrinkRetainingCapacity(code_start);
4295 // Indicate empty by making off and len the same value, however, still
4296 // transmit the data size by using the size as that value.
4297 break :c .{
4298 .off = .none,
4299 .len = naive_code.len,
4300 };
4301 } else c: {
4302 wasm.any_passive_inits = wasm.any_passive_inits or wasm.base.comp.config.import_memory;
4303 break :c naive_code;
4304 };
4305
4306 return .{
4307 .code = code,
4308 .relocs = .{
4309 .off = relocs_start,
4310 .len = relocs_len,
4311 },
4312 };
4313}
4314
4315fn pointerAlignment(wasm: *const Wasm) Alignment {
4316 const target = &wasm.base.comp.root_mod.resolved_target.result;
4317 return switch (target.cpu.arch) {
4318 .wasm32 => .@"4",
4319 .wasm64 => .@"8",
4320 else => unreachable,
4321 };
4322}
4323
4324fn pointerSize(wasm: *const Wasm) u32 {
4325 const target = &wasm.base.comp.root_mod.resolved_target.result;
4326 return switch (target.cpu.arch) {
4327 .wasm32 => 4,
4328 .wasm64 => 8,
4329 else => unreachable,
4330 };
4331}
4332
4333fn addZcuImportReserved(wasm: *Wasm, nav_index: InternPool.Nav.Index) ZcuImportIndex {
4334 const gop = wasm.imports.getOrPutAssumeCapacity(nav_index);
4335 gop.value_ptr.* = {};
4336 return @enumFromInt(gop.index);
4337}
4338
4339fn resolveFunctionSynthetic(
4340 wasm: *Wasm,
4341 import: *FunctionImport,
4342 res: FunctionImport.Resolution,
4343 params: []const std.wasm.Valtype,
4344 returns: []const std.wasm.Valtype,
4345) link.File.FlushError!void {
4346 import.resolution = res;
4347 wasm.functions.putAssumeCapacity(res, {});
4348 // This is not only used for type-checking but also ensures the function
4349 // type index is interned so that it is guaranteed to exist during `flush`.
4350 const correct_func_type = try addFuncType(wasm, .{
4351 .params = try internValtypeList(wasm, params),
4352 .returns = try internValtypeList(wasm, returns),
4353 });
4354 if (import.type != correct_func_type) {
4355 const diags = &wasm.base.comp.link_diags;
4356 return import.source_location.fail(diags, "synthetic function {s} {f} imported with incorrect signature {f}", .{
4357 @tagName(res), correct_func_type.fmt(wasm), import.type.fmt(wasm),
4358 });
4359 }
4360}
4361
4362pub fn addFunction(
4363 wasm: *Wasm,
4364 resolution: FunctionImport.Resolution,
4365 params: []const std.wasm.Valtype,
4366 returns: []const std.wasm.Valtype,
4367) Allocator.Error!void {
4368 wasm.functions.putAssumeCapacity(resolution, {});
4369 _ = try wasm.addFuncType(.{
4370 .params = try wasm.internValtypeList(params),
4371 .returns = try wasm.internValtypeList(returns),
4372 });
4373}