Commit 031c84c8cb
Changed files (5)
src/arch/wasm/Emit.zig
@@ -89,7 +89,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.errorNameTableSymbolIndex(),
+ .pointee = .{ .symbol_index = try wasm.errorNameTableSymbolIndex() },
.tag = if (is_wasm32) .MEMORY_ADDR_LEB else .MEMORY_ADDR_LEB64,
.addend = 0,
});
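Note: the recurring change in this file replaces the bare `index: u32` on out-relocations with a `pointee` union, so each call site states which table the value indexes into; the active member is implied by the relocation `tag`. A minimal restatement of the shape (it mirrors the `OutReloc.Pointee` union added in src/link/Wasm.zig further down):

    // Active member is implied by the relocation tag: .TYPE_INDEX_LEB reads
    // `type_index`, the symbol-based tags read `symbol_index`.
    pub const Pointee = union {
        symbol_index: SymbolTableIndex,
        type_index: FunctionType.Index,
    };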
@@ -143,7 +143,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.navSymbolIndex(datas[inst].nav_index),
+ .pointee = .{ .symbol_index = try wasm.navSymbolIndex(datas[inst].nav_index) },
.tag = .FUNCTION_INDEX_LEB,
.addend = 0,
});
@@ -164,7 +164,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = func_ty_index,
+ .pointee = .{ .type_index = func_ty_index },
.tag = .TYPE_INDEX_LEB,
.addend = 0,
});
@@ -184,7 +184,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.tagNameSymbolIndex(datas[inst].ip_index),
+ .pointee = .{ .symbol_index = try wasm.tagNameSymbolIndex(datas[inst].ip_index) },
.tag = .FUNCTION_INDEX_LEB,
.addend = 0,
});
@@ -209,7 +209,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.symbolNameIndex(symbol_name),
+ .pointee = .{ .symbol_index = try wasm.symbolNameIndex(symbol_name) },
.tag = .FUNCTION_INDEX_LEB,
.addend = 0,
});
@@ -229,7 +229,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.stackPointerSymbolIndex(),
+ .pointee = .{ .symbol_index = try wasm.stackPointerSymbolIndex() },
.tag = .GLOBAL_INDEX_LEB,
.addend = 0,
});
@@ -249,7 +249,7 @@ pub fn lowerToCode(emit: *Emit) Error!void {
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.functionSymbolIndex(datas[inst].ip_index),
+ .pointee = .{ .symbol_index = try wasm.functionSymbolIndex(datas[inst].ip_index) },
.tag = .TABLE_INDEX_SLEB,
.addend = 0,
});
@@ -691,7 +691,7 @@ fn uavRefOff(wasm: *link.File.Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.uavSymbolIndex(data.ip_index),
+ .pointee = .{ .symbol_index = try wasm.uavSymbolIndex(data.ip_index) },
.tag = if (is_wasm32) .MEMORY_ADDR_LEB else .MEMORY_ADDR_LEB64,
.addend = data.offset,
});
@@ -700,7 +700,7 @@ fn uavRefOff(wasm: *link.File.Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir
}
// When linking into the final binary, no relocation mechanism is necessary.
- const addr: i64 = try wasm.uavAddr(data.ip_index);
+ const addr = try wasm.uavAddr(data.ip_index);
leb.writeUleb128(code.fixedWriter(), addr + data.offset) catch unreachable;
}
@@ -720,13 +720,13 @@ fn navRefOff(wasm: *link.File.Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.navSymbolIndex(data.nav_index),
+ .pointee = .{ .symbol_index = try wasm.navSymbolIndex(data.nav_index) },
.tag = .TABLE_INDEX_SLEB,
.addend = data.offset,
});
code.appendNTimesAssumeCapacity(0, 5);
} else {
- const addr: i64 = try wasm.navAddr(data.nav_index);
+ const addr = try wasm.navAddr(data.nav_index);
leb.writeUleb128(code.fixedWriter(), addr + data.offset) catch unreachable;
}
} else {
@@ -735,13 +735,13 @@ fn navRefOff(wasm: *link.File.Wasm, code: *std.ArrayListUnmanaged(u8), data: Mir
if (is_obj) {
try wasm.out_relocs.append(gpa, .{
.offset = @intCast(code.items.len),
- .index = try wasm.navSymbolIndex(data.nav_index),
+ .pointee = .{ .symbol_index = try wasm.navSymbolIndex(data.nav_index) },
.tag = if (is_wasm32) .MEMORY_ADDR_LEB else .MEMORY_ADDR_LEB64,
.addend = data.offset,
});
code.appendNTimesAssumeCapacity(0, if (is_wasm32) 5 else 10);
} else {
- const addr: i64 = try wasm.navAddr(data.nav_index);
+ const addr = try wasm.navAddr(data.nav_index);
leb.writeUleb128(code.fixedWriter(), addr + data.offset) catch unreachable;
}
}
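Note: the `appendNTimesAssumeCapacity(0, 5)` and `(0, if (is_wasm32) 5 else 10)` calls above reserve the worst-case LEB128 width so the final value can be patched in place without moving code: a 32-bit value needs at most ceil(32/7) = 5 bytes and a 64-bit value at most ceil(64/7) = 10. A sketch of the padded encoding this relies on (standard LEB128, not a function from this codebase):

    // Encode `value` into exactly 5 bytes so a later pass can overwrite it
    // with any u32 without resizing the code buffer.
    fn writeFixedUleb32(w: anytype, value: u32) !void {
        var v = value;
        for (0..4) |_| {
            try w.writeByte(@as(u8, @intCast(v & 0x7f)) | 0x80); // continuation bit set
            v >>= 7;
        }
        try w.writeByte(@intCast(v & 0x7f)); // final byte: continuation bit clear
    }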
src/link/Wasm/Flush.zig
@@ -42,7 +42,6 @@ pub fn clear(f: *Flush) void {
f.data_segments.clearRetainingCapacity();
f.data_segment_groups.clearRetainingCapacity();
f.indirect_function_table.clearRetainingCapacity();
- f.global_exports.clearRetainingCapacity();
}
pub fn deinit(f: *Flush, gpa: Allocator) void {
@@ -50,11 +49,10 @@ pub fn deinit(f: *Flush, gpa: Allocator) void {
f.data_segments.deinit(gpa);
f.data_segment_groups.deinit(gpa);
f.indirect_function_table.deinit(gpa);
- f.global_exports.deinit(gpa);
f.* = undefined;
}
-pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
+pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) !void {
const comp = wasm.base.comp;
const shared_memory = comp.config.shared_memory;
const diags = &comp.link_diags;
@@ -115,7 +113,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
src_loc.addError(wasm, "undefined global: {s}", .{name.slice(wasm)});
}
for (wasm.table_imports.keys(), wasm.table_imports.values()) |name, table_import_id| {
- const src_loc = table_import_id.ptr(wasm).source_location;
+ const src_loc = table_import_id.value(wasm).source_location;
src_loc.addError(wasm, "undefined table: {s}", .{name.slice(wasm)});
}
}
@@ -142,7 +140,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
if (!ds.flags.alive) continue;
const data_segment_index: Wasm.DataSegment.Index = @enumFromInt(i);
any_passive_inits = any_passive_inits or ds.flags.is_passive or (import_memory and !isBss(wasm, ds.name));
- f.data_segments.putAssumeCapacityNoClobber(data_segment_index, .{ .offset = undefined });
+ f.data_segments.putAssumeCapacityNoClobber(data_segment_index, @as(u32, undefined));
}
try wasm.functions.ensureUnusedCapacity(gpa, 3);
@@ -159,8 +157,10 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
// When we have TLS GOT entries and shared memory is enabled,
// we must perform runtime relocations or else we don't create the function.
if (shared_memory) {
- if (f.need_tls_relocs) wasm.functions.putAssumeCapacity(.__wasm_apply_global_tls_relocs, {});
- wasm.functions.putAssumeCapacity(gpa, .__wasm_init_tls, {});
+ // TODO: checking `any_tls_relocs` alone is not sufficient; it also needs
+ // to account for threadlocal globals coming from Zcu code.
+ if (wasm.any_tls_relocs) wasm.functions.putAssumeCapacity(.__wasm_apply_global_tls_relocs, {});
+ wasm.functions.putAssumeCapacity(.__wasm_init_tls, {});
}
// Sort order:
@@ -178,14 +178,14 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
pub fn lessThan(ctx: @This(), lhs: usize, rhs: usize) bool {
const lhs_segment_index = ctx.segments[lhs];
const rhs_segment_index = ctx.segments[rhs];
- const lhs_segment = lhs_segment_index.ptr(wasm);
- const rhs_segment = rhs_segment_index.ptr(wasm);
+ const lhs_segment = lhs_segment_index.ptr(ctx.wasm);
+ const rhs_segment = rhs_segment_index.ptr(ctx.wasm);
const lhs_tls = @intFromBool(lhs_segment.flags.tls);
const rhs_tls = @intFromBool(rhs_segment.flags.tls);
if (lhs_tls < rhs_tls) return true;
if (lhs_tls > rhs_tls) return false;
- const lhs_prefix, const lhs_suffix = splitSegmentName(lhs_segment.name.unwrap().slice(ctx.wasm));
- const rhs_prefix, const rhs_suffix = splitSegmentName(rhs_segment.name.unwrap().slice(ctx.wasm));
+ const lhs_prefix, const lhs_suffix = splitSegmentName(lhs_segment.name.unwrap().?.slice(ctx.wasm));
+ const rhs_prefix, const rhs_suffix = splitSegmentName(rhs_segment.name.unwrap().?.slice(ctx.wasm));
switch (mem.order(u8, lhs_prefix, rhs_prefix)) {
.lt => return true,
.gt => return false,
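For reference, the comparator sorts on a three-part key: the `tls` flag (via `@intFromBool`, grouping TLS segments together), then the segment-name prefix, then the suffix. Assuming `splitSegmentName` follows the conventional `.section.symbol` naming, the resulting ascending order looks like:

    // Illustrative keys only (hypothetical segment names):
    //   { tls = 0, prefix = ".data",   suffix = ".a" }
    //   { tls = 0, prefix = ".data",   suffix = ".b" }
    //   { tls = 0, prefix = ".rodata", suffix = ".x" }
    //   { tls = 1, prefix = ".tdata",  suffix = ".t" }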
@@ -213,14 +213,14 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
const heap_alignment: Alignment = .@"16"; // wasm's heap alignment as specified by tool-convention
const pointer_alignment: Alignment = .@"4";
// Always place the stack at the start by default unless the user specified the global-base flag.
- const place_stack_first, var memory_ptr: u32 = if (wasm.global_base) |base| .{ false, base } else .{ true, 0 };
+ const place_stack_first, var memory_ptr: u64 = if (wasm.global_base) |base| .{ false, base } else .{ true, 0 };
const VirtualAddrs = struct {
stack_pointer: u32,
heap_base: u32,
heap_end: u32,
tls_base: ?u32,
- tls_align: ?u32,
+ tls_align: Alignment,
tls_size: ?u32,
init_memory_flag: ?u32,
};
@@ -229,7 +229,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
.heap_base = undefined,
.heap_end = undefined,
.tls_base = null,
- .tls_align = null,
+ .tls_align = .none,
.tls_size = null,
.init_memory_flag = null,
};
@@ -237,7 +237,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
if (place_stack_first and !is_obj) {
memory_ptr = stack_alignment.forward(memory_ptr);
memory_ptr += wasm.base.stack_size;
- virtual_addrs.stack_pointer = memory_ptr;
+ virtual_addrs.stack_pointer = @intCast(memory_ptr);
}
const segment_indexes = f.data_segments.keys();
@@ -247,27 +247,27 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
var seen_tls: enum { before, during, after } = .before;
var offset: u32 = 0;
for (segment_indexes, segment_offsets, 0..) |segment_index, *segment_offset, i| {
- const segment = segment_index.ptr(f);
- memory_ptr = segment.alignment.forward(memory_ptr);
+ const segment = segment_index.ptr(wasm);
+ memory_ptr = segment.flags.alignment.forward(memory_ptr);
const want_new_segment = b: {
if (is_obj) break :b false;
switch (seen_tls) {
.before => if (segment.flags.tls) {
- virtual_addrs.tls_base = if (shared_memory) 0 else memory_ptr;
+ virtual_addrs.tls_base = if (shared_memory) 0 else @intCast(memory_ptr);
virtual_addrs.tls_align = segment.flags.alignment;
seen_tls = .during;
break :b true;
},
.during => if (!segment.flags.tls) {
- virtual_addrs.tls_size = memory_ptr - virtual_addrs.tls_base;
+ virtual_addrs.tls_size = @intCast(memory_ptr - virtual_addrs.tls_base.?);
virtual_addrs.tls_align = virtual_addrs.tls_align.maxStrict(segment.flags.alignment);
seen_tls = .after;
break :b true;
},
.after => {},
}
- break :b i >= 1 and !wasm.wantSegmentMerge(segment_indexes[i - 1], segment_index);
+ break :b i >= 1 and !wantSegmentMerge(wasm, segment_indexes[i - 1], segment_index);
};
if (want_new_segment) {
if (offset > 0) try f.data_segment_groups.append(gpa, offset);
@@ -275,26 +275,26 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
}
segment_offset.* = offset;
- offset += segment.size;
- memory_ptr += segment.size;
+ offset += segment.payload.len;
+ memory_ptr += segment.payload.len;
}
if (offset > 0) try f.data_segment_groups.append(gpa, offset);
}
if (shared_memory and any_passive_inits) {
memory_ptr = pointer_alignment.forward(memory_ptr);
- virtual_addrs.init_memory_flag = memory_ptr;
+ virtual_addrs.init_memory_flag = @intCast(memory_ptr);
memory_ptr += 4;
}
if (!place_stack_first and !is_obj) {
memory_ptr = stack_alignment.forward(memory_ptr);
memory_ptr += wasm.base.stack_size;
- virtual_addrs.stack_pointer = memory_ptr;
+ virtual_addrs.stack_pointer = @intCast(memory_ptr);
}
memory_ptr = heap_alignment.forward(memory_ptr);
- virtual_addrs.heap_base = memory_ptr;
+ virtual_addrs.heap_base = @intCast(memory_ptr);
if (wasm.initial_memory) |initial_memory| {
if (!mem.isAlignedGeneric(u64, initial_memory, page_size)) {
@@ -311,7 +311,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
} else {
memory_ptr = mem.alignForward(u64, memory_ptr, std.wasm.page_size);
}
- virtual_addrs.heap_end = memory_ptr;
+ virtual_addrs.heap_end = @intCast(memory_ptr);
// In case we do not import memory, but define it ourselves, set the
// minimum amount of pages on the memory section.
@@ -326,7 +326,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
diags.addError("maximum memory value {d} insufficient; minimum {d}", .{ max_memory, memory_ptr });
}
if (max_memory > std.math.maxInt(u32)) {
- diags.addError("maximum memory exceeds 32-bit address space", .{max_memory});
+ diags.addError("maximum memory value {d} exceeds 32-bit address space", .{max_memory});
}
if (diags.hasErrors()) return error.LinkFailure;
wasm.memories.limits.max = @intCast(max_memory / page_size);
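A worked pass through the layout arithmetic above, under assumed inputs (no `global_base`, so the stack goes first; `stack_size` = 0x10000; a single 100-byte data segment aligned to 4; stack alignment taken as 16):

    memory_ptr = 0                       // start of linear memory
    memory_ptr = forward(0, 16) = 0x0        // stack alignment
    memory_ptr += 0x10000       = 0x10000    // stack_pointer = 0x10000
    memory_ptr = forward(_, 4)  = 0x10000    // segment alignment
    memory_ptr += 100           = 0x10064    // segment payload
    heap_base  = forward(_, 16) = 0x10070
    heap_end   = pageAlign(_)   = 0x20000    // rounded to the 64 KiB wasm page

The `@intCast`s sprinkled through this pass rely on `memory_ptr` being widened to `u64` for the accumulation and on the memory-size diagnostics above rejecting layouts that exceed the 32-bit address space before narrowing.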
@@ -346,24 +346,26 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
const binary_bytes = &f.binary_bytes;
assert(binary_bytes.items.len == 0);
- try binary_bytes.appendSlice(gpa, std.wasm.magic ++ std.wasm.version);
+ try binary_bytes.appendSlice(gpa, &std.wasm.magic ++ &std.wasm.version);
assert(binary_bytes.items.len == 8);
const binary_writer = binary_bytes.writer(gpa);
// Type section
- if (wasm.func_types.items.len != 0) {
+ if (wasm.func_types.entries.len != 0) {
const header_offset = try reserveVecSectionHeader(gpa, binary_bytes);
- log.debug("Writing type section. Count: ({d})", .{wasm.func_types.items.len});
- for (wasm.func_types.items) |func_type| {
+ log.debug("Writing type section. Count: ({d})", .{wasm.func_types.entries.len});
+ for (wasm.func_types.keys()) |func_type| {
try leb.writeUleb128(binary_writer, std.wasm.function_type);
- try leb.writeUleb128(binary_writer, @as(u32, @intCast(func_type.params.len)));
- for (func_type.params) |param_ty| {
- try leb.writeUleb128(binary_writer, std.wasm.valtype(param_ty));
+ const params = func_type.params.slice(wasm);
+ try leb.writeUleb128(binary_writer, @as(u32, @intCast(params.len)));
+ for (params) |param_ty| {
+ try leb.writeUleb128(binary_writer, @intFromEnum(param_ty));
}
- try leb.writeUleb128(binary_writer, @as(u32, @intCast(func_type.returns.len)));
- for (func_type.returns) |ret_ty| {
- try leb.writeUleb128(binary_writer, std.wasm.valtype(ret_ty));
+ const returns = func_type.returns.slice(wasm);
+ try leb.writeUleb128(binary_writer, @as(u32, @intCast(returns.len)));
+ for (returns) |ret_ty| {
+ try leb.writeUleb128(binary_writer, @intFromEnum(ret_ty));
}
}
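The 8 bytes asserted after the header append are the fixed wasm module preamble: magic `\0asm` plus version 1, i.e. `00 61 73 6D 01 00 00 00`. The type-section loop then writes each signature in the standard binary form, which the switch from `std.wasm.valtype(...)` to `@intFromEnum(param_ty)` preserves because the valtype enum values are the spec encodings. For example, a `(i32, i32) -> i64` signature serializes as:

    60 02 7F 7F 01 7E   // functype tag, 2 params (i32 i32), 1 result (i64)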
@@ -372,19 +374,19 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
header_offset,
.type,
@intCast(binary_bytes.items.len - header_offset - header_size),
- @intCast(wasm.func_types.items.len),
+ @intCast(wasm.func_types.entries.len),
);
section_index += 1;
}
// Import section
- const total_imports_len = wasm.function_imports.items.len + wasm.global_imports.items.len +
- wasm.table_imports.items.len + wasm.memory_imports.items.len + @intFromBool(import_memory);
+ const total_imports_len = wasm.function_imports.entries.len + wasm.global_imports.entries.len +
+ wasm.table_imports.entries.len + wasm.memory_imports.items.len + @intFromBool(import_memory);
if (total_imports_len > 0) {
const header_offset = try reserveVecSectionHeader(gpa, binary_bytes);
- for (wasm.function_imports.items) |*function_import| {
+ for (wasm.function_imports.values()) |*function_import| {
const module_name = function_import.module_name.slice(wasm);
try leb.writeUleb128(binary_writer, @as(u32, @intCast(module_name.len)));
try binary_writer.writeAll(module_name);
@@ -397,7 +399,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
try leb.writeUleb128(binary_writer, function_import.index);
}
- for (wasm.table_imports.items) |*table_import| {
+ for (wasm.table_imports.values()) |*table_import| {
const module_name = table_import.module_name.slice(wasm);
try leb.writeUleb128(binary_writer, @as(u32, @intCast(module_name.len)));
try binary_writer.writeAll(module_name);
@@ -424,7 +426,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
});
}
- for (wasm.global_imports.items) |*global_import| {
+ for (wasm.global_imports.values()) |*global_import| {
const module_name = global_import.module_name.slice(wasm);
try leb.writeUleb128(binary_writer, @as(u32, @intCast(module_name.len)));
try binary_writer.writeAll(module_name);
@@ -504,7 +506,7 @@ pub fn finish(f: *Flush, wasm: *Wasm, arena: Allocator) anyerror!void {
const header_offset = try reserveVecSectionHeader(gpa, binary_bytes);
for (wasm.output_globals.items) |global| {
- try binary_writer.writeByte(std.wasm.valtype(global.global_type.valtype));
+ try binary_writer.writeByte(@intFromEnum(global.global_type.valtype));
try binary_writer.writeByte(@intFromBool(global.global_type.mutable));
try emitInit(binary_writer, global.init);
}
@@ -841,7 +843,7 @@ fn writeCustomSectionHeader(buffer: []u8, offset: u32, size: u32) !void {
buffer[offset..][0..buf.len].* = buf;
}
-fn reserveCustomSectionHeader(gpa: Allocator, bytes: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!u32 {
+fn reserveCustomSectionHeader(gpa: Allocator, bytes: *std.ArrayListUnmanaged(u8)) Allocator.Error!u32 {
// unlike regular section, we don't emit the count
const header_size = 1 + 5;
try bytes.appendNTimes(gpa, 0, header_size);
@@ -1099,12 +1101,12 @@ fn wantSegmentMerge(wasm: *const Wasm, a_index: Wasm.DataSegment.Index, b_index:
if (a.flags.tls != b.flags.tls) return false;
if (a.flags.is_passive != b.flags.is_passive) return false;
if (a.name == b.name) return true;
- const a_prefix, _ = splitSegmentName(a.name.slice(wasm));
- const b_prefix, _ = splitSegmentName(b.name.slice(wasm));
+ const a_prefix, _ = splitSegmentName(a.name.slice(wasm).?);
+ const b_prefix, _ = splitSegmentName(b.name.slice(wasm).?);
return a_prefix.len > 0 and mem.eql(u8, a_prefix, b_prefix);
}
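Usage note: `wantSegmentMerge` only collapses neighbors that agree on `tls` and `is_passive` and that share either an exact name or a non-empty name prefix. Assuming the same `splitSegmentName` convention as in the sort above, `.rodata.str1` and `.rodata.cst8` both reduce to the prefix `.rodata` and merge into one output segment, while `.rodata.x` and `.data.x` stay separate.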
-fn reserveVecSectionHeader(gpa: Allocator, bytes: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!u32 {
+fn reserveVecSectionHeader(gpa: Allocator, bytes: *std.ArrayListUnmanaged(u8)) Allocator.Error!u32 {
// section id + fixed leb contents size + fixed leb vector length
const header_size = 1 + 5 + 5;
try bytes.appendNTimes(gpa, 0, header_size);
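The reservation above is 1 + 5 + 5 bytes: a one-byte section id plus two fixed-width slots for the section byte size and the vector length, neither known until the body has been written. Backpatching presumably writes padded 5-byte ULEB128 values (the reservation width matches); e.g. a count of 3 becomes:

    83 80 80 80 00   // 3 as a fixed 5-byte ULEB128, decodes the same as 03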
@@ -1125,7 +1127,7 @@ fn emitLimits(writer: anytype, limits: std.wasm.Limits) !void {
if (limits.flags.has_max) try leb.writeUleb128(writer, limits.max);
}
-fn emitMemoryImport(wasm: *Wasm, writer: anytype, memory_import: *const Wasm.MemoryImport) error{OutOfMemory}!void {
+fn emitMemoryImport(wasm: *Wasm, writer: anytype, memory_import: *const Wasm.MemoryImport) Allocator.Error!void {
const module_name = memory_import.module_name.slice(wasm);
try leb.writeUleb128(writer, @as(u32, @intCast(module_name.len)));
try writer.writeAll(module_name);
src/link/Wasm/Object.zig
@@ -36,7 +36,7 @@ global_imports: RelativeSlice,
table_imports: RelativeSlice,
/// Points into Wasm object_custom_segments
custom_segments: RelativeSlice,
-/// For calculating local section index from `Wasm.SectionIndex`.
+/// For calculating local section index from `Wasm.ObjectSectionIndex`.
local_section_index_base: u32,
/// Points into Wasm object_init_funcs
init_funcs: RelativeSlice,
@@ -109,10 +109,10 @@ pub const Symbol = struct {
},
data_import: void,
global: Wasm.ObjectGlobalIndex,
- global_import: Wasm.ObjectGlobalImportIndex,
+ global_import: Wasm.GlobalImport.Index,
section: Wasm.ObjectSectionIndex,
table: Wasm.ObjectTableIndex,
- table_import: Wasm.ObjectTableImportIndex,
+ table_import: Wasm.TableImport.Index,
};
};
@@ -159,7 +159,7 @@ pub const ScratchSpace = struct {
}
};
-fn parse(
+pub fn parse(
wasm: *Wasm,
bytes: []const u8,
path: Path,
@@ -181,18 +181,18 @@ fn parse(
pos += 4;
const data_segment_start: u32 = @intCast(wasm.object_data_segments.items.len);
- const custom_segment_start: u32 = @intCast(wasm.object_custom_segments.items.len);
+ const custom_segment_start: u32 = @intCast(wasm.object_custom_segments.entries.len);
const functions_start: u32 = @intCast(wasm.object_functions.items.len);
const tables_start: u32 = @intCast(wasm.object_tables.items.len);
const memories_start: u32 = @intCast(wasm.object_memories.items.len);
const globals_start: u32 = @intCast(wasm.object_globals.items.len);
const init_funcs_start: u32 = @intCast(wasm.object_init_funcs.items.len);
const comdats_start: u32 = @intCast(wasm.object_comdats.items.len);
- const function_imports_start: u32 = @intCast(wasm.object_function_imports.items.len);
- const global_imports_start: u32 = @intCast(wasm.object_global_imports.items.len);
- const table_imports_start: u32 = @intCast(wasm.object_table_imports.items.len);
+ const function_imports_start: u32 = @intCast(wasm.object_function_imports.entries.len);
+ const global_imports_start: u32 = @intCast(wasm.object_global_imports.entries.len);
+ const table_imports_start: u32 = @intCast(wasm.object_table_imports.entries.len);
const local_section_index_base = wasm.object_total_sections;
- const source_location: Wasm.SourceLocation = .fromObjectIndex(wasm.objects.items.len);
+ const source_location: Wasm.SourceLocation = .fromObject(@enumFromInt(wasm.objects.items.len), wasm);
ss.clear();
@@ -200,10 +200,9 @@ fn parse(
var opt_features: ?Wasm.Feature.Set = null;
var saw_linking_section = false;
var has_tls = false;
- var local_section_index: u32 = 0;
var table_count: usize = 0;
- while (pos < bytes.len) : (local_section_index += 1) {
- const section_index: Wasm.SectionIndex = @enumFromInt(local_section_index_base + local_section_index);
+ while (pos < bytes.len) : (wasm.object_total_sections += 1) {
+ const section_index: Wasm.ObjectSectionIndex = @enumFromInt(wasm.object_total_sections);
const section_tag: std.wasm.Section = @enumFromInt(bytes[pos]);
pos += 1;
@@ -245,7 +244,7 @@ fn parse(
.strings = flags.strings,
.tls = tls,
.alignment = @enumFromInt(alignment),
- .no_strip = flags.retain,
+ .retain = flags.retain,
},
};
}
@@ -257,13 +256,13 @@ fn parse(
if (symbol_index > ss.symbol_table.items.len)
return diags.failParse(path, "init_funcs before symbol table", .{});
const sym = &ss.symbol_table.items[symbol_index];
- if (sym.tag != .function) {
+ if (sym.pointee != .function) {
return diags.failParse(path, "init_func symbol '{s}' not a function", .{
- wasm.stringSlice(sym.name),
+ sym.name.slice(wasm).?,
});
} else if (sym.flags.undefined) {
return diags.failParse(path, "init_func symbol '{s}' is an import", .{
- wasm.stringSlice(sym.name),
+ sym.name.slice(wasm).?,
});
}
func.* = .{
@@ -278,22 +277,23 @@ fn parse(
const flags, pos = readLeb(u32, bytes, pos);
if (flags != 0) return error.UnexpectedComdatFlags;
const symbol_count, pos = readLeb(u32, bytes, pos);
- const start_off: u32 = @intCast(wasm.object_comdat_symbols.items.len);
- for (try wasm.object_comdat_symbols.addManyAsSlice(gpa, symbol_count)) |*symbol| {
+ const start_off: u32 = @intCast(wasm.object_comdat_symbols.len);
+ try wasm.object_comdat_symbols.ensureUnusedCapacity(gpa, symbol_count);
+ for (0..symbol_count) |_| {
const kind, pos = readEnum(Wasm.Comdat.Symbol.Type, bytes, pos);
const index, pos = readLeb(u32, bytes, pos);
if (true) @panic("TODO rebase index depending on kind");
- symbol.* = .{
+ wasm.object_comdat_symbols.appendAssumeCapacity(.{
.kind = kind,
.index = index,
- };
+ });
}
comdat.* = .{
.name = try wasm.internString(name),
.flags = flags,
.symbols = .{
.off = start_off,
- .len = @intCast(wasm.object_comdat_symbols.items.len - start_off),
+ .len = @intCast(wasm.object_comdat_symbols.len - start_off),
},
};
}
@@ -321,7 +321,7 @@ fn parse(
const size, pos = readLeb(u32, bytes, pos);
symbol.pointee = .{ .data = .{
- .index = @enumFromInt(data_segment_start + segment_index),
+ .segment_index = @enumFromInt(data_segment_start + segment_index),
.segment_offset = segment_offset,
.size = size,
} };
@@ -329,7 +329,7 @@ fn parse(
},
.section => {
const local_section, pos = readLeb(u32, bytes, pos);
- const section: Wasm.SectionIndex = @enumFromInt(local_section_index_base + local_section);
+ const section: Wasm.ObjectSectionIndex = @enumFromInt(local_section_index_base + local_section);
symbol.pointee = .{ .section = section };
},
@@ -337,7 +337,7 @@ fn parse(
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
symbol.pointee = .{ .function_import = @enumFromInt(local_index) };
- if (flags.explicit_name) {
+ if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
}
@@ -351,7 +351,7 @@ fn parse(
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
symbol.pointee = .{ .global_import = @enumFromInt(global_imports_start + local_index) };
- if (flags.explicit_name) {
+ if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
}
@@ -366,7 +366,7 @@ fn parse(
const local_index, pos = readLeb(u32, bytes, pos);
if (symbol.flags.undefined) {
symbol.pointee = .{ .table_import = @enumFromInt(table_imports_start + local_index) };
- if (flags.explicit_name) {
+ if (symbol.flags.explicit_name) {
const name, pos = readBytes(bytes, pos);
symbol.name = (try wasm.internString(name)).toOptional();
}
@@ -377,7 +377,7 @@ fn parse(
}
},
else => {
- log.debug("unrecognized symbol type tag: {x}", .{tag});
+ log.debug("unrecognized symbol type tag: {x}", .{@intFromEnum(tag)});
return error.UnrecognizedSymbolType;
},
}
@@ -396,14 +396,14 @@ fn parse(
// "Relocation sections can only target code, data and custom sections."
const local_section, pos = readLeb(u32, bytes, pos);
const count, pos = readLeb(u32, bytes, pos);
- const section: Wasm.SectionIndex = @enumFromInt(local_section_index_base + local_section);
+ const section: Wasm.ObjectSectionIndex = @enumFromInt(local_section_index_base + local_section);
log.debug("found {d} relocations for section={d}", .{ count, section });
var prev_offset: u32 = 0;
- try wasm.relocations.ensureUnusedCapacity(gpa, count);
+ try wasm.object_relocations.ensureUnusedCapacity(gpa, count);
for (0..count) |_| {
- const tag: Wasm.Relocation.Tag = @enumFromInt(bytes[pos]);
+ const tag: Wasm.ObjectRelocation.Tag = @enumFromInt(bytes[pos]);
pos += 1;
const offset, pos = readLeb(u32, bytes, pos);
const index, pos = readLeb(u32, bytes, pos);
@@ -426,9 +426,10 @@ fn parse(
.MEMORY_ADDR_TLS_SLEB64,
.FUNCTION_OFFSET_I32,
.SECTION_OFFSET_I32,
+ .FUNCTION_OFFSET_I64,
=> {
const addend: i32, pos = readLeb(i32, bytes, pos);
- wasm.relocations.appendAssumeCapacity(.{
+ wasm.object_relocations.appendAssumeCapacity(.{
.tag = tag,
.offset = offset,
.pointee = .{ .section = ss.symbol_table.items[index].pointee.section },
@@ -436,7 +437,7 @@ fn parse(
});
},
.TYPE_INDEX_LEB => {
- wasm.relocations.appendAssumeCapacity(.{
+ wasm.object_relocations.appendAssumeCapacity(.{
.tag = tag,
.offset = offset,
.pointee = .{ .type_index = ss.func_types.items[index] },
@@ -444,9 +445,19 @@ fn parse(
});
},
.FUNCTION_INDEX_LEB,
+ .FUNCTION_INDEX_I32,
.GLOBAL_INDEX_LEB,
+ .GLOBAL_INDEX_I32,
+ .TABLE_INDEX_SLEB,
+ .TABLE_INDEX_I32,
+ .TABLE_INDEX_SLEB64,
+ .TABLE_INDEX_I64,
+ .TABLE_NUMBER_LEB,
+ .TABLE_INDEX_REL_SLEB,
+ .TABLE_INDEX_REL_SLEB64,
+ .TAG_INDEX_LEB,
=> {
- wasm.relocations.appendAssumeCapacity(.{
+ wasm.object_relocations.appendAssumeCapacity(.{
.tag = tag,
.offset = offset,
.pointee = .{ .symbol_name = ss.symbol_table.items[index].name.unwrap().? },
@@ -457,7 +468,7 @@ fn parse(
}
try wasm.object_relocations_table.putNoClobber(gpa, section, .{
- .off = @intCast(wasm.relocations.items.len - count),
+ .off = @intCast(wasm.object_relocations.len - count),
.len = count,
});
} else if (std.mem.eql(u8, section_name, "target_features")) {
@@ -466,15 +477,15 @@ fn parse(
const debug_content = bytes[pos..section_end];
pos = section_end;
- const data_off: u32 = @enumFromInt(wasm.string_bytes.items.len);
+ const data_off: u32 = @intCast(wasm.string_bytes.items.len);
try wasm.string_bytes.appendSlice(gpa, debug_content);
try wasm.object_custom_segments.put(gpa, section_index, .{
- .data_off = data_off,
- .flags = .{
- .data_len = @intCast(debug_content.len),
- .represented = false, // set when scanning symbol table
+ .payload = .{
+ .off = data_off,
+ .len = @intCast(debug_content.len),
},
+ .flags = .{},
.section_name = try wasm.internString(section_name),
});
} else {
@@ -483,7 +494,7 @@ fn parse(
},
.type => {
const func_types_len, pos = readLeb(u32, bytes, pos);
- for (ss.func_types.addManyAsSlice(gpa, func_types_len)) |*func_type| {
+ for (try ss.func_types.addManyAsSlice(gpa, func_types_len)) |*func_type| {
if (bytes[pos] != std.wasm.function_type) return error.ExpectedFuncType;
pos += 1;
@@ -509,7 +520,7 @@ fn parse(
try ss.func_imports.append(gpa, .{
.module_name = interned_module_name,
.name = interned_name,
- .index = function,
+ .function_index = @enumFromInt(function),
});
},
.memory => {
@@ -527,24 +538,32 @@ fn parse(
const valtype, pos = readEnum(std.wasm.Valtype, bytes, pos);
const mutable = bytes[pos] == 0x01;
pos += 1;
- try wasm.object_global_imports.append(gpa, .{
+ try wasm.object_global_imports.put(gpa, interned_name, .{
+ .flags = .{
+ .global_type = .{
+ .valtype = .from(valtype),
+ .mutable = mutable,
+ },
+ },
.module_name = interned_module_name,
- .name = interned_name,
- .mutable = mutable,
- .valtype = valtype,
+ .source_location = source_location,
+ .resolution = .unresolved,
});
},
.table => {
- const reftype, pos = readEnum(std.wasm.RefType, bytes, pos);
+ const ref_type, pos = readEnum(std.wasm.RefType, bytes, pos);
const limits, pos = readLimits(bytes, pos);
- try wasm.object_table_imports.append(gpa, .{
+ try wasm.object_table_imports.put(gpa, interned_name, .{
+ .flags = .{
+ .limits_has_max = limits.flags.has_max,
+ .limits_is_shared = limits.flags.is_shared,
+ .ref_type = .from(ref_type),
+ },
.module_name = interned_module_name,
- .name = interned_name,
+ .source_location = source_location,
+ .resolution = .unresolved,
.limits_min = limits.min,
.limits_max = limits.max,
- .limits_has_max = limits.flags.has_max,
- .limits_is_shared = limits.flags.is_shared,
- .reftype = reftype,
});
},
}
@@ -553,17 +572,25 @@ fn parse(
.function => {
const functions_len, pos = readLeb(u32, bytes, pos);
for (try ss.func_type_indexes.addManyAsSlice(gpa, functions_len)) |*func_type_index| {
- func_type_index.*, pos = readLeb(u32, bytes, pos);
+ const i, pos = readLeb(u32, bytes, pos);
+ func_type_index.* = @enumFromInt(i);
}
},
.table => {
const tables_len, pos = readLeb(u32, bytes, pos);
for (try wasm.object_tables.addManyAsSlice(gpa, tables_len)) |*table| {
- const reftype, pos = readEnum(std.wasm.RefType, bytes, pos);
+ const ref_type, pos = readEnum(std.wasm.RefType, bytes, pos);
const limits, pos = readLimits(bytes, pos);
table.* = .{
- .reftype = reftype,
- .limits = limits,
+ .name = .none,
+ .module_name = .none,
+ .flags = .{
+ .ref_type = .from(ref_type),
+ .limits_has_max = limits.flags.has_max,
+ .limits_is_shared = limits.flags.is_shared,
+ },
+ .limits_min = limits.min,
+ .limits_max = limits.max,
};
}
},
@@ -582,8 +609,13 @@ fn parse(
pos += 1;
const expr, pos = try readInit(wasm, bytes, pos);
global.* = .{
- .valtype = valtype,
- .mutable = mutable,
+ .name = .none,
+ .flags = .{
+ .global_type = .{
+ .valtype = .from(valtype),
+ .mutable = mutable,
+ },
+ },
.expr = expr,
};
}
@@ -668,8 +700,6 @@ fn parse(
}
if (!saw_linking_section) return error.MissingLinkingSection;
- wasm.object_total_sections = local_section_index_base + local_section_index;
-
if (has_tls) {
const cpu_features = wasm.base.comp.root_mod.resolved_target.result.cpu.features;
if (!std.Target.wasm.featureSetHas(cpu_features, .atomics))
@@ -770,7 +800,7 @@ fn parse(
ptr.name = symbol.name;
ptr.flags = symbol.flags;
if (symbol.flags.undefined and symbol.flags.binding == .local) {
- const name = wasm.stringSlice(ptr.name.unwrap().?);
+ const name = ptr.name.slice(wasm).?;
diags.addParseError(path, "local symbol '{s}' references import", .{name});
}
},
@@ -779,7 +809,7 @@ fn parse(
const ptr = i.ptr(wasm);
ptr.flags = symbol.flags;
if (symbol.flags.undefined and symbol.flags.binding == .local) {
- const name = wasm.stringSlice(ptr.name);
+ const name = ptr.name.slice(wasm);
diags.addParseError(path, "local symbol '{s}' references import", .{name});
}
},
@@ -806,19 +836,20 @@ fn parse(
data.flags.no_strip = info.flags.retain;
data.flags.alignment = info.flags.alignment;
if (data.flags.undefined and data.flags.binding == .local) {
- const name = wasm.stringSlice(info.name);
+ const name = info.name.slice(wasm);
diags.addParseError(path, "local symbol '{s}' references import", .{name});
}
}
// Check for indirect function table in case of an MVP object file.
legacy_indirect_function_table: {
- const table_imports = wasm.object_table_imports.items[table_imports_start..];
+ const table_import_names = wasm.object_table_imports.keys()[table_imports_start..];
+ const table_import_values = wasm.object_table_imports.values()[table_imports_start..];
// If there is a symbol for each import table, this is not a legacy object file.
- if (table_imports.len == table_count) break :legacy_indirect_function_table;
+ if (table_import_names.len == table_count) break :legacy_indirect_function_table;
if (table_count != 0) {
return diags.failParse(path, "expected a table entry symbol for each of the {d} table(s), but instead got {d} symbols.", .{
- table_imports.len, table_count,
+ table_import_names.len, table_count,
});
}
// MVP object files cannot have any table definitions, only
@@ -827,16 +858,16 @@ fn parse(
if (tables.len > 0) {
return diags.failParse(path, "table definition without representing table symbols", .{});
}
- if (table_imports.len != 1) {
+ if (table_import_names.len != 1) {
return diags.failParse(path, "found more than one table import, but no representing table symbols", .{});
}
- const table_import_name = table_imports[0].name;
+ const table_import_name = table_import_names[0];
if (table_import_name != wasm.preloaded_strings.__indirect_function_table) {
return diags.failParse(path, "non-indirect function table import '{s}' is missing a corresponding symbol", .{
- wasm.stringSlice(table_import_name),
+ table_import_name.slice(wasm),
});
}
- table_imports[0].flags = .{
+ table_import_values[0].flags = .{
.undefined = true,
.no_strip = true,
};
@@ -874,11 +905,11 @@ fn parse(
},
.function_imports = .{
.off = function_imports_start,
- .len = @intCast(wasm.object_function_imports.items.len - function_imports_start),
+ .len = @intCast(wasm.object_function_imports.entries.len - function_imports_start),
},
.global_imports = .{
.off = global_imports_start,
- .len = @intCast(wasm.object_global_imports.items.len - global_imports_start),
+ .len = @intCast(wasm.object_global_imports.entries.len - global_imports_start),
},
.table_imports = .{
.off = table_imports_start,
@@ -894,7 +925,7 @@ fn parse(
},
.custom_segments = .{
.off = custom_segment_start,
- .len = @intCast(wasm.object_custom_segments.items.len - custom_segment_start),
+ .len = @intCast(wasm.object_custom_segments.entries.len - custom_segment_start),
},
.local_section_index_base = local_section_index_base,
};
@@ -920,7 +951,7 @@ fn parseFeatures(
'-' => .@"-",
'+' => .@"+",
'=' => .@"=",
- else => return error.InvalidFeaturePrefix,
+ else => |b| return diags.failParse(path, "invalid feature prefix: 0x{x}", .{b}),
};
pos += 1;
const name, pos = readBytes(bytes, pos);
@@ -935,7 +966,7 @@ fn parseFeatures(
std.mem.sortUnstable(Wasm.Feature, feature_buffer, {}, Wasm.Feature.lessThan);
return .{
- .fromString(try wasm.internString(@bitCast(feature_buffer))),
+ .fromString(try wasm.internString(@ptrCast(feature_buffer))),
pos,
};
}
@@ -966,9 +997,9 @@ fn readEnum(comptime T: type, bytes: []const u8, pos: usize) struct { T, usize }
}
fn readLimits(bytes: []const u8, start_pos: usize) struct { std.wasm.Limits, usize } {
- const flags = bytes[start_pos];
+ const flags: std.wasm.Limits.Flags = @bitCast(bytes[start_pos]);
const min, const max_pos = readLeb(u32, bytes, start_pos + 1);
- const max, const end_pos = if (flags.has_max) readLeb(u32, bytes, max_pos) else .{ undefined, max_pos };
+ const max, const end_pos = if (flags.has_max) readLeb(u32, bytes, max_pos) else .{ 0, max_pos };
return .{ .{
.flags = flags,
.min = min,
@@ -977,7 +1008,7 @@ fn readLimits(bytes: []const u8, start_pos: usize) struct { std.wasm.Limits, usi
}
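The `@bitCast` decodes the limits flags byte straight into `std.wasm.Limits.Flags` instead of testing bits by hand; in the binary format, bit 0 (`0x01`) is has_max and bit 1 (`0x02`) is the threads-proposal shared flag. For example:

    03 01 10   // flags = has_max|is_shared, min = 1, max = 16

Returning `0` instead of `undefined` for an absent max also makes the struct safe to copy and compare before any `has_max` check.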
fn readInit(wasm: *Wasm, bytes: []const u8, pos: usize) !struct { Wasm.Expr, usize } {
- const end_pos = skipInit(bytes, pos); // one after the end opcode
+ const end_pos = try skipInit(bytes, pos); // one after the end opcode
return .{ try wasm.addExpr(bytes[pos..end_pos]), end_pos };
}
@@ -991,6 +1022,7 @@ fn skipInit(bytes: []const u8, pos: usize) !usize {
.global_get => readLeb(u32, bytes, pos + 1)[1],
else => return error.InvalidInitOpcode,
};
- if (readEnum(std.wasm.Opcode, bytes, end_pos) != .end) return error.InitExprMissingEnd;
- return end_pos + 1;
+ const op, const final_pos = readEnum(std.wasm.Opcode, bytes, end_pos);
+ if (op != .end) return error.InitExprMissingEnd;
+ return final_pos;
}
src/link/Wasm.zig
@@ -100,7 +100,7 @@ object_global_imports: std.AutoArrayHashMapUnmanaged(String, GlobalImport) = .em
object_globals: std.ArrayListUnmanaged(Global) = .empty,
/// All table imports for all objects.
-object_table_imports: std.ArrayListUnmanaged(TableImport) = .empty,
+object_table_imports: std.AutoArrayHashMapUnmanaged(String, TableImport) = .empty,
/// All parsed table sections for all objects.
object_tables: std.ArrayListUnmanaged(Table) = .empty,
@@ -109,12 +109,28 @@ object_memory_imports: std.ArrayListUnmanaged(MemoryImport) = .empty,
/// All parsed memory sections for all objects.
object_memories: std.ArrayListUnmanaged(std.wasm.Memory) = .empty,
+/// All relocations from all objects concatenated. `relocs_start` marks the end
+/// point of object relocations and start point of Zcu relocations.
+object_relocations: std.MultiArrayList(ObjectRelocation) = .empty,
+
/// List of initialization functions. These must be called in order of priority
/// by the (synthetic) __wasm_call_ctors function.
object_init_funcs: std.ArrayListUnmanaged(InitFunc) = .empty,
-/// All relocations from all objects concatenated. `relocs_start` marks the end
-/// point of object relocations and start point of Zcu relocations.
-relocations: std.MultiArrayList(Relocation) = .empty,
+
+/// Relocations to be emitted into an object file. Remains empty when not
+/// emitting an object file.
+out_relocs: std.MultiArrayList(OutReloc) = .empty,
+/// List of locations within `string_bytes` that must be patched with the virtual
+/// memory address of a Uav during `flush`.
+/// When emitting an object file, `out_relocs` is used instead.
+uav_fixups: std.ArrayListUnmanaged(UavFixup) = .empty,
+/// List of locations within `string_bytes` that must be patched with the virtual
+/// memory address of a Nav during `flush`.
+/// When emitting an object file, `out_relocs` is used instead.
+nav_fixups: std.ArrayListUnmanaged(NavFixup) = .empty,
+/// Symbols to be emitted into an object file. Remains empty when not emitting
+/// an object file.
+symbol_table: std.AutoArrayHashMapUnmanaged(String, void) = .empty,
/// Non-synthetic section that can essentially be mem-cpy'd into place after performing relocations.
object_data_segments: std.ArrayListUnmanaged(DataSegment) = .empty,
@@ -125,7 +141,7 @@ object_custom_segments: std.AutoArrayHashMapUnmanaged(ObjectSectionIndex, Custom
object_comdats: std.ArrayListUnmanaged(Comdat) = .empty,
/// A table that maps the relocations to be performed where the key represents
/// the section (across all objects) that the slice of relocations applies to.
-object_relocations_table: std.AutoArrayHashMapUnmanaged(ObjectSectionIndex, Relocation.Slice) = .empty,
+object_relocations_table: std.AutoArrayHashMapUnmanaged(ObjectSectionIndex, ObjectRelocation.Slice) = .empty,
/// Incremented across all objects in order to enable calculation of `ObjectSectionIndex` values.
object_total_sections: u32 = 0,
/// All comdat symbols from all objects concatenated.
@@ -151,7 +167,10 @@ dump_argv_list: std.ArrayListUnmanaged([]const u8),
preloaded_strings: PreloadedStrings,
-navs: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, Nav) = .empty,
+/// This field is used when emitting an object; `navs_exe` used otherwise.
+navs_obj: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, NavObj) = .empty,
+/// This field is unused when emitting an object; `navs_obj` is used in that case.
+navs_exe: std.AutoArrayHashMapUnmanaged(InternPool.Nav.Index, NavExe) = .empty,
zcu_funcs: std.AutoArrayHashMapUnmanaged(InternPool.Index, ZcuFunc) = .empty,
nav_exports: std.AutoArrayHashMapUnmanaged(NavExport, Zcu.Export.Index) = .empty,
uav_exports: std.AutoArrayHashMapUnmanaged(UavExport, Zcu.Export.Index) = .empty,
@@ -201,9 +220,14 @@ global_imports: std.AutoArrayHashMapUnmanaged(String, GlobalImportId) = .empty,
/// Ordered list of non-import tables that will appear in the final binary.
/// Empty until prelink.
tables: std.AutoArrayHashMapUnmanaged(TableImport.Resolution, void) = .empty,
-table_imports: std.AutoArrayHashMapUnmanaged(String, ObjectTableImportIndex) = .empty,
+table_imports: std.AutoArrayHashMapUnmanaged(String, TableImport.Index) = .empty,
any_exports_updated: bool = true,
+/// Set to true if any `GLOBAL_INDEX` relocation is encountered with
+/// `SymbolFlags.tls` set to true. This is for objects only; final
+/// value must be this OR'd with the same logic for zig functions
+/// (set to true if any threadlocal global is used).
+any_tls_relocs: bool = false,
/// All MIR instructions for all Zcu functions.
mir_instructions: std.MultiArrayList(Mir.Inst) = .{},
@@ -212,6 +236,18 @@ mir_extra: std.ArrayListUnmanaged(u32) = .empty,
/// All local types for all Zcu functions.
all_zcu_locals: std.ArrayListUnmanaged(u8) = .empty,
+pub const UavFixup = extern struct {
+ ip_index: InternPool.Index,
+ /// Index into `string_bytes`.
+ offset: u32,
+};
+
+pub const NavFixup = extern struct {
+ nav_index: InternPool.Nav.Index,
+ /// Index into `string_bytes`.
+ offset: u32,
+};
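Fixups record where in `string_bytes` a pointer-sized slot is waiting for a final virtual address, and are the executable-output counterpart of `out_relocs`. A minimal flush-time sketch of applying one, assuming the wasm32 case (4-byte little-endian slot) and the `uavAddr` resolver used in Emit.zig above:

    // Sketch only; shapes assumed from this commit, not a function it adds.
    fn applyUavFixup(wasm: *Wasm, fixup: UavFixup) !void {
        const addr: u32 = try wasm.uavAddr(fixup.ip_index);
        std.mem.writeInt(u32, wasm.string_bytes.items[fixup.offset..][0..4], addr, .little);
    }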
+
/// Index into `objects`.
pub const ObjectIndex = enum(u32) {
_,
@@ -300,6 +336,10 @@ pub const SourceLocation = enum(u32) {
};
}
+ pub fn fromObject(object_index: ObjectIndex, wasm: *const Wasm) SourceLocation {
+ return pack(.{ .object_index = object_index }, wasm);
+ }
+
pub fn addError(sl: SourceLocation, wasm: *Wasm, comptime f: []const u8, args: anytype) void {
const diags = &wasm.base.comp.link_diags;
switch (sl.unpack(wasm)) {
@@ -345,7 +385,7 @@ pub const SymbolFlags = packed struct(u32) {
// Above here matches the tooling conventions ABI.
- padding1: u8 = 0,
+ padding1: u5 = 0,
/// Zig-specific. Dead things are allowed to be garbage collected.
alive: bool = false,
/// Zig-specific. Segments only. Signals that the segment contains only
@@ -360,6 +400,12 @@ pub const SymbolFlags = packed struct(u32) {
alignment: Alignment = .none,
/// Zig-specific. Globals only.
global_type: Global.Type = .zero,
+ /// Zig-specific. Tables only.
+ limits_has_max: bool = false,
+ /// Zig-specific. Tables only.
+ limits_is_shared: bool = false,
+ /// Zig-specific. Tables only.
+ ref_type: RefType1 = .funcref,
pub const Binding = enum(u2) {
strong = 0,
@@ -378,13 +424,16 @@ pub const SymbolFlags = packed struct(u32) {
};
pub fn initZigSpecific(flags: *SymbolFlags, must_link: bool, no_strip: bool) void {
+ flags.no_strip = no_strip;
flags.alive = false;
flags.strings = false;
flags.must_link = must_link;
- flags.no_strip = no_strip;
+ flags.is_passive = false;
flags.alignment = .none;
flags.global_type = .zero;
- flags.is_passive = false;
+ flags.limits_has_max = false;
+ flags.limits_is_shared = false;
+ flags.ref_type = .funcref;
}
pub fn isIncluded(flags: SymbolFlags, is_dynamic: bool) bool {
@@ -427,11 +476,28 @@ pub const SymbolFlags = packed struct(u32) {
}
};
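Bit-budget check for the packed struct: shrinking `padding1` from `u8` to `u5` frees exactly the three bits consumed by the new table-only fields (`limits_has_max: bool`, `limits_is_shared: bool`, and `ref_type: RefType1`, a `u1`), so `SymbolFlags` still totals 32 bits as its `packed struct(u32)` declaration requires.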
-pub const Nav = extern struct {
+pub const NavObj = extern struct {
code: DataSegment.Payload,
- relocs: Relocation.Slice,
+ /// Relocations to apply to `code` when emitting an object file.
+ relocs: OutReloc.Slice,
- pub const Code = DataSegment.Payload;
+ /// Index into `navs_obj`.
+ /// Note that swapRemove is sometimes performed on `navs_obj`.
+ pub const Index = enum(u32) {
+ _,
+
+ pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Nav.Index {
+ return &wasm.navs_obj.keys()[@intFromEnum(i)];
+ }
+
+ pub fn value(i: @This(), wasm: *const Wasm) *NavObj {
+ return &wasm.navs_obj.values()[@intFromEnum(i)];
+ }
+ };
+};
+
+pub const NavExe = extern struct {
+ code: DataSegment.Payload,
/// Index into `navs`.
/// Note that swapRemove is sometimes performed on `navs`.
@@ -439,11 +505,11 @@ pub const Nav = extern struct {
_,
pub fn key(i: @This(), wasm: *const Wasm) *InternPool.Nav.Index {
- return &wasm.navs.keys()[@intFromEnum(i)];
+ return &wasm.navs_exe.keys()[@intFromEnum(i)];
}
- pub fn value(i: @This(), wasm: *const Wasm) *Nav {
- return &wasm.navs.values()[@intFromEnum(i)];
+ pub fn value(i: @This(), wasm: *const Wasm) *NavExe {
+ return &wasm.navs_exe.values()[@intFromEnum(i)];
}
};
};
@@ -506,7 +572,7 @@ pub const FunctionImport = extern struct {
__wasm_init_tls,
__zig_error_names,
// Next, index into `object_functions`.
- // Next, index into `navs`.
+ // Next, index into `navs_exe` or `navs_obj` depending on whether emitting an object.
_,
const first_object_function = @intFromEnum(Resolution.__zig_error_names) + 1;
@@ -519,7 +585,8 @@ pub const FunctionImport = extern struct {
__wasm_init_tls,
__zig_error_names,
object_function: ObjectFunctionIndex,
- nav: Nav.Index,
+ nav_exe: NavExe.Index,
+ nav_obj: NavObj.Index,
};
pub fn unpack(r: Resolution, wasm: *const Wasm) Unpacked {
@@ -535,8 +602,14 @@ pub const FunctionImport = extern struct {
const object_function_index = i - first_object_function;
if (object_function_index < wasm.object_functions.items.len)
return .{ .object_function = @enumFromInt(object_function_index) };
+ const comp = wasm.base.comp;
+ const is_obj = comp.config.output_mode == .Obj;
const nav_index = object_function_index - wasm.object_functions.items.len;
- return .{ .nav = @enumFromInt(nav_index) };
+ return if (is_obj) .{
+ .nav_obj = @enumFromInt(nav_index),
+ } else .{
+ .nav_exe = @enumFromInt(nav_index),
+ };
},
};
}
@@ -550,17 +623,24 @@ pub const FunctionImport = extern struct {
.__wasm_init_tls => .__wasm_init_tls,
.__zig_error_names => .__zig_error_names,
.object_function => |i| @enumFromInt(first_object_function + @intFromEnum(i)),
- .nav => |i| @enumFromInt(first_object_function + wasm.object_functions.items.len + @intFromEnum(i)),
+ .nav_obj => |i| @enumFromInt(first_object_function + wasm.object_functions.items.len + @intFromEnum(i)),
+ .nav_exe => |i| @enumFromInt(first_object_function + wasm.object_functions.items.len + @intFromEnum(i)),
};
}
pub fn fromIpNav(wasm: *const Wasm, ip_nav: InternPool.Nav.Index) Resolution {
- return pack(wasm, .{ .nav = @enumFromInt(wasm.navs.getIndex(ip_nav).?) });
+ const comp = wasm.base.comp;
+ const is_obj = comp.config.output_mode == .Obj;
+ return pack(wasm, if (is_obj) .{
+ .nav_obj = @enumFromInt(wasm.navs_obj.getIndex(ip_nav).?),
+ } else .{
+ .nav_exe = @enumFromInt(wasm.navs_exe.getIndex(ip_nav).?),
+ });
}
pub fn isNavOrUnresolved(r: Resolution, wasm: *const Wasm) bool {
return switch (r.unpack(wasm)) {
- .unresolved, .nav => true,
+ .unresolved, .nav_obj, .nav_exe => true,
else => false,
};
}
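The `Resolution` enum packs three namespaces into a single `u32`: the named synthetic entries first, then `object_functions`, then the nav table selected by output mode. A worked example of `unpack`, assuming `__zig_error_names` has ordinal 5 so `first_object_function == 6`, and `object_functions.items.len == 10`: a raw value of 20 gives `object_function_index = 20 - 6 = 14`, which is not below 10, so it resolves to nav index `14 - 10 = 4` in `navs_obj` or `navs_exe` depending on `output_mode == .Obj`. `pack` is the exact inverse, which is why the `nav_obj` and `nav_exe` arms there share one formula.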
@@ -608,7 +688,7 @@ pub const GlobalImport = extern struct {
__tls_size,
__zig_error_name_table,
// Next, index into `object_globals`.
- // Next, index into `navs`.
+ // Next, index into `navs_obj` or `navs_exe` depending on whether emitting an object.
_,
const first_object_global = @intFromEnum(Resolution.__zig_error_name_table) + 1;
@@ -623,7 +703,8 @@ pub const GlobalImport = extern struct {
__tls_size,
__zig_error_name_table,
object_global: ObjectGlobalIndex,
- nav: Nav.Index,
+ nav_exe: NavExe.Index,
+ nav_obj: NavObj.Index,
};
pub fn unpack(r: Resolution, wasm: *const Wasm) Unpacked {
@@ -639,8 +720,14 @@ pub const GlobalImport = extern struct {
const object_global_index = i - first_object_global;
if (object_global_index < wasm.object_globals.items.len)
return .{ .object_global = @enumFromInt(object_global_index) };
+ const comp = wasm.base.comp;
+ const is_obj = comp.config.output_mode == .Obj;
const nav_index = object_global_index - wasm.object_globals.items.len;
- return .{ .nav = @enumFromInt(nav_index) };
+ return if (is_obj) .{
+ .nav_obj = @enumFromInt(nav_index),
+ } else .{
+ .nav_exe = @enumFromInt(nav_index),
+ };
},
};
}
@@ -656,12 +743,19 @@ pub const GlobalImport = extern struct {
.__tls_size => .__tls_size,
.__zig_error_name_table => .__zig_error_name_table,
.object_global => |i| @enumFromInt(first_object_global + @intFromEnum(i)),
- .nav => |i| @enumFromInt(first_object_global + wasm.object_globals.items.len + @intFromEnum(i)),
+ .nav_obj => |i| @enumFromInt(first_object_global + wasm.object_globals.items.len + @intFromEnum(i)),
+ .nav_exe => |i| @enumFromInt(first_object_global + wasm.object_globals.items.len + @intFromEnum(i)),
};
}
pub fn fromIpNav(wasm: *const Wasm, ip_nav: InternPool.Nav.Index) Resolution {
- return pack(wasm, .{ .nav = @enumFromInt(wasm.navs.getIndex(ip_nav).?) });
+ const comp = wasm.base.comp;
+ const is_obj = comp.config.output_mode == .Obj;
+ return pack(wasm, if (is_obj) .{
+ .nav_obj = @enumFromInt(wasm.navs_obj.getIndex(ip_nav).?),
+ } else .{
+ .nav_exe = @enumFromInt(wasm.navs_exe.getIndex(ip_nav).?),
+ });
}
};
@@ -717,11 +811,32 @@ pub const Global = extern struct {
};
};
+pub const RefType1 = enum(u1) {
+ funcref,
+ externref,
+
+ pub fn from(rt: std.wasm.RefType) RefType1 {
+ return switch (rt) {
+ .funcref => .funcref,
+ .externref => .externref,
+ };
+ }
+
+ pub fn to(rt: RefType1) std.wasm.RefType {
+ return switch (rt) {
+ .funcref => .funcref,
+ .externref => .externref,
+ };
+ }
+};
+
pub const TableImport = extern struct {
flags: SymbolFlags,
module_name: String,
source_location: SourceLocation,
resolution: Resolution,
+ limits_min: u32,
+ limits_max: u32,
/// Represents a synthetic table, or a table from an object.
pub const Resolution = enum(u32) {
@@ -730,35 +845,36 @@ pub const TableImport = extern struct {
// Next, index into `object_tables`.
_,
};
+
+ /// Index into `object_table_imports`.
+ pub const Index = enum(u32) {
+ _,
+
+ pub fn key(index: Index, wasm: *const Wasm) *String {
+ return &wasm.object_table_imports.keys()[@intFromEnum(index)];
+ }
+
+ pub fn value(index: Index, wasm: *const Wasm) *TableImport {
+ return &wasm.object_table_imports.values()[@intFromEnum(index)];
+ }
+ };
};
pub const Table = extern struct {
- module_name: String,
- name: String,
+ module_name: OptionalString,
+ name: OptionalString,
flags: SymbolFlags,
limits_min: u32,
limits_max: u32,
- limits_has_max: bool,
- limits_is_shared: bool,
- reftype: std.wasm.RefType,
- padding: [1]u8 = .{0},
};
-/// Uniquely identifies a section across all objects. Each Object has a section_start field.
-/// By subtracting that value from this one, the Object section index is obtained.
+/// Uniquely identifies a section across all objects. By subtracting
+/// `Object.local_section_index_base` from this one, the Object section index
+/// is obtained.
pub const ObjectSectionIndex = enum(u32) {
_,
};
-/// Index into `object_table_imports`.
-pub const ObjectTableImportIndex = enum(u32) {
- _,
-
- pub fn ptr(index: ObjectTableImportIndex, wasm: *const Wasm) *TableImport {
- return &wasm.object_table_imports.items[@intFromEnum(index)];
- }
-};
-
/// Index into `object_tables`.
pub const ObjectTableIndex = enum(u32) {
_,
@@ -820,13 +936,18 @@ pub const DataSegment = extern struct {
len: u32,
fn slice(p: DataSegment.Payload, wasm: *const Wasm) []const u8 {
+ assert(p.off != p.len);
return wasm.string_bytes.items[p.off..][0..p.len];
}
};
- /// Index into `object_data_segments`.
+ /// Index into `Wasm.object_data_segments`.
pub const Index = enum(u32) {
_,
+
+ pub fn ptr(i: Index, wasm: *const Wasm) *DataSegment {
+ return &wasm.object_data_segments.items[@intFromEnum(i)];
+ }
};
};
@@ -992,6 +1113,10 @@ pub const FunctionImportId = enum(u32) {
return .{ .zcu_import = @enumFromInt(zcu_import_i) };
}
+ pub fn fromObject(function_import_index: FunctionImport.Index, wasm: *const Wasm) FunctionImportId {
+ return pack(.{ .object_function_import = function_import_index }, wasm);
+ }
+
/// This function is allowed O(N) lookup because it is only called during
/// diagnostic generation.
pub fn sourceLocation(id: FunctionImportId, wasm: *const Wasm) SourceLocation {
@@ -1035,6 +1160,10 @@ pub const GlobalImportId = enum(u32) {
return .{ .zcu_import = @enumFromInt(zcu_import_i) };
}
+ pub fn fromObject(object_global_import: GlobalImport.Index, wasm: *const Wasm) GlobalImportId {
+ return pack(.{ .object_global_import = object_global_import }, wasm);
+ }
+
/// This function is allowed O(N) lookup because it is only called during
/// diagnostic generation.
pub fn sourceLocation(id: GlobalImportId, wasm: *const Wasm) SourceLocation {
@@ -1054,7 +1183,38 @@ pub const GlobalImportId = enum(u32) {
}
};
-pub const Relocation = struct {
+/// Index into `Wasm.symbol_table`.
+pub const SymbolTableIndex = enum(u32) {
+ _,
+
+ pub fn key(i: @This(), wasm: *const Wasm) *String {
+ return &wasm.symbol_table.keys()[@intFromEnum(i)];
+ }
+};
+
+pub const OutReloc = struct {
+ tag: ObjectRelocation.Tag,
+ offset: u32,
+ pointee: Pointee,
+ addend: i32,
+
+ pub const Pointee = union {
+ symbol_index: SymbolTableIndex,
+ type_index: FunctionType.Index,
+ };
+
+ pub const Slice = extern struct {
+ /// Index into `out_relocs`.
+ off: u32,
+ len: u32,
+
+ pub fn slice(s: Slice, wasm: *const Wasm) []OutReloc {
+ return wasm.relocations.items[s.off..][0..s.len];
+ }
+ };
+};
+
+pub const ObjectRelocation = struct {
tag: Tag,
/// Offset of the value to rewrite relative to the relevant section's contents.
/// When `offset` is zero, its position is immediately after the id and size of the section.
@@ -1067,8 +1227,6 @@ pub const Relocation = struct {
symbol_name: String,
type_index: FunctionType.Index,
section: ObjectSectionIndex,
- nav_index: InternPool.Nav.Index,
- uav_index: InternPool.Index,
};
pub const Slice = extern struct {
@@ -1076,7 +1234,7 @@ pub const Relocation = struct {
off: u32,
len: u32,
- pub fn slice(s: Slice, wasm: *const Wasm) []Relocation {
+ pub fn slice(s: Slice, wasm: *const Wasm) []ObjectRelocation {
return wasm.relocations.items[s.off..][0..s.len];
}
};
@@ -1122,11 +1280,6 @@ pub const Relocation = struct {
// Above here, the tags correspond to symbol table ABI described in
// https://github.com/WebAssembly/tool-conventions/blob/main/Linking.md
// Below, the tags are compiler-internal.
-
- /// Uses `nav_index`. 4 or 8 bytes depending on wasm32 or wasm64.
- nav_index,
- /// Uses `uav_index`. 4 or 8 bytes depending on wasm32 or wasm64.
- uav_index,
};
};
@@ -1456,7 +1609,8 @@ pub fn deinit(wasm: *Wasm) void {
const gpa = wasm.base.comp.gpa;
if (wasm.llvm_object) |llvm_object| llvm_object.deinit();
- wasm.navs.deinit(gpa);
+ wasm.navs_exe.deinit(gpa);
+ wasm.navs_obj.deinit(gpa);
wasm.zcu_funcs.deinit(gpa);
wasm.nav_exports.deinit(gpa);
wasm.uav_exports.deinit(gpa);
@@ -1478,7 +1632,7 @@ pub fn deinit(wasm: *Wasm) void {
wasm.object_tables.deinit(gpa);
wasm.object_memory_imports.deinit(gpa);
wasm.object_memories.deinit(gpa);
-
+ wasm.object_relocations.deinit(gpa);
wasm.object_data_segments.deinit(gpa);
wasm.object_custom_segments.deinit(gpa);
wasm.object_init_funcs.deinit(gpa);
@@ -1492,8 +1646,13 @@ pub fn deinit(wasm: *Wasm) void {
wasm.function_imports.deinit(gpa);
wasm.functions.deinit(gpa);
wasm.globals.deinit(gpa);
+ wasm.global_exports.deinit(gpa);
wasm.global_imports.deinit(gpa);
wasm.table_imports.deinit(gpa);
+ wasm.symbol_table.deinit(gpa);
+ wasm.out_relocs.deinit(gpa);
+ wasm.uav_fixups.deinit(gpa);
+ wasm.nav_fixups.deinit(gpa);
wasm.string_bytes.deinit(gpa);
wasm.string_table.deinit(gpa);
@@ -1527,7 +1686,9 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
const zcu = pt.zcu;
const ip = &zcu.intern_pool;
const nav = ip.getNav(nav_index);
- const gpa = wasm.base.comp.gpa;
+ const comp = wasm.base.comp;
+ const gpa = comp.gpa;
+ const is_obj = comp.config.output_mode == .Obj;
const nav_val = zcu.navValue(nav_index);
const is_extern, const nav_init = switch (ip.indexToKey(nav_val.toIntern())) {
@@ -1542,22 +1703,26 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
if (!nav_init.typeOf(zcu).hasRuntimeBits(zcu)) {
_ = wasm.imports.swapRemove(nav_index);
- if (wasm.navs.swapRemove(nav_index)) {
- @panic("TODO reclaim resources");
+ if (is_obj) {
+ if (wasm.navs_obj.swapRemove(nav_index)) @panic("TODO reclaim resources");
+ } else {
+ if (wasm.navs_exe.swapRemove(nav_index)) @panic("TODO reclaim resources");
}
return;
}
if (is_extern) {
try wasm.imports.put(gpa, nav_index, {});
- if (wasm.navs.swapRemove(nav_index)) {
- @panic("TODO reclaim resources");
+ if (is_obj) {
+ if (wasm.navs_obj.swapRemove(nav_index)) @panic("TODO reclaim resources");
+ } else {
+ if (wasm.navs_exe.swapRemove(nav_index)) @panic("TODO reclaim resources");
}
return;
}
const code_start: u32 = @intCast(wasm.string_bytes.items.len);
- const relocs_start: u32 = @intCast(wasm.relocations.len);
+ const relocs_start: u32 = @intCast(wasm.out_relocs.len);
wasm.string_bytes_lock.lock();
try codegen.generateSymbol(
@@ -1570,15 +1735,45 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
);
const code_len: u32 = @intCast(wasm.string_bytes.items.len - code_start);
- const relocs_len: u32 = @intCast(wasm.relocations.len - relocs_start);
+ const relocs_len: u32 = @intCast(wasm.out_relocs.len - relocs_start);
wasm.string_bytes_lock.unlock();
- const code: Nav.Code = .{
+ const naive_code: DataSegment.Payload = .{
.off = code_start,
.len = code_len,
};
- const gop = try wasm.navs.getOrPut(gpa, nav_index);
+ // Only nonzero init values need to take up space in the output.
+ const all_zeroes = std.mem.allEqual(u8, naive_code.slice(wasm), 0);
+ const code: DataSegment.Payload = if (!all_zeroes) naive_code else c: {
+ wasm.string_bytes.shrinkRetainingCapacity(code_start);
+ // Indicate an all-zero payload by making off and len the same value;
+ // using the size as that shared value still transmits the data size.
+ break :c .{
+ .off = naive_code.len,
+ .len = naive_code.len,
+ };
+ };
+
+ if (is_obj) {
+ const gop = try wasm.navs_obj.getOrPut(gpa, nav_index);
+ if (gop.found_existing) {
+ @panic("TODO reuse these resources");
+ } else {
+ _ = wasm.imports.swapRemove(nav_index);
+ }
+ gop.value_ptr.* = .{
+ .code = code,
+ .relocs = .{
+ .off = relocs_start,
+ .len = relocs_len,
+ },
+ };
+ return;
+ }
+
+ assert(relocs_len == 0);
+
+ const gop = try wasm.navs_exe.getOrPut(gpa, nav_index);
if (gop.found_existing) {
@panic("TODO reuse these resources");
} else {
@@ -1586,10 +1781,6 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
}
gop.value_ptr.* = .{
.code = code,
- .relocs = .{
- .off = relocs_start,
- .len = relocs_len,
- },
};
}
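
The all-zero encoding above is compact but easy to misread, so here is a self-contained sketch of it; the `Payload` struct mirrors `DataSegment.Payload` from the diff, while `isAllZeroes` and `size` are hypothetical helpers. Note the encoding is ambiguous in principle for a stored payload whose offset happens to equal its length, which the surrounding code presumably rules out or tolerates.

const std = @import("std");

// Mirrors DataSegment.Payload; helper names are hypothetical.
const Payload = struct { off: u32, len: u32 };

fn isAllZeroes(p: Payload) bool {
    // A stored payload's off points into string_bytes; an all-zero
    // payload stores nothing and sets off == len.
    return p.off == p.len;
}

fn size(p: Payload) u32 {
    return p.len; // holds for both stored and all-zero payloads
}

test "all-zero payload encoding" {
    const stored: Payload = .{ .off = 1024, .len = 16 };
    const zeroes: Payload = .{ .off = 16, .len = 16 };
    try std.testing.expect(!isAllZeroes(stored));
    try std.testing.expect(isAllZeroes(zeroes));
    try std.testing.expectEqual(@as(u32, 16), size(zeroes));
}
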
@@ -1705,6 +1896,12 @@ pub fn prelink(wasm: *Wasm, prog_node: std.Progress.Node) link.File.FlushError!v
continue;
}
}
+ if (wasm.object_table_imports.getPtr(exp_name_interned)) |import| {
+ if (import.resolution != .unresolved) {
+ import.flags.exported = true;
+ continue;
+ }
+ }
try missing_exports.put(gpa, exp_name_interned, {});
}
wasm.missing_exports_init = try gpa.dupe(String, missing_exports.keys());
@@ -1724,32 +1921,28 @@ pub fn prelink(wasm: *Wasm, prog_node: std.Progress.Node) link.File.FlushError!v
for (wasm.object_function_imports.keys(), wasm.object_function_imports.values(), 0..) |name, *import, i| {
if (import.flags.isIncluded(rdynamic)) {
try markFunction(wasm, name, import, @enumFromInt(i));
- continue;
}
}
wasm.functions_len = @intCast(wasm.functions.entries.len);
wasm.function_imports_init_keys = try gpa.dupe(String, wasm.function_imports.keys());
- wasm.function_imports_init_vals = try gpa.dupe(FunctionImportId, wasm.function_imports.vals());
+ wasm.function_imports_init_vals = try gpa.dupe(FunctionImportId, wasm.function_imports.values());
wasm.function_exports_len = @intCast(wasm.function_exports.items.len);
for (wasm.object_global_imports.keys(), wasm.object_global_imports.values(), 0..) |name, *import, i| {
if (import.flags.isIncluded(rdynamic)) {
try markGlobal(wasm, name, import, @enumFromInt(i));
- continue;
}
}
- wasm.globals_len = @intCast(wasm.globals.items.len);
+ wasm.globals_len = @intCast(wasm.globals.entries.len);
wasm.global_imports_init_keys = try gpa.dupe(String, wasm.global_imports.keys());
wasm.global_imports_init_vals = try gpa.dupe(GlobalImportId, wasm.global_imports.values());
wasm.global_exports_len = @intCast(wasm.global_exports.items.len);
- for (wasm.object_table_imports.items, 0..) |*import, i| {
+ for (wasm.object_table_imports.keys(), wasm.object_table_imports.values(), 0..) |name, *import, i| {
if (import.flags.isIncluded(rdynamic)) {
- try markTable(wasm, import.name, import, @enumFromInt(i));
- continue;
+ try markTable(wasm, name, import, @enumFromInt(i));
}
}
- wasm.tables_len = @intCast(wasm.tables.items.len);
}
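
Several of the fixes in this hunk (`vals()` → `values()`, `.items.len` → `.entries.len`, iterating `keys()` and `values()` in lockstep) come down to array hash map storage layout. A quick runnable reminder of the accessors involved; nothing here is from the commit:

const std = @import("std");

test "array hash map accessors" {
    const gpa = std.testing.allocator;
    var map: std.AutoArrayHashMapUnmanaged(u32, u32) = .{};
    defer map.deinit(gpa);
    try map.put(gpa, 1, 10);
    try map.put(gpa, 2, 20);
    // `entries` is the backing MultiArrayList, so its `len` is the count;
    // `values()` (not `vals()`) exposes the value column in insertion order.
    try std.testing.expectEqual(@as(usize, 2), map.entries.len);
    try std.testing.expectEqual(@as(u32, 20), map.values()[1]);
}
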
/// Recursively mark alive everything referenced by the function.
@@ -1758,7 +1951,7 @@ fn markFunction(
name: String,
import: *FunctionImport,
func_index: FunctionImport.Index,
-) error{OutOfMemory}!void {
+) Allocator.Error!void {
if (import.flags.alive) return;
import.flags.alive = true;
@@ -1783,15 +1976,15 @@ fn markFunction(
import.resolution = .__wasm_init_tls;
wasm.functions.putAssumeCapacity(.__wasm_init_tls, {});
} else {
- try wasm.function_imports.put(gpa, name, .fromObject(func_index));
+ try wasm.function_imports.put(gpa, name, .fromObject(func_index, wasm));
}
} else {
const gop = wasm.functions.getOrPutAssumeCapacity(import.resolution);
if (!is_obj and import.flags.isExported(rdynamic))
- try wasm.function_exports.append(gpa, @intCast(gop.index));
+ try wasm.function_exports.append(gpa, @enumFromInt(gop.index));
- for (wasm.functionResolutionRelocSlice(import.resolution)) |reloc|
+ for (try wasm.functionResolutionRelocSlice(import.resolution)) |reloc|
try wasm.markReloc(reloc);
}
}
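
The two lines at the top of `markFunction` (`if (import.flags.alive) return;` then setting the flag) are the whole termination argument for this recursive marking, and the same guard opens `markGlobal` and `markTable` below. A minimal sketch of that visited-flag pattern with illustrative names:

const std = @import("std");

// Setting `alive` before recursing makes revisits, including cycles, no-ops.
const Node = struct {
    alive: bool = false,
    refs: []const *Node = &.{},

    fn mark(n: *Node) void {
        if (n.alive) return;
        n.alive = true;
        for (n.refs) |r| mark(r);
    }
};

test "marking terminates on cycles" {
    var a: Node = .{};
    var b: Node = .{};
    a.refs = &.{&b};
    b.refs = &.{&a}; // cycle
    a.mark();
    try std.testing.expect(a.alive and b.alive);
}
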
@@ -1833,15 +2026,15 @@ fn markGlobal(
import.resolution = .__tls_size;
wasm.globals.putAssumeCapacity(.__tls_size, {});
} else {
- try wasm.global_imports.put(gpa, name, .fromObject(global_index));
+ try wasm.global_imports.put(gpa, name, .fromObject(global_index, wasm));
}
} else {
const gop = wasm.globals.getOrPutAssumeCapacity(import.resolution);
if (!is_obj and import.flags.isExported(rdynamic))
- try wasm.global_exports.append(gpa, @intCast(gop.index));
+ try wasm.global_exports.append(gpa, @enumFromInt(gop.index));
- for (wasm.globalResolutionRelocSlice(import.resolution)) |reloc|
+ for (try wasm.globalResolutionRelocSlice(import.resolution)) |reloc|
try wasm.markReloc(reloc);
}
}
@@ -1850,7 +2043,7 @@ fn markTable(
wasm: *Wasm,
name: String,
import: *TableImport,
- table_index: ObjectTableImportIndex,
+ table_index: TableImport.Index,
) !void {
if (import.flags.alive) return;
import.flags.alive = true;
@@ -1865,7 +2058,7 @@ fn markTable(
import.resolution = .__indirect_function_table;
wasm.tables.putAssumeCapacity(.__indirect_function_table, {});
} else {
- try wasm.table_imports.put(gpa, name, .fromObject(table_index));
+ try wasm.table_imports.put(gpa, name, table_index);
}
} else {
wasm.tables.putAssumeCapacity(import.resolution, {});
@@ -1873,18 +2066,24 @@ fn markTable(
}
}
-fn globalResolutionRelocSlice(wasm: *Wasm, resolution: GlobalImport.Resolution) ![]const Relocation {
- assert(resolution != .none);
+fn globalResolutionRelocSlice(wasm: *Wasm, resolution: GlobalImport.Resolution) ![]const ObjectRelocation {
+ assert(resolution != .unresolved);
_ = wasm;
@panic("TODO");
}
-fn functionResolutionRelocSlice(wasm: *Wasm, resolution: FunctionImport.Resolution) ![]const Relocation {
- assert(resolution != .none);
+fn functionResolutionRelocSlice(wasm: *Wasm, resolution: FunctionImport.Resolution) ![]const ObjectRelocation {
+ assert(resolution != .unresolved);
_ = wasm;
@panic("TODO");
}
+fn markReloc(wasm: *Wasm, reloc: ObjectRelocation) !void {
+ _ = wasm;
+ _ = reloc;
+ @panic("TODO");
+}
+
pub fn flushModule(
wasm: *Wasm,
arena: Allocator,
@@ -2349,8 +2548,10 @@ fn defaultEntrySymbolName(
};
}
-pub fn internString(wasm: *Wasm, bytes: []const u8) error{OutOfMemory}!String {
+pub fn internString(wasm: *Wasm, bytes: []const u8) Allocator.Error!String {
assert(mem.indexOfScalar(u8, bytes, 0) == null);
+ wasm.string_bytes_lock.lock();
+ defer wasm.string_bytes_lock.unlock();
const gpa = wasm.base.comp.gpa;
const gop = try wasm.string_table.getOrPutContextAdapted(
gpa,
@@ -2371,6 +2572,13 @@ pub fn internString(wasm: *Wasm, bytes: []const u8) error{OutOfMemory}!String {
return new_off;
}
+// TODO implement instead by appending to string_bytes
+pub fn internStringFmt(wasm: *Wasm, comptime format: []const u8, args: anytype) Allocator.Error!String {
+ var buffer: [32]u8 = undefined;
+ const slice = std.fmt.bufPrint(&buffer, format, args) catch unreachable;
+ return internString(wasm, slice);
+}
+
pub fn getExistingString(wasm: *const Wasm, bytes: []const u8) ?String {
assert(mem.indexOfScalar(u8, bytes, 0) == null);
return wasm.string_table.getKeyAdapted(bytes, @as(String.TableIndexAdapter, .{
@@ -2378,17 +2586,17 @@ pub fn getExistingString(wasm: *const Wasm, bytes: []const u8) ?String {
}));
}
-pub fn internValtypeList(wasm: *Wasm, valtype_list: []const std.wasm.Valtype) error{OutOfMemory}!ValtypeList {
+pub fn internValtypeList(wasm: *Wasm, valtype_list: []const std.wasm.Valtype) Allocator.Error!ValtypeList {
return .fromString(try internString(wasm, @ptrCast(valtype_list)));
}
-pub fn addFuncType(wasm: *Wasm, ft: FunctionType) error{OutOfMemory}!FunctionType.Index {
+pub fn addFuncType(wasm: *Wasm, ft: FunctionType) Allocator.Error!FunctionType.Index {
const gpa = wasm.base.comp.gpa;
const gop = try wasm.func_types.getOrPut(gpa, ft);
return @enumFromInt(gop.index);
}
-pub fn addExpr(wasm: *Wasm, bytes: []const u8) error{OutOfMemory}!Expr {
+pub fn addExpr(wasm: *Wasm, bytes: []const u8) Allocator.Error!Expr {
const gpa = wasm.base.comp.gpa;
// We can't use string table deduplication here since these expressions can
// have null bytes in them; however, it may be interesting to explore since
@@ -2398,8 +2606,29 @@ pub fn addExpr(wasm: *Wasm, bytes: []const u8) error{OutOfMemory}!Expr {
return @enumFromInt(wasm.string_bytes.items.len - bytes.len);
}
-pub fn addRelocatableDataPayload(wasm: *Wasm, bytes: []const u8) error{OutOfMemory}!DataSegment.Payload {
+pub fn addRelocatableDataPayload(wasm: *Wasm, bytes: []const u8) Allocator.Error!DataSegment.Payload {
const gpa = wasm.base.comp.gpa;
try wasm.string_bytes.appendSlice(gpa, bytes);
return @enumFromInt(wasm.string_bytes.items.len - bytes.len);
}
+
+pub fn uavSymbolIndex(wasm: *Wasm, ip_index: InternPool.Index) Allocator.Error!SymbolTableIndex {
+ const comp = wasm.base.comp;
+ assert(comp.config.output_mode == .Obj);
+ const gpa = comp.gpa;
+ const name = try wasm.internStringFmt("__anon_{d}", .{@intFromEnum(ip_index)});
+ const gop = try wasm.symbol_table.getOrPut(gpa, name);
+ return @enumFromInt(gop.index);
+}
+
+pub fn navSymbolIndex(wasm: *Wasm, nav_index: InternPool.Nav.Index) Allocator.Error!SymbolTableIndex {
+ const comp = wasm.base.comp;
+ assert(comp.config.output_mode == .Obj);
+ const zcu = comp.zcu.?;
+ const ip = &zcu.intern_pool;
+ const gpa = comp.gpa;
+ const nav = ip.getNav(nav_index);
+ const name = try wasm.internString(nav.fqn.toSlice(ip));
+ const gop = try wasm.symbol_table.getOrPut(gpa, name);
+ return @enumFromInt(gop.index);
+}
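
Both helpers above funnel through the same intern-then-getOrPut shape, so asking for the same nav or uav twice yields the same `SymbolTableIndex`: the array hash map's entry index doubles as a stable symbol index (stable as long as nothing is swap-removed). A sketch of that dedup property, not tied to the real types:

const std = @import("std");

test "getOrPut index is stable across repeated lookups" {
    const gpa = std.testing.allocator;
    var table: std.StringArrayHashMapUnmanaged(void) = .{};
    defer table.deinit(gpa);
    const a = (try table.getOrPut(gpa, "__anon_42")).index;
    const b = (try table.getOrPut(gpa, "__anon_42")).index;
    try std.testing.expectEqual(a, b);
    const c = (try table.getOrPut(gpa, "some.other.symbol")).index;
    try std.testing.expect(c != a);
}
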
src/codegen.zig
@@ -2,7 +2,6 @@ const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
-const leb128 = std.leb;
const link = @import("link.zig");
const log = std.log.scoped(.codegen);
const mem = std.mem;
@@ -643,15 +642,19 @@ fn lowerUavRef(
const zcu = pt.zcu;
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
- const target = lf.comp.root_mod.resolved_target.result;
-
+ const comp = lf.comp;
+ const target = &comp.root_mod.resolved_target.result;
const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
+ const is_obj = comp.config.output_mode == .Obj;
const uav_val = uav.val;
const uav_ty = Type.fromInterned(ip.typeOf(uav_val));
- log.debug("lowerUavRef: ty = {}", .{uav_ty.fmt(pt)});
const is_fn_body = uav_ty.zigTypeTag(zcu) == .@"fn";
+
+ log.debug("lowerUavRef: ty = {}", .{uav_ty.fmt(pt)});
+ try code.ensureUnusedCapacity(gpa, ptr_width_bytes);
+
if (!is_fn_body and !uav_ty.hasRuntimeBits(zcu)) {
- try code.appendNTimes(gpa, 0xaa, ptr_width_bytes);
+ code.appendNTimesAssumeCapacity(0xaa, ptr_width_bytes);
return;
}
@@ -663,13 +666,20 @@ fn lowerUavRef(
dev.check(link.File.Tag.wasm.devFeature());
const wasm = lf.cast(.wasm).?;
assert(reloc_parent == .none);
- try wasm.relocations.append(gpa, .{
- .tag = .uav_index,
- .addend = @intCast(offset),
- .offset = @intCast(code.items.len),
- .pointee = .{ .uav_index = uav.val },
- });
- try code.appendNTimes(gpa, 0, ptr_width_bytes);
+ if (is_obj) {
+ try wasm.out_relocs.append(gpa, .{
+ .offset = @intCast(code.items.len),
+ .pointee = .{ .symbol_index = try wasm.uavSymbolIndex(uav.val) },
+ .tag = if (ptr_width_bytes == 4) .MEMORY_ADDR_I32 else .MEMORY_ADDR_I64,
+ .addend = @intCast(offset),
+ });
+ } else {
+ try wasm.uav_fixups.append(gpa, .{
+ .ip_index = uav.val,
+ .offset = @intCast(code.items.len),
+ });
+ }
+ code.appendNTimesAssumeCapacity(0, ptr_width_bytes);
return;
},
else => {},
@@ -688,9 +698,9 @@ fn lowerUavRef(
});
const endian = target.cpu.arch.endian();
switch (ptr_width_bytes) {
- 2 => mem.writeInt(u16, try code.addManyAsArray(gpa, 2), @intCast(vaddr), endian),
- 4 => mem.writeInt(u32, try code.addManyAsArray(gpa, 4), @intCast(vaddr), endian),
- 8 => mem.writeInt(u64, try code.addManyAsArray(gpa, 8), vaddr, endian),
+ 2 => mem.writeInt(u16, code.addManyAsArrayAssumeCapacity(2), @intCast(vaddr), endian),
+ 4 => mem.writeInt(u32, code.addManyAsArrayAssumeCapacity(4), @intCast(vaddr), endian),
+ 8 => mem.writeInt(u64, code.addManyAsArrayAssumeCapacity(8), vaddr, endian),
else => unreachable,
}
}
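
For final binaries this rewrite records a `uav_fixups` entry instead of a relocation; the natural reading is that each fixup remembers a code offset whose zero-filled pointer slot gets patched once the target address is known at flush time. A self-contained sketch of that patch step under that assumption; `Fixup` and `applyFixup` are hypothetical, wasm32 case shown:

const std = @import("std");

const Fixup = struct { offset: u32 };

// Overwrite the recorded pointer slot with the now-known address.
fn applyFixup(code: []u8, fixup: Fixup, addr: u32) void {
    std.mem.writeInt(u32, code[fixup.offset..][0..4], addr, .little);
}

test "patch a recorded pointer slot" {
    var code = [_]u8{0} ** 8;
    applyFixup(&code, .{ .offset = 4 }, 0x1234);
    try std.testing.expectEqual(@as(u32, 0x1234), std.mem.readInt(u32, code[4..8], .little));
}
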
@@ -709,12 +719,15 @@ fn lowerNavRef(
const gpa = zcu.gpa;
const ip = &zcu.intern_pool;
const target = zcu.navFileScope(nav_index).mod.resolved_target.result;
-
const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
+ const is_obj = lf.comp.config.output_mode == .Obj;
const nav_ty = Type.fromInterned(ip.getNav(nav_index).typeOf(ip));
const is_fn_body = nav_ty.zigTypeTag(zcu) == .@"fn";
+
+ try code.ensureUnusedCapacity(gpa, ptr_width_bytes);
+
if (!is_fn_body and !nav_ty.hasRuntimeBits(zcu)) {
- try code.appendNTimes(gpa, 0xaa, ptr_width_bytes);
+ code.appendNTimesAssumeCapacity(0xaa, ptr_width_bytes);
return;
}
@@ -726,13 +739,20 @@ fn lowerNavRef(
dev.check(link.File.Tag.wasm.devFeature());
const wasm = lf.cast(.wasm).?;
assert(reloc_parent == .none);
- try wasm.relocations.append(gpa, .{
- .tag = .nav_index,
- .addend = @intCast(offset),
- .offset = @intCast(code.items.len),
- .pointee = .{ .nav_index = nav_index },
- });
- try code.appendNTimes(gpa, 0, ptr_width_bytes);
+ if (is_obj) {
+ try wasm.out_relocs.append(gpa, .{
+ .offset = @intCast(code.items.len),
+ .pointee = .{ .symbol_index = try wasm.navSymbolIndex(nav_index) },
+ .tag = if (ptr_width_bytes == 4) .MEMORY_ADDR_I32 else .MEMORY_ADDR_I64,
+ .addend = @intCast(offset),
+ });
+ } else {
+ try wasm.nav_fixups.append(gpa, .{
+ .nav_index = nav_index,
+ .offset = @intCast(code.items.len),
+ });
+ }
+ code.appendNTimesAssumeCapacity(0, ptr_width_bytes);
return;
},
else => {},
@@ -745,9 +765,9 @@ fn lowerNavRef(
}) catch @panic("TODO rework getNavVAddr");
const endian = target.cpu.arch.endian();
switch (ptr_width_bytes) {
- 2 => mem.writeInt(u16, try code.addManyAsArray(gpa, 2), @intCast(vaddr), endian),
- 4 => mem.writeInt(u32, try code.addManyAsArray(gpa, 4), @intCast(vaddr), endian),
- 8 => mem.writeInt(u64, try code.addManyAsArray(gpa, 8), vaddr, endian),
+ 2 => mem.writeInt(u16, code.addManyAsArrayAssumeCapacity(2), @intCast(vaddr), endian),
+ 4 => mem.writeInt(u32, code.addManyAsArrayAssumeCapacity(4), @intCast(vaddr), endian),
+ 8 => mem.writeInt(u64, code.addManyAsArrayAssumeCapacity(8), vaddr, endian),
else => unreachable,
}
}
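
One pattern recurs across both the `lowerUavRef` and `lowerNavRef` rewrites: a single fallible `ensureUnusedCapacity` up front, after which every append on any exit path uses an `...AssumeCapacity` variant and needs no error handling. A runnable distillation; everything here is plain std, nothing from the commit:

const std = @import("std");

test "reserve once, then append infallibly" {
    const gpa = std.testing.allocator;
    var code: std.ArrayListUnmanaged(u8) = .{};
    defer code.deinit(gpa);
    // One fallible reservation up front...
    try code.ensureUnusedCapacity(gpa, 8);
    // ...then the appends themselves cannot fail, as in the rewrites above.
    code.appendNTimesAssumeCapacity(0xaa, 4);
    std.mem.writeInt(u32, code.addManyAsArrayAssumeCapacity(4), 0x1234, .little);
    try std.testing.expectEqual(@as(usize, 8), code.items.len);
}
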