Commit 13eef2f8ab
src/link/Wasm/Flush.zig
@@ -19,6 +19,9 @@ const leb = std.leb;
const log = std.log.scoped(.link);
const assert = std.debug.assert;
+/// Ordered list of data segments that will appear in the final binary.
+/// When sorted, to-be-merged segments will be made adjacent.
+/// Values are offset relative to segment start.
data_segments: std.AutoArrayHashMapUnmanaged(Wasm.DataSegment.Id, u32) = .empty,
/// Each time a `data_segment` offset equals zero it indicates a new group, and
/// the next element in this array will contain the total merged segment size.
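
The doc comment moved here from Wasm.zig defines the grouping convention that the rest of flush consumes: values are offsets relative to the merged segment a key belongs to, a zero offset marks the first segment of a new group, and `data_segment_groups` carries one total size per group. A minimal sketch of that convention, with a made-up `SegmentId` standing in for `Wasm.DataSegment.Id`:

    const std = @import("std");

    // Hypothetical segment ids; the real key type is Wasm.DataSegment.Id.
    const SegmentId = enum { rodata_a, rodata_b, bss_c };

    test "data segment grouping convention" {
        const gpa = std.testing.allocator;

        var data_segments: std.AutoArrayHashMapUnmanaged(SegmentId, u32) = .empty;
        defer data_segments.deinit(gpa);
        var data_segment_groups: std.ArrayListUnmanaged(u32) = .{};
        defer data_segment_groups.deinit(gpa);

        // After sorting, segments that merge are adjacent: rodata_a and
        // rodata_b form one group, bss_c starts another. A zero offset marks
        // the first segment of a group, and data_segment_groups holds one
        // total merged size per group.
        try data_segments.put(gpa, .rodata_a, 0);
        try data_segments.put(gpa, .rodata_b, 16);
        try data_segments.put(gpa, .bss_c, 0);
        try data_segment_groups.append(gpa, 24);
        try data_segment_groups.append(gpa, 8);

        try std.testing.expectEqual(@as(u32, 16), data_segments.get(.rodata_b).?);
        try std.testing.expectEqual(@as(usize, 2), data_segment_groups.items.len);
    }
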
@@ -35,8 +38,9 @@ indirect_function_table: std.AutoArrayHashMapUnmanaged(Wasm.OutputFunctionIndex,
memory_layout_finished: bool = false,
pub fn clear(f: *Flush) void {
- f.binary_bytes.clearRetainingCapacity();
+ f.data_segments.clearRetainingCapacity();
f.data_segment_groups.clearRetainingCapacity();
+ f.binary_bytes.clearRetainingCapacity();
f.indirect_function_table.clearRetainingCapacity();
f.memory_layout_finished = false;
}
@@ -138,12 +142,30 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
// Merge and order the data segments. Depends on garbage collection so that
// unused segments can be omitted.
- try f.data_segments.ensureUnusedCapacity(gpa, wasm.object_data_segments.items.len + 1);
+ try f.data_segments.ensureUnusedCapacity(gpa, wasm.object_data_segments.items.len +
+ wasm.uavs_obj.entries.len + wasm.navs_obj.entries.len +
+ wasm.uavs_exe.entries.len + wasm.navs_exe.entries.len + 1);
+ if (is_obj) assert(wasm.uavs_exe.entries.len == 0);
+ if (is_obj) assert(wasm.navs_exe.entries.len == 0);
+ if (!is_obj) assert(wasm.uavs_obj.entries.len == 0);
+ if (!is_obj) assert(wasm.navs_obj.entries.len == 0);
+ for (0..wasm.uavs_obj.entries.len) |uavs_index| f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
+ .uav_obj = @enumFromInt(uavs_index),
+ }), @as(u32, undefined));
+ for (0..wasm.navs_obj.entries.len) |navs_index| f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
+ .nav_obj = @enumFromInt(navs_index),
+ }), @as(u32, undefined));
+ for (0..wasm.uavs_exe.entries.len) |uavs_index| f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
+ .uav_exe = @enumFromInt(uavs_index),
+ }), @as(u32, undefined));
+ for (0..wasm.navs_exe.entries.len) |navs_index| f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
+ .nav_exe = @enumFromInt(navs_index),
+ }), @as(u32, undefined));
for (wasm.object_data_segments.items, 0..) |*ds, i| {
if (!ds.flags.alive) continue;
const data_segment_index: Wasm.ObjectDataSegmentIndex = @enumFromInt(i);
any_passive_inits = any_passive_inits or ds.flags.is_passive or (import_memory and !wasm.isBss(ds.name));
- f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
+ _ = f.data_segments.putAssumeCapacityNoClobber(.pack(wasm, .{
.object = data_segment_index,
}), @as(u32, undefined));
}
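
This hunk fills `f.data_segments` with keys only, leaving every offset undefined until memory layout runs later in `finish`. Reserving capacity once for all sources (object segments plus the uav/nav tables) is what allows the loops to use `putAssumeCapacityNoClobber`, which neither allocates nor tolerates duplicate keys. A stripped-down sketch of that reserve-then-insert pattern, with an invented key type:

    const std = @import("std");

    // Invented stand-ins for the various segment sources in the commit.
    const Key = enum { object_0, uav_0, nav_0 };

    test "reserve once, then insert without allocating" {
        const gpa = std.testing.allocator;
        var map: std.AutoArrayHashMapUnmanaged(Key, u32) = .empty;
        defer map.deinit(gpa);

        // One ensureUnusedCapacity call sized for every source, as the commit
        // sums the object, uav, and nav counts before the loops.
        try map.ensureUnusedCapacity(gpa, 3);

        // No allocation or error handling needed past this point; NoClobber
        // also asserts each key is inserted exactly once.
        map.putAssumeCapacityNoClobber(.object_0, undefined);
        map.putAssumeCapacityNoClobber(.uav_0, undefined);
        map.putAssumeCapacityNoClobber(.nav_0, undefined);

        try std.testing.expectEqual(@as(usize, 3), map.count());
    }
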
@@ -644,6 +666,8 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
const code_start = try reserveSize(gpa, binary_bytes);
defer replaceSize(binary_bytes, code_start);
+ log.debug("lowering function code for '{s}'", .{resolution.name(wasm).?});
+
try i.value(wasm).function.lower(wasm, binary_bytes);
},
};
src/link/Wasm.zig
@@ -235,11 +235,6 @@ global_imports: std.AutoArrayHashMapUnmanaged(String, GlobalImportId) = .empty,
tables: std.AutoArrayHashMapUnmanaged(TableImport.Resolution, void) = .empty,
table_imports: std.AutoArrayHashMapUnmanaged(String, TableImport.Index) = .empty,
-/// Ordered list of data segments that will appear in the final binary.
-/// When sorted, to-be-merged segments will be made adjacent.
-/// Values are offset relative to segment start.
-data_segments: std.AutoArrayHashMapUnmanaged(Wasm.DataSegment.Id, void) = .empty,
-
error_name_table_ref_count: u32 = 0,
/// Set to true if any `GLOBAL_INDEX` relocation is encountered with
@@ -2360,7 +2355,6 @@ pub fn deinit(wasm: *Wasm) void {
wasm.global_exports.deinit(gpa);
wasm.global_imports.deinit(gpa);
wasm.table_imports.deinit(gpa);
- wasm.data_segments.deinit(gpa);
wasm.symbol_table.deinit(gpa);
wasm.out_relocs.deinit(gpa);
wasm.uav_fixups.deinit(gpa);
@@ -2416,13 +2410,13 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
const gpa = comp.gpa;
const is_obj = comp.config.output_mode == .Obj;
- const nav_init = switch (ip.indexToKey(nav.status.resolved.val)) {
+ const nav_init, const chased_nav_index = switch (ip.indexToKey(nav.status.resolved.val)) {
.func => return, // global const which is a function alias
.@"extern" => |ext| {
if (is_obj) {
- assert(!wasm.navs_obj.contains(nav_index));
+ assert(!wasm.navs_obj.contains(ext.owner_nav));
} else {
- assert(!wasm.navs_exe.contains(nav_index));
+ assert(!wasm.navs_exe.contains(ext.owner_nav));
}
const name = try wasm.internString(ext.name.toSlice(ip));
if (ext.lib_name.toSlice(ip)) |ext_name| _ = try wasm.internString(ext_name);
@@ -2436,27 +2430,28 @@ pub fn updateNav(wasm: *Wasm, pt: Zcu.PerThread, nav_index: InternPool.Nav.Index
}
return;
},
- .variable => |variable| variable.init,
- else => nav.status.resolved.val,
+ .variable => |variable| .{ variable.init, variable.owner_nav },
+ else => .{ nav.status.resolved.val, nav_index },
};
- assert(!wasm.imports.contains(nav_index));
+ log.debug("updateNav {} {}", .{ nav.fqn.fmt(ip), chased_nav_index });
+ assert(!wasm.imports.contains(chased_nav_index));
if (nav_init != .none and !Value.fromInterned(nav_init).typeOf(zcu).hasRuntimeBits(zcu)) {
if (is_obj) {
- assert(!wasm.navs_obj.contains(nav_index));
+ assert(!wasm.navs_obj.contains(chased_nav_index));
} else {
- assert(!wasm.navs_exe.contains(nav_index));
+ assert(!wasm.navs_exe.contains(chased_nav_index));
}
return;
}
if (is_obj) {
const zcu_data_starts: ZcuDataStarts = .initObj(wasm);
- _ = try refNavObj(wasm, nav_index); // Possibly creates an entry in `Wasm.navs_obj`.
+ _ = try refNavObj(wasm, chased_nav_index); // Possibly creates an entry in `Wasm.navs_obj`.
try zcu_data_starts.finishObj(wasm, pt);
} else {
const zcu_data_starts: ZcuDataStarts = .initExe(wasm);
- _ = try refNavExe(wasm, nav_index); // Possibly creates an entry in `Wasm.navs_exe`.
+ _ = try refNavExe(wasm, chased_nav_index); // Possibly creates an entry in `Wasm.navs_exe`.
try zcu_data_starts.finishExe(wasm, pt);
}
}
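
The updateNav change resolves the nav that actually owns the value (chased through the `variable` payload) and uses it, rather than the incoming `nav_index`, to key the `navs_obj`/`navs_exe` maps and assertions; the destructured switch yields the initializer and the chased index in one step. A small self-contained illustration of that shape, with invented types rather than Zcu's:

    const std = @import("std");

    // Invented stand-ins: a variable decl records the nav that owns its data.
    const NavIndex = enum(u32) { _ };
    const Decl = union(enum) {
        variable: struct { init: u32, owner_nav: NavIndex },
        plain: u32,
    };
    const Resolved = struct { init: u32, nav: NavIndex };

    fn chase(decl: Decl, nav_index: NavIndex) Resolved {
        // Mirrors the commit's `const nav_init, const chased_nav_index = switch (...)`.
        const init, const chased_nav = switch (decl) {
            .variable => |v| .{ v.init, v.owner_nav },
            .plain => |val| .{ val, nav_index },
        };
        return .{ .init = init, .nav = chased_nav };
    }

    test "map is keyed by the chased owner nav" {
        const gpa = std.testing.allocator;
        var navs: std.AutoArrayHashMapUnmanaged(NavIndex, u32) = .empty;
        defer navs.deinit(gpa);

        const owner: NavIndex = @enumFromInt(3);
        const alias: NavIndex = @enumFromInt(9);

        // Two incoming nav indices that share an owner land in one entry.
        const a = chase(.{ .variable = .{ .init = 7, .owner_nav = owner } }, alias);
        const b = chase(.{ .variable = .{ .init = 7, .owner_nav = owner } }, owner);
        try navs.put(gpa, a.nav, a.init);
        try navs.put(gpa, b.nav, b.init);

        try std.testing.expectEqual(@as(usize, 1), navs.count());
    }
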
@@ -2820,12 +2815,8 @@ pub fn flushModule(
const globals_end_zcu: u32 = @intCast(wasm.globals.entries.len);
defer wasm.globals.shrinkRetainingCapacity(globals_end_zcu);
- const data_segments_end_zcu: u32 = @intCast(wasm.data_segments.entries.len);
- defer wasm.data_segments.shrinkRetainingCapacity(data_segments_end_zcu);
-
wasm.flush_buffer.clear();
try wasm.flush_buffer.missing_exports.reinit(gpa, wasm.missing_exports.keys(), &.{});
- try wasm.flush_buffer.data_segments.reinit(gpa, wasm.data_segments.keys(), &.{});
try wasm.flush_buffer.function_imports.reinit(gpa, wasm.function_imports.keys(), wasm.function_imports.values());
try wasm.flush_buffer.global_imports.reinit(gpa, wasm.global_imports.keys(), wasm.global_imports.values());
@@ -3427,16 +3418,13 @@ pub fn refUavObj(wasm: *Wasm, ip_index: InternPool.Index) !UavsObjIndex {
const comp = wasm.base.comp;
const gpa = comp.gpa;
assert(comp.config.output_mode == .Obj);
- try wasm.data_segments.ensureUnusedCapacity(gpa, 1);
const gop = try wasm.uavs_obj.getOrPut(gpa, ip_index);
if (!gop.found_existing) gop.value_ptr.* = .{
// Lowering the value is delayed to avoid recursion.
.code = undefined,
.relocs = undefined,
};
- const uav_index: UavsObjIndex = @enumFromInt(gop.index);
- wasm.data_segments.putAssumeCapacity(.pack(wasm, .{ .uav_obj = uav_index }), {});
- return uav_index;
+ return @enumFromInt(gop.index);
}
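
With the `wasm.data_segments` bookkeeping removed, refUavObj (like its exe and nav siblings below) reduces to a getOrPut that deduplicates by key and returns a stable index, leaving `code`/`relocs` undefined because lowering is deferred to avoid recursion. The same pattern in isolation, with a hypothetical entry type:

    const std = @import("std");

    // Hypothetical payload; in the commit this is the lowered code and relocs.
    const Entry = struct { code: u32 };

    // Deduplicate by key and hand back a stable index; a fresh entry's payload
    // is left undefined so it can be filled in later.
    fn ref(gpa: std.mem.Allocator, map: *std.AutoArrayHashMapUnmanaged(u64, Entry), key: u64) !u32 {
        const gop = try map.getOrPut(gpa, key);
        if (!gop.found_existing) gop.value_ptr.* = .{ .code = undefined };
        return @intCast(gop.index);
    }

    test "getOrPut returns a stable index" {
        const gpa = std.testing.allocator;
        var map: std.AutoArrayHashMapUnmanaged(u64, Entry) = .empty;
        defer map.deinit(gpa);

        const first = try ref(gpa, &map, 0xabc);
        const again = try ref(gpa, &map, 0xabc);
        try std.testing.expectEqual(first, again);
        try std.testing.expectEqual(@as(usize, 1), map.count());
    }
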
pub fn refUavExe(wasm: *Wasm, ip_index: InternPool.Index) !UavsExeIndex {
@@ -3453,9 +3441,7 @@ pub fn refUavExe(wasm: *Wasm, ip_index: InternPool.Index) !UavsExeIndex {
.count = 1,
};
}
- const uav_index: UavsExeIndex = @enumFromInt(gop.index);
- try wasm.data_segments.put(gpa, .pack(wasm, .{ .uav_exe = uav_index }), {});
- return uav_index;
+ return @enumFromInt(gop.index);
}
pub fn refNavObj(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsObjIndex {
@@ -3468,9 +3454,7 @@ pub fn refNavObj(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsObjIndex {
.code = undefined,
.relocs = undefined,
};
- const navs_obj_index: NavsObjIndex = @enumFromInt(gop.index);
- try wasm.data_segments.put(gpa, .pack(wasm, .{ .nav_obj = navs_obj_index }), {});
- return navs_obj_index;
+ return @enumFromInt(gop.index);
}
pub fn refNavExe(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsExeIndex {
@@ -3487,9 +3471,7 @@ pub fn refNavExe(wasm: *Wasm, nav_index: InternPool.Nav.Index) !NavsExeIndex {
.count = 0,
};
}
- const navs_exe_index: NavsExeIndex = @enumFromInt(gop.index);
- try wasm.data_segments.put(gpa, .pack(wasm, .{ .nav_exe = navs_exe_index }), {});
- return navs_exe_index;
+ return @enumFromInt(gop.index);
}
/// Asserts it is called after `Flush.data_segments` is fully populated and sorted.
@@ -3506,7 +3488,9 @@ pub fn navAddr(wasm: *Wasm, nav_index: InternPool.Nav.Index) u32 {
assert(wasm.flush_buffer.memory_layout_finished);
const comp = wasm.base.comp;
assert(comp.config.output_mode != .Obj);
- const ds_id: DataSegment.Id = .pack(wasm, .{ .nav_exe = @enumFromInt(wasm.navs_exe.getIndex(nav_index).?) });
+ const navs_exe_index: NavsExeIndex = @enumFromInt(wasm.navs_exe.getIndex(nav_index).?);
+ log.debug("navAddr {s} {}", .{ navs_exe_index.name(wasm), nav_index });
+ const ds_id: DataSegment.Id = .pack(wasm, .{ .nav_exe = navs_exe_index });
return wasm.flush_buffer.data_segments.get(ds_id).?;
}
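
navAddr packs the segment id from the `navs_exe` index and resolves it against `flush_buffer.data_segments`, which per the doc comment above is only meaningful once the map has been populated, sorted, and laid out. A tiny sketch of that guard-then-lookup contract, with invented types:

    const std = @import("std");
    const assert = std.debug.assert;

    // Invented miniature of the flush buffer: ids map to final offsets, and
    // the flag records whether memory layout has been computed yet.
    const Id = enum(u32) { _ };
    const FlushBuffer = struct {
        data_segments: std.AutoArrayHashMapUnmanaged(Id, u32) = .empty,
        memory_layout_finished: bool = false,

        fn addr(f: *const FlushBuffer, id: Id) u32 {
            assert(f.memory_layout_finished); // caller contract from the doc comment
            return f.data_segments.get(id).?;
        }
    };

    test "addresses are only queryable after layout" {
        const gpa = std.testing.allocator;
        var f: FlushBuffer = .{};
        defer f.data_segments.deinit(gpa);

        const id: Id = @enumFromInt(0);
        try f.data_segments.put(gpa, id, 0x1000);
        f.memory_layout_finished = true;

        try std.testing.expectEqual(@as(u32, 0x1000), f.addr(id));
    }
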