Commit e62bb1d689
Changed files (1)
src/arch/wasm/CodeGen.zig
@@ -582,7 +582,7 @@ pub const Result = union(enum) {
};
/// Hashmap to store generated `WValue` for each `Air.Inst.Ref`
-pub const ValueTable = std.AutoHashMapUnmanaged(Air.Inst.Ref, WValue);
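+/// An array hashmap is used so that branch consolidation can iterate the
+/// stored entries as contiguous slices (see `canonicaliseBranches`).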
+pub const ValueTable = std.AutoArrayHashMapUnmanaged(Air.Inst.Ref, WValue);
const Self = @This();
@@ -598,8 +598,13 @@ liveness: Liveness,
gpa: mem.Allocator,
debug_output: codegen.DebugInfoOutput,
mod_fn: *const Module.Fn,
+/// Contains a list of current branches.
+/// When we return from a branch, it is popped from this list,
+/// which means a branch can only contain references to values from within
+/// its own branch, or from a branch higher up (lower index) in the stack.
+branches: std.ArrayListUnmanaged(Branch) = .{},
-/// Table to save `WValue`'s generated by an `Air.Inst`
-values: ValueTable,
/// Mapping from Air.Inst.Index to block ids
blocks: std.AutoArrayHashMapUnmanaged(Air.Inst.Index, struct {
label: u32,
@@ -682,7 +687,11 @@ const InnerError = error{
};
pub fn deinit(self: *Self) void {
- self.values.deinit(self.gpa);
+ for (self.branches.items) |*branch| {
+ branch.deinit(self.gpa);
+ }
+ self.branches.deinit(self.gpa);
self.blocks.deinit(self.gpa);
self.locals.deinit(self.gpa);
self.mir_instructions.deinit(self.gpa);
@@ -705,11 +714,21 @@ fn fail(self: *Self, comptime fmt: []const u8, args: anytype) InnerError {
/// Resolves the `WValue` for the given instruction `inst`
/// When the given instruction has a `Value`, it returns a constant instead
fn resolveInst(self: *Self, ref: Air.Inst.Ref) InnerError!WValue {
- const gop = try self.values.getOrPut(self.gpa, ref);
- if (gop.found_existing) return gop.value_ptr.*;
+ var branch_index = self.branches.items.len;
+ while (branch_index > 0) : (branch_index -= 1) {
+ const branch = self.branches.items[branch_index - 1];
+ if (branch.values.get(ref)) |value| {
+ return value;
+ }
+ }
// when we did not find an existing instruction, it
// means we must generate it from a constant.
+ // Constants are always stored in the outermost branch (index 0), as they
+ // must never be removed when a branch is popped.
+ const gop = try self.branches.items[0].values.getOrPut(self.gpa, ref);
+ assert(!gop.found_existing);
+
const val = self.air.value(ref).?;
const ty = self.air.typeOf(ref);
if (!ty.hasRuntimeBitsIgnoreComptime() and !ty.isInt() and !ty.isError()) {
@@ -745,7 +764,8 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: WValue, operands: []cons
// results of `none` can never be referenced.
if (result != .none) {
assert(result != .stack); // it's illegal to store a stack value as we cannot track its position
- self.values.putAssumeCapacityNoClobber(Air.indexToRef(inst), result);
+ const branch = self.currentBranch();
+ branch.values.putAssumeCapacityNoClobber(Air.indexToRef(inst), result);
}
if (builtin.mode == .Debug) {
@@ -753,6 +773,18 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: WValue, operands: []cons
}
}
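+/// A single branch in the branch stack. All `WValue`s generated by
+/// instructions within this branch are stored in its `values` table.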
+const Branch = struct {
+ values: ValueTable = .{},
+
+ fn deinit(branch: *Branch, gpa: Allocator) void {
+ branch.values.deinit(gpa);
+ }
+};
+
+inline fn currentBranch(self: *Self) *Branch {
+ return &self.branches.items[self.branches.items.len - 1];
+}
+
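+// A minimal sketch of the push/pop discipline (illustrative only; see
+// `genFunc` and `airCondBr` for the real usage, including consolidation):
+//
+//     try self.branches.append(self.gpa, .{}); // enter a new scope
+//     try self.genBody(body);                  // results land in currentBranch()
+//     var branch = self.branches.pop();        // leave the scope again
+//     branch.deinit(self.gpa);                 // popped branches are freed manually
+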
const BigTomb = struct {
gen: *Self,
inst: Air.Inst.Index,
@@ -768,7 +800,7 @@ const BigTomb = struct {
fn finishAir(bt: *BigTomb, result: WValue) void {
assert(result != .stack);
if (result != .none) {
- bt.gen.values.putAssumeCapacityNoClobber(Air.indexToRef(bt.inst), result);
+ bt.gen.currentBranch().values.putAssumeCapacityNoClobber(Air.indexToRef(bt.inst), result);
}
if (builtin.mode == .Debug) {
@@ -778,7 +810,7 @@ const BigTomb = struct {
};
fn iterateBigTomb(self: *Self, inst: Air.Inst.Index, operand_count: usize) !BigTomb {
- try self.values.ensureUnusedCapacity(self.gpa, @intCast(u32, operand_count + 1));
+ try self.currentBranch().values.ensureUnusedCapacity(self.gpa, @intCast(u32, operand_count + 1));
return BigTomb{
.gen = self,
.inst = inst,
@@ -789,7 +821,10 @@ fn iterateBigTomb(self: *Self, inst: Air.Inst.Index, operand_count: usize) !BigT
fn processDeath(self: *Self, ref: Air.Inst.Ref) void {
const inst = Air.refToIndex(ref) orelse return;
if (self.air.instructions.items(.tag)[inst] == .constant) return;
- const value = self.values.getPtr(ref) orelse return;
+ // Branches are currently only allowed to free locals allocated
+ // within their own branch.
+ // TODO: Upon branch consolidation, free any locals if needed.
+ const value = self.currentBranch().values.getPtr(ref) orelse return;
if (value.* != .local) return;
std.debug.print("Decreasing reference for ref: %{?d}\n", .{Air.refToIndex(ref)});
value.local.references -= 1; // if this panics, a call to `reuseOperand` was forgotten by the developer
@@ -924,17 +959,28 @@ fn emitWValue(self: *Self, value: WValue) InnerError!void {
/// returns the given `operand` itself instead.
fn reuseOperand(self: *Self, ref: Air.Inst.Ref, operand: WValue) WValue {
if (operand != .local and operand != .stack_offset) return operand;
- var copy = operand;
- switch (copy) {
+ var new_value = operand;
+ switch (new_value) {
.local => |*local| local.references += 1,
.stack_offset => |*stack_offset| stack_offset.references += 1,
else => unreachable,
}
-
- const gop = self.values.getOrPutAssumeCapacity(ref);
- assert(gop.found_existing);
- gop.value_ptr.* = copy;
- return copy;
+ const old_value = self.getResolvedInst(ref);
+ old_value.* = new_value;
+ return new_value;
+}
+
+/// From a reference, returns its resolved `WValue`.
+/// It is illegal to provide an `Air.Inst.Ref` that has not been resolved yet.
+fn getResolvedInst(self: *Self, ref: Air.Inst.Ref) *WValue {
+ var index = self.branches.items.len;
+ while (index > 0) : (index -= 1) {
+ const branch = self.branches.items[index - 1];
+ if (branch.values.getPtr(ref)) |value| {
+ return value;
+ }
+ }
+ unreachable; // developer error: only resolved instructions may be queried; use `resolveInst` instead.
}
/// Creates one local for a given `Type`.
@@ -1035,7 +1081,7 @@ pub fn generate(
.gpa = bin_file.allocator,
.air = air,
.liveness = liveness,
- .values = .{},
.code = code,
.decl_index = func.owner_decl,
.decl = bin_file.options.module.?.declPtr(func.owner_decl),
@@ -1070,8 +1116,13 @@ fn genFunc(self: *Self) InnerError!void {
try self.addTag(.dbg_prologue_end);
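+ // Push the outermost branch; it stores constants as well as
+ // function-level values and lives for the entire function body.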
+ try self.branches.append(self.gpa, .{});
// Generate MIR for function body
try self.genBody(self.air.getMainBody());
+
+ // Clean up the outermost branch. Popping removes it from `branches`,
+ // so `deinit` would no longer free it.
+ var outer_branch = self.branches.pop();
+ outer_branch.deinit(self.gpa);
+
// In case we have a return value, but the last instruction is a noreturn (such as a while loop)
// we emit an unreachable instruction to tell the stack validator that part will never be reached.
if (func_type.returns.len != 0 and self.air.instructions.len > 0) {
@@ -1837,7 +1888,7 @@ fn genInst(self: *Self, inst: Air.Inst.Index) InnerError!void {
fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
for (body) |inst| {
const old_bookkeeping_value = self.air_bookkeeping;
- try self.values.ensureUnusedCapacity(self.gpa, Liveness.bpi);
+ try self.currentBranch().values.ensureUnusedCapacity(self.gpa, Liveness.bpi);
try self.genInst(inst);
if (builtin.mode == .Debug and self.air_bookkeeping < old_bookkeeping_value + 1) {
@@ -2779,7 +2830,7 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
const extra = self.air.extraData(Air.CondBr, pl_op.payload);
const then_body = self.air.extra[extra.end..][0..extra.data.then_body_len];
const else_body = self.air.extra[extra.end + then_body.len ..][0..extra.data.else_body_len];
- // const liveness_condbr = self.liveness.getCondBr(inst);
+ const liveness_condbr = self.liveness.getCondBr(inst);
// result type is always noreturn, so use `block_empty` as type.
try self.startBlock(.block, wasm.block_empty);
@@ -2791,15 +2842,70 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
// and continue with the then codepath
try self.addLabel(.br_if, 0);
+ try self.branches.ensureUnusedCapacity(self.gpa, 2);
+
+ // Collect the values generated by the else body in a fresh branch,
+ // so they can be consolidated with the then branch afterwards.
+ self.branches.appendAssumeCapacity(.{});
+ try self.currentBranch().values.ensureUnusedCapacity(self.gpa, @intCast(u32, liveness_condbr.else_deaths.len));
+ for (liveness_condbr.else_deaths) |death| {
+ self.processDeath(Air.indexToRef(death));
+ }
try self.genBody(else_body);
try self.endBlock();
+ var else_stack = self.branches.pop();
+ defer else_stack.deinit(self.gpa);
// Outer block that matches the condition
+ self.branches.appendAssumeCapacity(.{});
+ try self.currentBranch().values.ensureUnusedCapacity(self.gpa, @intCast(u32, liveness_condbr.then_deaths.len));
+ for (liveness_condbr.then_deaths) |death| {
+ self.processDeath(Air.indexToRef(death));
+ }
try self.genBody(then_body);
+ var then_stack = self.branches.pop();
+ defer then_stack.deinit(self.gpa);
+
+ try self.canonicaliseBranches(&then_stack, &else_stack);
+ // TODO: During branch consolidation, also process deaths from outer branches.
self.finishAir(inst, .none, &.{});
}
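+
+/// Consolidates the values of both given branches into the current branch.
+/// When both branches assign a value to the same `Air.Inst.Ref`, the value
+/// of `canon_branch` wins and is removed from that branch in the process.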
+fn canonicaliseBranches(self: *Self, canon_branch: *Branch, target_branch: *Branch) !void {
+ const parent = self.currentBranch();
+
+ const target_slice = target_branch.values.entries.slice();
+ const target_keys = target_slice.items(.key);
+ const target_values = target_slice.items(.value);
+
+ try parent.values.ensureUnusedCapacity(self.gpa, target_branch.values.count());
+ for (target_keys) |key, index| {
+ // When both branches assigned a value to the same instruction, the
+ // canonical branch's value wins. Remove it from `canon_branch` so the
+ // loop below does not merge it a second time.
+ const canon_value = if (canon_branch.values.fetchSwapRemove(key)) |canon_entry|
+ canon_entry.value
+ else
+ target_values[index];
+
+ parent.values.putAssumeCapacity(key, canon_value);
+ }
+
+ try parent.values.ensureUnusedCapacity(self.gpa, canon_branch.values.count());
+ const canon_slice = canon_branch.values.entries.slice();
+ const canon_keys = canon_slice.items(.key);
+ const canon_values = canon_slice.items(.value);
+ for (canon_keys) |key, index| {
+ parent.values.putAssumeCapacity(key, canon_values[index]);
+ }
+}
+
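+// Worked example with hypothetical entries: given parent = {}, canon (then) =
+// {%1 => local 0, %2 => local 1} and target (else) = {%2 => local 2, %3 => local 3},
+// consolidation yields parent = {%2 => local 1, %3 => local 3, %1 => local 0}:
+// %2 keeps the canonical value; all remaining entries are copied over verbatim.
+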
fn airCmp(self: *Self, inst: Air.Inst.Index, op: std.math.CompareOperator) InnerError!void {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
if (self.liveness.isUnused(inst)) return self.finishAir(inst, .none, &.{ bin_op.lhs, bin_op.rhs });
@@ -3176,6 +3282,7 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
break :blk target_ty.intInfo(self.target).signedness;
};
+ try self.branches.ensureUnusedCapacity(self.gpa, case_list.items.len + @boolToInt(has_else_body));
for (case_list.items) |case| {
// when sparse, we use if/else-chain, so emit conditional checks
if (is_sparse) {
@@ -3211,13 +3318,18 @@ fn airSwitchBr(self: *Self, inst: Air.Inst.Index) InnerError!void {
try self.endBlock();
}
}
+ self.branches.appendAssumeCapacity(.{});
try self.genBody(case.body);
try self.endBlock();
+ // Branches are no longer freed by `deinit` once popped, so free it here.
+ var case_branch = self.branches.pop();
+ case_branch.deinit(self.gpa);
}
if (has_else_body) {
+ self.branches.appendAssumeCapacity(.{});
try self.genBody(else_body);
try self.endBlock();
+ var else_branch = self.branches.pop();
+ else_branch.deinit(self.gpa);
}
self.finishAir(inst, .none, &.{});
}
@@ -3722,7 +3834,7 @@ fn airPtrToInt(self: *Self, inst: Air.Inst.Index) InnerError!void {
const result = switch (operand) {
// for stack offset, return a pointer to this offset.
.stack_offset => try self.buildPointerOffset(operand, 0, .new),
- else => operand,
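+ // Reuse the operand; for locals and stack offsets this bumps the reference
+ // count so `processDeath` does not free them while the result aliases them.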
+ else => self.reuseOperand(un_op, operand),
};
self.finishAir(inst, result, &.{un_op});
}