Commit ac954eb539
Changed files (6)
src
arch
test
behavior
src/arch/aarch64/CodeGen.zig
@@ -23,6 +23,7 @@ const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
const RegisterManager = RegisterManagerFn(Self, Register, &callee_preserved_regs);
+const RegisterLock = RegisterManager.RegisterLock;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
const FnResult = @import("../../codegen.zig").FnResult;
@@ -910,16 +911,16 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
fn spillCompareFlagsIfOccupied(self: *Self) !void {
if (self.compare_flags_inst) |inst_to_save| {
const mcv = self.getResolvedInstValue(inst_to_save);
- switch (mcv) {
+ const new_mcv = switch (mcv) {
.compare_flags_signed,
.compare_flags_unsigned,
+ => try self.allocRegOrMem(inst_to_save, true),
.register_c_flag,
.register_v_flag,
- => {},
+ => try self.allocRegOrMem(inst_to_save, false),
else => unreachable, // mcv doesn't occupy the compare flags
- }
+ };
- const new_mcv = try self.allocRegOrMem(inst_to_save, true);
try self.setRegOrMem(self.air.typeOfIndex(inst_to_save), new_mcv, mcv);
log.debug("spilling {d} to mcv {any}", .{ inst_to_save, new_mcv });
@@ -927,6 +928,15 @@ fn spillCompareFlagsIfOccupied(self: *Self) !void {
try branch.inst_table.put(self.gpa, inst_to_save, new_mcv);
self.compare_flags_inst = null;
+
+ // TODO consolidate with register manager and spillInstruction
+ // this call should really belong in the register manager!
+ switch (mcv) {
+ .register_c_flag,
+ .register_v_flag,
+ => |reg| self.register_manager.freeReg(reg),
+ else => {},
+ }
}
}
@@ -1048,8 +1058,8 @@ fn trunc(
}
},
};
- self.register_manager.freezeRegs(&.{operand_reg});
- defer self.register_manager.unfreezeRegs(&.{operand_reg});
+ const lock = self.register_manager.freezeReg(operand_reg);
+ defer if (lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = if (maybe_inst) |inst| blk: {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
@@ -1135,8 +1145,8 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
.register => |r| r,
else => try self.copyToTmpRegister(operand_ty, operand),
};
- self.register_manager.freezeRegs(&.{op_reg});
- defer self.register_manager.unfreezeRegs(&.{op_reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(op_reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
const dest_reg = blk: {
if (operand == .register and self.reuseOperand(inst, ty_op.operand, 0, operand)) {
@@ -1168,8 +1178,8 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
.register => |r| r,
else => try self.copyToTmpRegister(operand_ty, operand),
};
- self.register_manager.freezeRegs(&.{op_reg});
- defer self.register_manager.unfreezeRegs(&.{op_reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(op_reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
const dest_reg = blk: {
if (operand == .register and self.reuseOperand(inst, ty_op.operand, 0, operand)) {
@@ -1257,8 +1267,17 @@ fn binOpRegister(
const lhs_is_register = lhs == .register;
const rhs_is_register = rhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
- if (rhs_is_register) self.register_manager.freezeRegs(&.{rhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
+
+ const rhs_lock: ?RegisterLock = if (rhs_is_register)
+ self.register_manager.freezeReg(rhs.register)
+ else
+ null;
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@@ -1270,13 +1289,13 @@ fn binOpRegister(
const raw_reg = try self.register_manager.allocReg(track_inst);
const reg = registerAlias(raw_reg, lhs_ty.abiSize(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_reg = if (rhs_is_register) rhs.register else blk: {
const track_inst: ?Air.Inst.Index = if (maybe_inst) |inst| inst: {
@@ -1286,13 +1305,13 @@ fn binOpRegister(
const raw_reg = try self.register_manager.allocReg(track_inst);
const reg = registerAlias(raw_reg, rhs_ty.abiAlignment(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const new_rhs_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (new_rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = switch (mir_tag) {
.cmp_shifted_register => undefined, // cmp has no destination register
@@ -1394,7 +1413,11 @@ fn binOpImmediate(
) !MCValue {
const lhs_is_register = lhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@@ -1408,13 +1431,13 @@ fn binOpImmediate(
const raw_reg = try self.register_manager.allocReg(track_inst);
const reg = registerAlias(raw_reg, lhs_ty.abiSize(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = switch (mir_tag) {
.cmp_immediate => undefined, // cmp has no destination register
@@ -1758,7 +1781,10 @@ fn airBinOp(self: *Self, inst: Air.Inst.Index) !void {
const lhs_ty = self.air.typeOf(bin_op.lhs);
const rhs_ty = self.air.typeOf(bin_op.rhs);
- const result: MCValue = if (self.liveness.isUnused(inst)) .dead else try self.binOp(tag, inst, lhs, rhs, lhs_ty, rhs_ty);
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ try self.binOp(tag, inst, lhs, rhs, lhs_ty, rhs_ty);
return self.finishAir(inst, result, .{ bin_op.lhs, bin_op.rhs, .none });
}
@@ -1815,13 +1841,13 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
};
const dest = try self.binOp(base_tag, null, lhs, rhs, lhs_ty, rhs_ty);
const dest_reg = dest.register;
- self.register_manager.freezeRegs(&.{dest_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_reg});
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
const raw_truncated_reg = try self.register_manager.allocReg(null);
const truncated_reg = registerAlias(raw_truncated_reg, lhs_ty.abiSize(self.target.*));
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
// sbfx/ubfx truncated, dest, #0, #bits
try self.truncRegister(dest_reg, truncated_reg, int_info.signedness, int_info.bits);
@@ -1922,12 +1948,12 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const dest = try self.binOpRegister(base_tag, null, lhs, rhs, lhs_ty, rhs_ty);
const dest_reg = dest.register;
- self.register_manager.freezeRegs(&.{dest_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_reg});
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
const truncated_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
try self.truncRegister(
dest_reg.to32(),
@@ -1977,36 +2003,44 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const lhs_is_register = lhs == .register;
const rhs_is_register = rhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
- if (rhs_is_register) self.register_manager.freezeRegs(&.{rhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeRegAssumeUnused(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
+
+ const rhs_lock: ?RegisterLock = if (rhs_is_register)
+ self.register_manager.freezeRegAssumeUnused(rhs.register)
+ else
+ null;
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const lhs_reg = if (lhs_is_register) lhs.register else blk: {
const raw_reg = try self.register_manager.allocReg(null);
const reg = registerAlias(raw_reg, lhs_ty.abiSize(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_reg = if (rhs_is_register) rhs.register else blk: {
const raw_reg = try self.register_manager.allocReg(null);
const reg = registerAlias(raw_reg, rhs_ty.abiAlignment(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const new_rhs_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (new_rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
if (!lhs_is_register) try self.genSetReg(lhs_ty, lhs_reg, lhs);
if (!rhs_is_register) try self.genSetReg(rhs_ty, rhs_reg, rhs);
- // TODO reuse operands
const dest_reg = blk: {
const raw_reg = try self.register_manager.allocReg(null);
const reg = registerAlias(raw_reg, lhs_ty.abiSize(self.target.*));
- self.register_manager.freezeRegs(&.{reg});
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{dest_reg});
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
switch (int_info.signedness) {
.signed => {
@@ -2021,8 +2055,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
});
const dest_high_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{dest_high_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_high_reg});
+ const dest_high_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_high_reg);
+ defer self.register_manager.unfreezeReg(dest_high_reg_lock);
// smulh dest_high, lhs, rhs
_ = try self.addInst(.{
@@ -2071,8 +2105,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
},
.unsigned => {
const dest_high_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{dest_high_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_high_reg});
+ const dest_high_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_high_reg);
+ defer self.register_manager.unfreezeReg(dest_high_reg_lock);
// umulh dest_high, lhs, rhs
_ = try self.addInst(.{
@@ -2127,8 +2161,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
}
const truncated_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
try self.truncRegister(dest_reg, truncated_reg, int_info.signedness, int_info.bits);
@@ -2168,14 +2202,20 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
if (int_info.bits <= 64) {
const stack_offset = try self.allocMem(inst, tuple_size, tuple_align);
- if (lhs == .register) self.register_manager.freezeRegs(&.{lhs.register});
- defer if (lhs == .register) self.register_manager.unfreezeRegs(&.{lhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs == .register)
+ self.register_manager.freezeRegAssumeUnused(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
// lsl dest, lhs, rhs
const dest = try self.binOp(.shl, null, lhs, rhs, lhs_ty, rhs_ty);
+ const dest_reg = dest.register;
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
// asr/lsr reconstructed, dest, rhs
const reconstructed = try self.binOp(.shr, null, dest, rhs, lhs_ty, rhs_ty);
@@ -2184,7 +2224,9 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
_ = try self.binOp(.cmp_eq, null, lhs, reconstructed, lhs_ty, lhs_ty);
try self.genSetStack(lhs_ty, stack_offset, dest);
- try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{ .compare_flags_unsigned = .neq });
+ try self.genSetStack(Type.initTag(.u1), stack_offset - overflow_bit_offset, .{
+ .compare_flags_unsigned = .neq,
+ });
break :result MCValue{ .stack_offset = stack_offset };
} else {
@@ -2411,14 +2453,18 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf);
- if (index_is_register) self.register_manager.freezeRegs(&.{index_mcv.register});
- defer if (index_is_register) self.register_manager.unfreezeRegs(&.{index_mcv.register});
+ const index_lock: ?RegisterLock = if (index_is_register)
+ self.register_manager.freezeRegAssumeUnused(index_mcv.register)
+ else
+ null;
+ defer if (index_lock) |reg| self.register_manager.unfreezeReg(reg);
const base_mcv: MCValue = switch (slice_mcv) {
.stack_offset => |off| .{ .register = try self.copyToTmpRegister(slice_ptr_field_type, .{ .stack_offset = off }) },
else => return self.fail("TODO slice_elem_val when slice is {}", .{slice_mcv}),
};
- self.register_manager.freezeRegs(&.{base_mcv.register});
+ const base_lock = self.register_manager.freezeRegAssumeUnused(base_mcv.register);
+ defer self.register_manager.unfreezeReg(base_lock);
switch (elem_size) {
else => {
@@ -2559,8 +2605,8 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.immediate => |imm| try self.setRegOrMem(elem_ty, dst_mcv, .{ .memory = imm }),
.ptr_stack_offset => |off| try self.setRegOrMem(elem_ty, dst_mcv, .{ .stack_offset = off }),
.register => |addr_reg| {
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeReg(addr_reg);
+ defer if (addr_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
switch (dst_mcv) {
.dead => unreachable,
@@ -2573,16 +2619,19 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
if (elem_size <= 8) {
const raw_tmp_reg = try self.register_manager.allocReg(null);
const tmp_reg = registerAlias(raw_tmp_reg, elem_size);
- self.register_manager.freezeRegs(&.{tmp_reg});
- defer self.register_manager.unfreezeRegs(&.{tmp_reg});
+ const tmp_reg_lock = self.register_manager.freezeRegAssumeUnused(tmp_reg);
+ defer self.register_manager.unfreezeReg(tmp_reg_lock);
try self.load(.{ .register = tmp_reg }, ptr, ptr_ty);
try self.genSetStack(elem_ty, off, MCValue{ .register = tmp_reg });
} else {
// TODO optimize the register allocation
const regs = try self.register_manager.allocRegs(4, .{ null, null, null, null });
- self.register_manager.freezeRegs(®s);
- defer self.register_manager.unfreezeRegs(®s);
+ var regs_locks: [4]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(4, regs, ®s_locks);
+ defer for (regs_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const src_reg = addr_reg;
const dst_reg = regs[0];
@@ -2784,8 +2833,8 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
try self.genSetStack(value_ty, off, value);
},
.register => |addr_reg| {
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeReg(addr_reg);
+ defer if (addr_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
switch (value) {
.register => |value_reg| {
@@ -2795,8 +2844,8 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
if (abi_size <= 8) {
const raw_tmp_reg = try self.register_manager.allocReg(null);
const tmp_reg = registerAlias(raw_tmp_reg, abi_size);
- self.register_manager.freezeRegs(&.{tmp_reg});
- defer self.register_manager.unfreezeRegs(&.{tmp_reg});
+ const tmp_reg_lock = self.register_manager.freezeRegAssumeUnused(tmp_reg);
+ defer self.register_manager.unfreezeReg(tmp_reg_lock);
try self.genSetReg(value_ty, tmp_reg, value);
try self.store(ptr, .{ .register = tmp_reg }, ptr_ty, value_ty);
@@ -2856,12 +2905,12 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
.immediate = struct_field_offset,
});
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const addr_reg = try self.copyToTmpRegister(ptr_ty, mcv);
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeRegAssumeUnused(addr_reg);
+ defer self.register_manager.unfreezeReg(addr_reg_lock);
const dest = try self.binOp(
.add,
@@ -3369,6 +3418,9 @@ fn airCondBr(self: *Self, inst: Air.Inst.Index) !void {
const parent_compare_flags_inst = self.compare_flags_inst;
try self.branch_stack.append(.{});
+ errdefer {
+ _ = self.branch_stack.pop();
+ }
try self.ensureProcessDeathCapacity(liveness_condbr.then_deaths.len);
for (liveness_condbr.then_deaths) |operand| {
@@ -3955,8 +4007,38 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
},
.register_c_flag,
.register_v_flag,
- => {
- return self.fail("TODO implement genSetStack {}", .{mcv});
+ => |reg| {
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |locked_reg| self.register_manager.unfreezeReg(locked_reg);
+
+ const wrapped_ty = ty.structFieldType(0);
+ try self.genSetStack(wrapped_ty, stack_offset, .{ .register = reg });
+
+ const overflow_bit_ty = ty.structFieldType(1);
+ const overflow_bit_offset = @intCast(u32, ty.structFieldOffset(1, self.target.*));
+ const raw_cond_reg = try self.register_manager.allocReg(null);
+ const cond_reg = registerAlias(
+ raw_cond_reg,
+ @intCast(u32, overflow_bit_ty.abiSize(self.target.*)),
+ );
+
+ // C flag: cset reg, cs
+ // V flag: cset reg, vs
+ _ = try self.addInst(.{
+ .tag = .cset,
+ .data = .{ .r_cond = .{
+ .rd = cond_reg,
+ .cond = switch (mcv) {
+ .register_c_flag => .cs,
+ .register_v_flag => .vs,
+ else => unreachable,
+ },
+ } },
+ });
+
+ try self.genSetStack(overflow_bit_ty, stack_offset - overflow_bit_offset, .{
+ .register = cond_reg,
+ });
},
.got_load,
.direct_load,
@@ -3983,8 +4065,11 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: u32, mcv: MCValue) InnerErro
// TODO call extern memcpy
const regs = try self.register_manager.allocRegs(5, .{ null, null, null, null, null });
- self.register_manager.freezeRegs(®s);
- defer self.register_manager.unfreezeRegs(®s);
+ var regs_locks: [5]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(5, regs, ®s_locks);
+ defer for (regs_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const src_reg = regs[0];
const dst_reg = regs[1];
src/arch/arm/CodeGen.zig
@@ -23,6 +23,7 @@ const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
const RegisterManager = RegisterManagerFn(Self, Register, &allocatable_registers);
+const RegisterLock = RegisterManager.RegisterLock;
const FnResult = @import("../../codegen.zig").FnResult;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
@@ -1038,8 +1039,8 @@ fn trunc(
}
},
};
- self.register_manager.freezeRegs(&.{operand_reg});
- defer self.register_manager.unfreezeRegs(&.{operand_reg});
+ const operand_reg_lock = self.register_manager.freezeReg(operand_reg);
+ defer if (operand_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = if (maybe_inst) |inst| blk: {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
@@ -1127,8 +1128,8 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
.register => |r| r,
else => try self.copyToTmpRegister(operand_ty, operand),
};
- self.register_manager.freezeRegs(&.{op_reg});
- defer self.register_manager.unfreezeRegs(&.{op_reg});
+ const op_reg_lock = self.register_manager.freezeRegAssumeUnused(op_reg);
+ defer self.register_manager.unfreezeReg(op_reg_lock);
const dest_reg = blk: {
if (operand == .register and self.reuseOperand(inst, ty_op.operand, 0, operand)) {
@@ -1157,8 +1158,8 @@ fn airNot(self: *Self, inst: Air.Inst.Index) !void {
.register => |r| r,
else => try self.copyToTmpRegister(operand_ty, operand),
};
- self.register_manager.freezeRegs(&.{op_reg});
- defer self.register_manager.unfreezeRegs(&.{op_reg});
+ const op_reg_lock = self.register_manager.freezeRegAssumeUnused(op_reg);
+ defer self.register_manager.unfreezeReg(op_reg_lock);
const dest_reg = blk: {
if (operand == .register and self.reuseOperand(inst, ty_op.operand, 0, operand)) {
@@ -1218,15 +1219,15 @@ fn minMax(
.register => |r| r,
else => try self.copyToTmpRegister(lhs_ty, lhs),
};
- self.register_manager.freezeRegs(&.{lhs_reg});
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const lhs_reg_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (lhs_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_reg = switch (rhs) {
.register => |r| r,
else => try self.copyToTmpRegister(rhs_ty, rhs),
};
- self.register_manager.freezeRegs(&.{rhs_reg});
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const rhs_reg_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (rhs_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = if (maybe_inst) |inst| blk: {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
@@ -1392,12 +1393,12 @@ fn airOverflow(self: *Self, inst: Air.Inst.Index) !void {
};
const dest = try self.binOp(base_tag, null, lhs, rhs, lhs_ty, rhs_ty);
const dest_reg = dest.register;
- self.register_manager.freezeRegs(&.{dest_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_reg});
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
const truncated_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
// sbfx/ubfx truncated, dest, #0, #bits
try self.truncRegister(dest_reg, truncated_reg, int_info.signedness, int_info.bits);
@@ -1493,12 +1494,12 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const dest = try self.binOpRegister(base_tag, null, lhs, rhs, lhs_ty, rhs_ty);
const dest_reg = dest.register;
- self.register_manager.freezeRegs(&.{dest_reg});
- defer self.register_manager.unfreezeRegs(&.{dest_reg});
+ const dest_reg_lock = self.register_manager.freezeRegAssumeUnused(dest_reg);
+ defer self.register_manager.unfreezeReg(dest_reg_lock);
const truncated_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
// sbfx/ubfx truncated, dest, #0, #bits
try self.truncRegister(dest_reg, truncated_reg, int_info.signedness, int_info.bits);
@@ -1526,28 +1527,32 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const lhs_is_register = lhs == .register;
const rhs_is_register = rhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
- if (rhs_is_register) self.register_manager.freezeRegs(&.{rhs.register});
-
- const lhs_reg = if (lhs_is_register) lhs.register else blk: {
- const reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{reg});
-
- break :blk reg;
- };
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
- const rhs_reg = if (rhs_is_register) rhs.register else blk: {
- const reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{reg});
+ const lhs_reg = if (lhs_is_register)
+ lhs.register
+ else
+ try self.register_manager.allocReg(null);
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
- break :blk reg;
- };
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const rhs_reg = if (rhs_is_register)
+ rhs.register
+ else
+ try self.register_manager.allocReg(null);
+ const new_rhs_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (new_rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_regs = try self.register_manager.allocRegs(2, .{ null, null });
- self.register_manager.freezeRegs(&dest_regs);
- defer self.register_manager.unfreezeRegs(&dest_regs);
+ var dest_regs_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, dest_regs, &dest_regs_locks);
+ defer for (dest_regs_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const rdlo = dest_regs[0];
const rdhi = dest_regs[1];
@@ -1555,8 +1560,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
if (!rhs_is_register) try self.genSetReg(rhs_ty, rhs_reg, rhs);
const truncated_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{truncated_reg});
- defer self.register_manager.unfreezeRegs(&.{truncated_reg});
+ const truncated_reg_lock = self.register_manager.freezeRegAssumeUnused(truncated_reg);
+ defer self.register_manager.unfreezeReg(truncated_reg_lock);
_ = try self.addInst(.{
.tag = base_tag,
@@ -1648,8 +1653,11 @@ fn airShlWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
if (int_info.bits <= 32) {
const stack_offset = try self.allocMem(inst, tuple_size, tuple_align);
- if (lhs == .register) self.register_manager.freezeRegs(&.{lhs.register});
- defer if (lhs == .register) self.register_manager.unfreezeRegs(&.{lhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs == .register)
+ self.register_manager.freezeRegAssumeUnused(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.spillCompareFlagsIfOccupied();
self.compare_flags_inst = null;
@@ -1939,8 +1947,11 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
var buf: Type.SlicePtrFieldTypeBuffer = undefined;
const slice_ptr_field_type = slice_ty.slicePtrFieldType(&buf);
- if (index_is_register) self.register_manager.freezeRegs(&.{index_mcv.register});
- defer if (index_is_register) self.register_manager.unfreezeRegs(&.{index_mcv.register});
+ const index_lock: ?RegisterLock = if (index_is_register)
+ self.register_manager.freezeRegAssumeUnused(index_mcv.register)
+ else
+ null;
+ defer if (index_lock) |reg| self.register_manager.unfreezeReg(reg);
const base_mcv = slicePtr(slice_mcv);
@@ -1950,20 +1961,20 @@ fn airSliceElemVal(self: *Self, inst: Air.Inst.Index) !void {
.register => |r| r,
else => try self.copyToTmpRegister(slice_ptr_field_type, base_mcv),
};
- self.register_manager.freezeRegs(&.{base_reg});
- defer self.register_manager.unfreezeRegs(&.{base_reg});
+ const base_reg_lock = self.register_manager.freezeRegAssumeUnused(base_reg);
+ defer self.register_manager.unfreezeReg(base_reg_lock);
const dst_reg = try self.register_manager.allocReg(inst);
const dst_mcv = MCValue{ .register = dst_reg };
- self.register_manager.freezeRegs(&.{dst_reg});
- defer self.register_manager.unfreezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeRegAssumeUnused(dst_reg);
+ defer self.register_manager.unfreezeReg(dst_reg_lock);
const index_reg: Register = switch (index_mcv) {
.register => |reg| reg,
else => try self.copyToTmpRegister(Type.usize, index_mcv),
};
- self.register_manager.freezeRegs(&.{index_reg});
- defer self.register_manager.unfreezeRegs(&.{index_reg});
+ const index_reg_lock = self.register_manager.freezeRegAssumeUnused(index_reg);
+ defer self.register_manager.unfreezeReg(index_reg_lock);
const tag: Mir.Inst.Tag = switch (elem_size) {
1 => .ldrb,
@@ -2149,8 +2160,8 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.immediate => |imm| try self.setRegOrMem(elem_ty, dst_mcv, .{ .memory = imm }),
.ptr_stack_offset => |off| try self.setRegOrMem(elem_ty, dst_mcv, .{ .stack_offset = off }),
.register => |reg| {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
switch (dst_mcv) {
.dead => unreachable,
@@ -2162,16 +2173,19 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.stack_offset => |off| {
if (elem_size <= 4) {
const tmp_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{tmp_reg});
- defer self.register_manager.unfreezeRegs(&.{tmp_reg});
+ const tmp_reg_lock = self.register_manager.freezeRegAssumeUnused(tmp_reg);
+ defer self.register_manager.unfreezeReg(tmp_reg_lock);
try self.load(.{ .register = tmp_reg }, ptr, ptr_ty);
try self.genSetStack(elem_ty, off, MCValue{ .register = tmp_reg });
} else {
// TODO optimize the register allocation
const regs = try self.register_manager.allocRegs(4, .{ null, null, null, null });
- self.register_manager.freezeRegs(®s);
- defer self.register_manager.unfreezeRegs(®s);
+ var regs_locks: [4]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(4, regs, ®s_locks);
+ defer for (regs_locks) |reg_locked| {
+ self.register_manager.unfreezeReg(reg_locked);
+ };
const src_reg = reg;
const dst_reg = regs[0];
@@ -2197,8 +2211,8 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.stack_argument_offset,
=> {
const reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
try self.genSetReg(ptr_ty, reg, ptr);
try self.load(dst_mcv, .{ .register = reg }, ptr_ty);
@@ -2252,8 +2266,8 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
try self.genSetStack(value_ty, off, value);
},
.register => |addr_reg| {
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeReg(addr_reg);
+ defer if (addr_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
switch (value) {
.dead => unreachable,
@@ -2264,15 +2278,18 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
else => {
if (elem_size <= 4) {
const tmp_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{tmp_reg});
- defer self.register_manager.unfreezeRegs(&.{tmp_reg});
+ const tmp_reg_lock = self.register_manager.freezeRegAssumeUnused(tmp_reg);
+ defer self.register_manager.unfreezeReg(tmp_reg_lock);
try self.genSetReg(value_ty, tmp_reg, value);
try self.store(ptr, .{ .register = tmp_reg }, ptr_ty, value_ty);
} else {
const regs = try self.register_manager.allocRegs(4, .{ null, null, null, null });
- self.register_manager.freezeRegs(&regs);
- defer self.register_manager.unfreezeRegs(&regs);
+ var regs_locks: [4]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(4, regs, &regs_locks);
+ defer for (regs_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const src_reg = regs[0];
const dst_reg = addr_reg;
@@ -2356,12 +2373,12 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
.immediate = struct_field_offset,
});
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const addr_reg = try self.copyToTmpRegister(ptr_ty, mcv);
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeRegAssumeUnused(addr_reg);
+ defer self.register_manager.unfreezeReg(addr_reg_lock);
const dest = try self.binOp(
.add,
@@ -2477,8 +2494,11 @@ fn binOpRegister(
const lhs_is_register = lhs == .register;
const rhs_is_register = rhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
- if (rhs_is_register) self.register_manager.freezeRegs(&.{rhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@@ -2489,13 +2509,13 @@ fn binOpRegister(
} else null;
const reg = try self.register_manager.allocReg(track_inst);
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_reg = if (rhs_is_register) rhs.register else blk: {
const track_inst: ?Air.Inst.Index = if (maybe_inst) |inst| inst: {
@@ -2504,13 +2524,13 @@ fn binOpRegister(
} else null;
const reg = try self.register_manager.allocReg(track_inst);
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const new_rhs_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (new_rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = switch (mir_tag) {
.cmp => .r0, // cmp has no destination regardless
@@ -2593,7 +2613,11 @@ fn binOpImmediate(
) !MCValue {
const lhs_is_register = lhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@@ -2606,13 +2630,13 @@ fn binOpImmediate(
} else null;
const reg = try self.register_manager.allocReg(track_inst);
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = switch (mir_tag) {
.cmp => .r0, // cmp has no destination reg
src/arch/riscv64/CodeGen.zig
@@ -23,6 +23,7 @@ const log = std.log.scoped(.codegen);
const build_options = @import("build_options");
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
const RegisterManager = RegisterManagerFn(Self, Register, &callee_preserved_regs);
+const RegisterLock = RegisterManager.RegisterLock;
const FnResult = @import("../../codegen.zig").FnResult;
const GenerateSymbolError = @import("../../codegen.zig").GenerateSymbolError;
@@ -937,8 +938,11 @@ fn binOpRegister(
const lhs_is_register = lhs == .register;
const rhs_is_register = rhs == .register;
- if (lhs_is_register) self.register_manager.freezeRegs(&.{lhs.register});
- if (rhs_is_register) self.register_manager.freezeRegs(&.{rhs.register});
+ const lhs_lock: ?RegisterLock = if (lhs_is_register)
+ self.register_manager.freezeReg(lhs.register)
+ else
+ null;
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
@@ -949,13 +953,13 @@ fn binOpRegister(
} else null;
const reg = try self.register_manager.allocReg(track_inst);
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const new_lhs_lock = self.register_manager.freezeReg(lhs_reg);
+ defer if (new_lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_reg = if (rhs_is_register) rhs.register else blk: {
const track_inst: ?Air.Inst.Index = if (maybe_inst) |inst| inst: {
@@ -964,13 +968,13 @@ fn binOpRegister(
} else null;
const reg = try self.register_manager.allocReg(track_inst);
- self.register_manager.freezeRegs(&.{reg});
if (track_inst) |inst| branch.inst_table.putAssumeCapacity(inst, .{ .register = reg });
break :blk reg;
};
- defer self.register_manager.unfreezeRegs(&.{rhs_reg});
+ const new_rhs_lock = self.register_manager.freezeReg(rhs_reg);
+ defer if (new_rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dest_reg = if (maybe_inst) |inst| blk: {
const bin_op = self.air.instructions.items(.data)[inst].bin_op;
@@ -1448,8 +1452,8 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
.stack_offset,
=> {
const reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
try self.genSetReg(ptr_ty, reg, ptr);
try self.load(dst_mcv, .{ .register = reg }, ptr_ty);
src/arch/x86_64/CodeGen.zig
@@ -22,6 +22,8 @@ const Liveness = @import("../../Liveness.zig");
const Mir = @import("Mir.zig");
const Module = @import("../../Module.zig");
const RegisterManagerFn = @import("../../register_manager.zig").RegisterManager;
+const RegisterManager = RegisterManagerFn(Self, Register, &allocatable_registers);
+const RegisterLock = RegisterManager.RegisterLock;
const Target = std.Target;
const Type = @import("../../type.zig").Type;
const TypedValue = @import("../../TypedValue.zig");
@@ -42,8 +44,6 @@ const InnerError = error{
OutOfRegisters,
};
-const RegisterManager = RegisterManagerFn(Self, Register, &allocatable_registers);
-
gpa: Allocator,
air: Air,
liveness: Liveness,
@@ -211,40 +211,6 @@ pub const MCValue = union(enum) {
else => false,
};
}
-
- fn freezeIfRegister(mcv: MCValue, mgr: *RegisterManager) void {
- switch (mcv) {
- .register,
- .register_overflow_signed,
- .register_overflow_unsigned,
- => |reg| {
- mgr.freezeRegs(&.{reg});
- },
- else => {},
- }
- }
-
- fn unfreezeIfRegister(mcv: MCValue, mgr: *RegisterManager) void {
- switch (mcv) {
- .register,
- .register_overflow_signed,
- .register_overflow_unsigned,
- => |reg| {
- mgr.unfreezeRegs(&.{reg});
- },
- else => {},
- }
- }
-
- fn asRegister(mcv: MCValue) ?Register {
- return switch (mcv) {
- .register,
- .register_overflow_signed,
- .register_overflow_unsigned,
- => |reg| reg,
- else => null,
- };
- }
};
const Branch = struct {
@@ -876,15 +842,20 @@ fn finishAir(self: *Self, inst: Air.Inst.Index, result: MCValue, operands: [Live
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
branch.inst_table.putAssumeCapacityNoClobber(inst, result);
- if (result.asRegister()) |reg| {
- // In some cases (such as bitcast), an operand
- // may be the same MCValue as the result. If
- // that operand died and was a register, it
- // was freed by processDeath. We have to
- // "re-allocate" the register.
- if (self.register_manager.isRegFree(reg)) {
- self.register_manager.getRegAssumeFree(reg, inst);
- }
+ switch (result) {
+ .register,
+ .register_overflow_signed,
+ .register_overflow_unsigned,
+ => |reg| {
+ // In some cases (such as bitcast), an operand
+ // may be the same MCValue as the result. If
+ // that operand died and was a register, it
+ // was freed by processDeath. We have to
+ // "re-allocate" the register.
+ if (self.register_manager.isRegFree(reg)) {
+ self.register_manager.getRegAssumeFree(reg, inst);
+ }
+ },
+ else => {},
}
}
self.finishAirBookkeeping();
@@ -955,7 +926,15 @@ pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void
const stack_mcv = try self.allocRegOrMem(inst, false);
log.debug("spilling {d} to stack mcv {any}", .{ inst, stack_mcv });
const reg_mcv = self.getResolvedInstValue(inst);
- assert(reg.to64() == reg_mcv.asRegister().?.to64());
+ switch (reg_mcv) {
+ .register,
+ .register_overflow_unsigned,
+ .register_overflow_signed,
+ => |other| {
+ assert(reg.to64() == other.to64());
+ },
+ else => {},
+ }
const branch = &self.branch_stack.items[self.branch_stack.items.len - 1];
try branch.inst_table.put(self.gpa, inst, stack_mcv);
try self.genSetStack(self.air.typeOfIndex(inst), stack_mcv.stack_offset, reg_mcv, .{});
@@ -1043,8 +1022,11 @@ fn airIntCast(self: *Self, inst: Air.Inst.Index) !void {
return self.fail("TODO implement intCast for abi sizes larger than 8", .{});
}
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
const reg = try self.register_manager.allocReg(inst);
try self.genSetReg(dest_ty, reg, .{ .immediate = 0 });
@@ -1071,8 +1053,11 @@ fn airTrunc(self: *Self, inst: Air.Inst.Index) !void {
return self.fail("TODO implement trunc for abi sizes larger than 8", .{});
}
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
const reg: Register = blk: {
if (operand.isRegister()) {
@@ -1156,16 +1141,22 @@ fn airMin(self: *Self, inst: Air.Inst.Index) !void {
// TODO improve by checking if any operand can be reused.
// TODO audit register allocation
const lhs = try self.resolveInst(bin_op.lhs);
- lhs.freezeIfRegister(&self.register_manager);
- defer lhs.unfreezeIfRegister(&self.register_manager);
+ const lhs_lock: ?RegisterLock = switch (lhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const lhs_reg = try self.copyToTmpRegister(ty, lhs);
- self.register_manager.freezeRegs(&.{lhs_reg});
- defer self.register_manager.unfreezeRegs(&.{lhs_reg});
+ const lhs_reg_lock = self.register_manager.freezeRegAssumeUnused(lhs_reg);
+ defer self.register_manager.unfreezeReg(lhs_reg_lock);
const rhs_mcv = try self.limitImmediateType(bin_op.rhs, i32);
- rhs_mcv.freezeIfRegister(&self.register_manager);
- defer rhs_mcv.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs_mcv) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genBinMathOpMir(.cmp, ty, .{ .register = lhs_reg }, rhs_mcv);
@@ -1200,8 +1191,11 @@ fn genPtrBinMathOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_r
const offset = try self.resolveInst(op_rhs);
const offset_ty = self.air.typeOf(op_rhs);
- offset.freezeIfRegister(&self.register_manager);
- defer offset.unfreezeIfRegister(&self.register_manager);
+ const offset_lock: ?RegisterLock = switch (offset) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (offset_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_mcv = blk: {
if (self.reuseOperand(inst, op_lhs, 0, ptr)) {
@@ -1210,8 +1204,11 @@ fn genPtrBinMathOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_r
break :blk MCValue{ .register = try self.copyToTmpRegister(dst_ty, ptr) };
};
- dst_mcv.freezeIfRegister(&self.register_manager);
- defer dst_mcv.unfreezeIfRegister(&self.register_manager);
+ const dst_mcv_lock: ?RegisterLock = switch (dst_mcv) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (dst_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const offset_mcv = blk: {
if (self.reuseOperand(inst, op_rhs, 1, offset)) {
@@ -1220,8 +1217,11 @@ fn genPtrBinMathOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_r
break :blk MCValue{ .register = try self.copyToTmpRegister(offset_ty, offset) };
};
- offset_mcv.freezeIfRegister(&self.register_manager);
- defer offset_mcv.unfreezeIfRegister(&self.register_manager);
+ const offset_mcv_lock: ?RegisterLock = switch (offset_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (offset_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genIntMulComplexOpMir(offset_ty, offset_mcv, .{ .immediate = elem_size });
@@ -1306,12 +1306,18 @@ fn genSubOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_rhs: Air
const dst_ty = self.air.typeOf(op_lhs);
const lhs = try self.resolveInst(op_lhs);
- lhs.freezeIfRegister(&self.register_manager);
- defer lhs.unfreezeIfRegister(&self.register_manager);
+ const lhs_lock: ?RegisterLock = switch (lhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs = try self.resolveInst(op_rhs);
- rhs.freezeIfRegister(&self.register_manager);
- defer rhs.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_mcv = blk: {
if (self.reuseOperand(inst, op_lhs, 0, lhs) and lhs.isRegister()) {
@@ -1319,17 +1325,21 @@ fn genSubOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_rhs: Air
}
break :blk try self.copyToRegisterWithInstTracking(inst, dst_ty, lhs);
};
-
- dst_mcv.freezeIfRegister(&self.register_manager);
- defer dst_mcv.unfreezeIfRegister(&self.register_manager);
+ const dst_mcv_lock: ?RegisterLock = switch (dst_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (dst_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs_mcv = blk: {
if (rhs.isMemory() or rhs.isRegister()) break :blk rhs;
break :blk MCValue{ .register = try self.copyToTmpRegister(dst_ty, rhs) };
};
-
- rhs_mcv.freezeIfRegister(&self.register_manager);
- defer rhs_mcv.unfreezeIfRegister(&self.register_manager);
+ const rhs_mcv_lock: ?RegisterLock = switch (rhs_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (rhs_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genBinMathOpMir(.sub, dst_ty, dst_mcv, rhs_mcv);
@@ -1366,8 +1376,11 @@ fn airMul(self: *Self, inst: Air.Inst.Index) !void {
// Spill .rax and .rdx upfront to ensure we don't spill the operands too late.
try self.register_manager.getReg(.rax, inst);
try self.register_manager.getReg(.rdx, null);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rdx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
@@ -1477,8 +1490,11 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
// Spill .rax and .rdx upfront to ensure we don't spill the operands too late.
try self.register_manager.getReg(.rax, inst);
try self.register_manager.getReg(.rdx, null);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rdx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
@@ -1504,21 +1520,28 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
- rhs.freezeIfRegister(&self.register_manager);
- defer rhs.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_reg: Register = blk: {
if (lhs.isRegister()) break :blk lhs.register;
break :blk try self.copyToTmpRegister(ty, lhs);
};
- self.register_manager.freezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeRegAssumeUnused(dst_reg);
+ defer self.register_manager.unfreezeReg(dst_reg_lock);
const rhs_mcv = blk: {
if (rhs.isRegister() or rhs.isMemory()) break :blk rhs;
break :blk MCValue{ .register = try self.copyToTmpRegister(ty, rhs) };
};
- rhs_mcv.freezeIfRegister(&self.register_manager);
- defer rhs_mcv.unfreezeIfRegister(&self.register_manager);
+ const rhs_mcv_lock: ?RegisterLock = switch (rhs_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (rhs_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genIntMulComplexOpMir(Type.isize, .{ .register = dst_reg }, rhs_mcv);
@@ -1528,8 +1551,11 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
// Spill .rax and .rdx upfront to ensure we don't spill the operands too late.
try self.register_manager.getReg(.rax, null);
try self.register_manager.getReg(.rdx, null);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{.rdx});
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
@@ -1540,7 +1566,8 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
},
}
};
- defer self.register_manager.unfreezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeRegAssumeUnused(dst_reg);
+ defer self.register_manager.unfreezeReg(dst_reg_lock);
const tuple_ty = self.air.typeOfIndex(inst);
const tuple_size = @intCast(u32, tuple_ty.abiSize(self.target.*));
@@ -1554,8 +1581,11 @@ fn airMulWithOverflow(self: *Self, inst: Air.Inst.Index) !void {
};
const temp_regs = try self.register_manager.allocRegs(3, .{ null, null, null });
- self.register_manager.freezeRegs(&temp_regs);
- defer self.register_manager.unfreezeRegs(&temp_regs);
+ var temp_regs_locks: [3]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(3, temp_regs, &temp_regs_locks);
+ defer for (temp_regs_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const overflow_reg = temp_regs[0];
const flags: u2 = switch (int_info.signedness) {
@@ -1703,14 +1733,15 @@ fn genInlineIntDivFloor(self: *Self, ty: Type, lhs: MCValue, rhs: MCValue) !MCVa
.register => |reg| reg,
else => try self.copyToTmpRegister(ty, lhs),
};
- self.register_manager.freezeRegs(&.{dividend});
+ const dividend_lock = self.register_manager.freezeReg(dividend);
+ defer if (dividend_lock) |reg| self.register_manager.unfreezeReg(reg);
const divisor = switch (rhs) {
.register => |reg| reg,
else => try self.copyToTmpRegister(ty, rhs),
};
- self.register_manager.freezeRegs(&.{divisor});
- defer self.register_manager.unfreezeRegs(&.{ dividend, divisor });
+ const divisor_lock = self.register_manager.freezeReg(divisor);
+ defer if (divisor_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genIntMulDivOpMir(switch (signedness) {
.signed => .idiv,
@@ -1779,20 +1810,30 @@ fn airDiv(self: *Self, inst: Air.Inst.Index) !void {
};
try self.register_manager.getReg(.rax, track_rax);
try self.register_manager.getReg(.rdx, null);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rdx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
- lhs.freezeIfRegister(&self.register_manager);
- defer lhs.unfreezeIfRegister(&self.register_manager);
+ const lhs_lock: ?RegisterLock = switch (lhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs = blk: {
const rhs = try self.resolveInst(bin_op.rhs);
if (signedness == .signed) {
switch (tag) {
.div_floor => {
- rhs.freezeIfRegister(&self.register_manager);
- defer rhs.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
+
break :blk try self.copyToRegisterWithInstTracking(inst, ty, rhs);
},
else => {},
@@ -1800,8 +1841,11 @@ fn airDiv(self: *Self, inst: Air.Inst.Index) !void {
}
break :blk rhs;
};
- rhs.freezeIfRegister(&self.register_manager);
- defer rhs.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
if (signedness == .unsigned) {
try self.genIntMulDivOpMir(.div, ty, signedness, lhs, rhs);
@@ -1835,8 +1879,11 @@ fn airRem(self: *Self, inst: Air.Inst.Index) !void {
// Spill .rax and .rdx upfront to ensure we don't spill the operands too late.
try self.register_manager.getReg(.rax, null);
try self.register_manager.getReg(.rdx, inst);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rdx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
@@ -1863,8 +1910,11 @@ fn airMod(self: *Self, inst: Air.Inst.Index) !void {
// Spill .rax and .rdx upfront to ensure we don't spill the operands too late.
try self.register_manager.getReg(.rax, null);
try self.register_manager.getReg(.rdx, if (signedness == .unsigned) inst else null);
- self.register_manager.freezeRegs(&.{ .rax, .rdx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rdx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rdx }, &reg_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
const lhs = try self.resolveInst(bin_op.lhs);
const rhs = try self.resolveInst(bin_op.rhs);
@@ -1954,12 +2004,15 @@ fn airShl(self: *Self, inst: Air.Inst.Index) !void {
try self.register_manager.getReg(.rcx, null);
try self.genSetReg(shift_ty, .rcx, shift);
}
- self.register_manager.freezeRegs(&.{.rcx});
- defer self.register_manager.unfreezeRegs(&.{.rcx});
+ const rcx_lock = self.register_manager.freezeRegAssumeUnused(.rcx);
+ defer self.register_manager.unfreezeReg(rcx_lock);
const value = try self.resolveInst(bin_op.lhs);
- value.freezeIfRegister(&self.register_manager);
- defer value.unfreezeIfRegister(&self.register_manager);
+ const value_lock: ?RegisterLock = switch (value) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (value_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_mcv = try self.copyToRegisterWithInstTracking(inst, ty, value);
_ = try self.addInst(.{
@@ -2055,8 +2108,11 @@ fn airUnwrapErrErr(self: *Self, inst: Air.Inst.Index) !void {
const err_ty = err_union_ty.errorUnionSet();
const payload_ty = err_union_ty.errorUnionPayload();
const operand = try self.resolveInst(ty_op.operand);
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
const result: MCValue = result: {
if (!payload_ty.hasRuntimeBits()) break :result operand;
@@ -2085,8 +2141,11 @@ fn airUnwrapErrPayload(self: *Self, inst: Air.Inst.Index) !void {
if (!payload_ty.hasRuntimeBits()) break :result MCValue.none;
const operand = try self.resolveInst(ty_op.operand);
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
const abi_align = err_union_ty.abiAlignment(self.target.*);
const err_ty = err_union_ty.errorUnionSet();
@@ -2154,8 +2213,11 @@ fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const optional_ty = self.air.typeOfIndex(inst);
const operand = try self.resolveInst(ty_op.operand);
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
if (optional_ty.isPtrLikeOptional()) {
// TODO should we check if we can reuse the operand?
@@ -2288,8 +2350,11 @@ fn elemOffset(self: *Self, index_ty: Type, index: MCValue, elem_size: u64) !Regi
fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
const slice_ty = self.air.typeOf(lhs);
const slice_mcv = try self.resolveInst(lhs);
- slice_mcv.freezeIfRegister(&self.register_manager);
- defer slice_mcv.unfreezeIfRegister(&self.register_manager);
+ const slice_mcv_lock: ?RegisterLock = switch (slice_mcv) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (slice_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const elem_ty = slice_ty.childType();
const elem_size = elem_ty.abiSize(self.target.*);
@@ -2298,12 +2363,15 @@ fn genSliceElemPtr(self: *Self, lhs: Air.Inst.Ref, rhs: Air.Inst.Ref) !MCValue {
const index_ty = self.air.typeOf(rhs);
const index_mcv = try self.resolveInst(rhs);
- index_mcv.freezeIfRegister(&self.register_manager);
- defer index_mcv.unfreezeIfRegister(&self.register_manager);
+ const index_mcv_lock: ?RegisterLock = switch (index_mcv) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (index_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const offset_reg = try self.elemOffset(index_ty, index_mcv, elem_size);
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const addr_reg = try self.register_manager.allocReg(null);
switch (slice_mcv) {
@@ -2359,20 +2427,26 @@ fn airArrayElemVal(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const array_ty = self.air.typeOf(bin_op.lhs);
const array = try self.resolveInst(bin_op.lhs);
- array.freezeIfRegister(&self.register_manager);
- defer array.unfreezeIfRegister(&self.register_manager);
+ const array_lock: ?RegisterLock = switch (array) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (array_lock) |reg| self.register_manager.unfreezeReg(reg);
const elem_ty = array_ty.childType();
const elem_abi_size = elem_ty.abiSize(self.target.*);
const index_ty = self.air.typeOf(bin_op.rhs);
const index = try self.resolveInst(bin_op.rhs);
- index.freezeIfRegister(&self.register_manager);
- defer index.unfreezeIfRegister(&self.register_manager);
+ const index_lock: ?RegisterLock = switch (index) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (index_lock) |reg| self.register_manager.unfreezeReg(reg);
const offset_reg = try self.elemOffset(index_ty, index, elem_abi_size);
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const addr_reg = try self.register_manager.allocReg(null);
switch (array) {
@@ -2432,19 +2506,25 @@ fn airPtrElemVal(self: *Self, inst: Air.Inst.Index) !void {
const ptr_ty = self.air.typeOf(bin_op.lhs);
const ptr = try self.resolveInst(bin_op.lhs);
- ptr.freezeIfRegister(&self.register_manager);
- defer ptr.unfreezeIfRegister(&self.register_manager);
+ const ptr_lock: ?RegisterLock = switch (ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const elem_ty = ptr_ty.elemType2();
const elem_abi_size = elem_ty.abiSize(self.target.*);
const index_ty = self.air.typeOf(bin_op.rhs);
const index = try self.resolveInst(bin_op.rhs);
- index.freezeIfRegister(&self.register_manager);
- defer index.unfreezeIfRegister(&self.register_manager);
+ const index_lock: ?RegisterLock = switch (index) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (index_lock) |reg| self.register_manager.unfreezeReg(reg);
const offset_reg = try self.elemOffset(index_ty, index, elem_abi_size);
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const dst_mcv = try self.copyToRegisterWithInstTracking(inst, ptr_ty, ptr);
try self.genBinMathOpMir(.add, ptr_ty, dst_mcv, .{ .register = offset_reg });
@@ -2473,19 +2553,25 @@ fn airPtrElemPtr(self: *Self, inst: Air.Inst.Index) !void {
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const ptr_ty = self.air.typeOf(extra.lhs);
const ptr = try self.resolveInst(extra.lhs);
- ptr.freezeIfRegister(&self.register_manager);
- defer ptr.unfreezeIfRegister(&self.register_manager);
+ const ptr_lock: ?RegisterLock = switch (ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const elem_ty = ptr_ty.elemType2();
const elem_abi_size = elem_ty.abiSize(self.target.*);
const index_ty = self.air.typeOf(extra.rhs);
const index = try self.resolveInst(extra.rhs);
- index.freezeIfRegister(&self.register_manager);
- defer index.unfreezeIfRegister(&self.register_manager);
+ const index_lock: ?RegisterLock = switch (index) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (index_lock) |reg| self.register_manager.unfreezeReg(reg);
const offset_reg = try self.elemOffset(index_ty, index, elem_abi_size);
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const dst_mcv = try self.copyToRegisterWithInstTracking(inst, ptr_ty, ptr);
try self.genBinMathOpMir(.add, ptr_ty, dst_mcv, .{ .register = offset_reg });
@@ -2506,12 +2592,18 @@ fn airSetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
}
const ptr = try self.resolveInst(bin_op.lhs);
- ptr.freezeIfRegister(&self.register_manager);
- defer ptr.unfreezeIfRegister(&self.register_manager);
+ const ptr_lock: ?RegisterLock = switch (ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const tag = try self.resolveInst(bin_op.rhs);
- tag.freezeIfRegister(&self.register_manager);
- defer tag.unfreezeIfRegister(&self.register_manager);
+ const tag_lock: ?RegisterLock = switch (tag) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (tag_lock) |reg| self.register_manager.unfreezeReg(reg);
const adjusted_ptr: MCValue = if (layout.payload_size > 0 and layout.tag_align < layout.payload_align) blk: {
// TODO reusing the operand
@@ -2541,8 +2633,11 @@ fn airGetUnionTag(self: *Self, inst: Air.Inst.Index) !void {
// TODO reusing the operand
const operand = try self.resolveInst(ty_op.operand);
- operand.freezeIfRegister(&self.register_manager);
- defer operand.unfreezeIfRegister(&self.register_manager);
+ const operand_lock: ?RegisterLock = switch (operand) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_lock) |reg| self.register_manager.unfreezeReg(reg);
const tag_abi_size = tag_ty.abiSize(self.target.*);
const dst_mcv: MCValue = blk: {
@@ -2689,8 +2784,8 @@ fn load(self: *Self, dst_mcv: MCValue, ptr: MCValue, ptr_ty: Type) InnerError!vo
try self.setRegOrMem(elem_ty, dst_mcv, .{ .stack_offset = off });
},
.register => |reg| {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |locked_reg| self.register_manager.unfreezeReg(locked_reg);
switch (dst_mcv) {
.dead => unreachable,
@@ -2815,8 +2910,8 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
try self.genSetStack(value_ty, off, value, .{});
},
.register => |reg| {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |locked_reg| self.register_manager.unfreezeReg(locked_reg);
switch (value) {
.none => unreachable,
@@ -2906,12 +3001,15 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
.direct_load,
.memory,
=> {
- value.freezeIfRegister(&self.register_manager);
- defer value.unfreezeIfRegister(&self.register_manager);
+ const value_lock: ?RegisterLock = switch (value) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (value_lock) |reg| self.register_manager.unfreezeReg(reg);
const addr_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
+ const addr_reg_lock = self.register_manager.freezeRegAssumeUnused(addr_reg);
+ defer self.register_manager.unfreezeReg(addr_reg_lock);
try self.loadMemPtrIntoRegister(addr_reg, ptr_ty, ptr);
@@ -2982,8 +3080,8 @@ fn store(self: *Self, ptr: MCValue, value: MCValue, ptr_ty: Type, value_ty: Type
=> {
if (abi_size <= 8) {
const tmp_reg = try self.register_manager.allocReg(null);
- self.register_manager.freezeRegs(&.{tmp_reg});
- defer self.register_manager.unfreezeRegs(&.{tmp_reg});
+ const tmp_reg_lock = self.register_manager.freezeRegAssumeUnused(tmp_reg);
+ defer self.register_manager.unfreezeReg(tmp_reg_lock);
try self.loadMemPtrIntoRegister(tmp_reg, value_ty, value);
@@ -3073,8 +3171,8 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
.immediate = struct_field_offset,
});
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const dst_mcv = try self.copyToRegisterWithInstTracking(inst, ptr_ty, mcv);
try self.genBinMathOpMir(.add, ptr_ty, dst_mcv, .{ .register = offset_reg });
@@ -3085,24 +3183,27 @@ fn structFieldPtr(self: *Self, inst: Air.Inst.Index, operand: Air.Inst.Ref, inde
break :result MCValue{ .ptr_stack_offset = ptr_stack_offset };
},
.register => |reg| {
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
+
const offset_reg = try self.copyToTmpRegister(ptr_ty, .{
.immediate = struct_field_offset,
});
- self.register_manager.freezeRegs(&.{offset_reg});
- defer self.register_manager.unfreezeRegs(&.{offset_reg});
+ const offset_reg_lock = self.register_manager.freezeRegAssumeUnused(offset_reg);
+ defer self.register_manager.unfreezeReg(offset_reg_lock);
const can_reuse_operand = self.reuseOperand(inst, operand, 0, mcv);
const result_reg = blk: {
if (can_reuse_operand) {
break :blk reg;
} else {
- self.register_manager.freezeRegs(&.{reg});
const result_reg = try self.register_manager.allocReg(inst);
try self.genSetReg(ptr_ty, result_reg, mcv);
break :blk result_reg;
}
};
- defer if (!can_reuse_operand) self.register_manager.unfreezeRegs(&.{reg});
+ const result_reg_lock = self.register_manager.freezeReg(result_reg);
+ defer if (result_reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
try self.genBinMathOpMir(.add, ptr_ty, .{ .register = result_reg }, .{ .register = offset_reg });
break :result MCValue{ .register = result_reg };
@@ -3130,8 +3231,8 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
break :result MCValue{ .stack_offset = stack_offset };
},
.register => |reg| {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
const dst_mcv = blk: {
if (self.reuseOperand(inst, operand, 0, mcv)) {
@@ -3143,8 +3244,11 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
break :blk dst_mcv;
}
};
- dst_mcv.freezeIfRegister(&self.register_manager);
- defer dst_mcv.unfreezeIfRegister(&self.register_manager);
+ const dst_mcv_lock: ?RegisterLock = switch (dst_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (dst_mcv_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
// Shift by struct_field_offset.
const shift = @intCast(u8, struct_field_offset * @sizeOf(usize));
@@ -3186,8 +3290,8 @@ fn airStructFieldVal(self: *Self, inst: Air.Inst.Index) !void {
},
1 => {
// Get overflow bit.
- mcv.freezeIfRegister(&self.register_manager);
- defer mcv.unfreezeIfRegister(&self.register_manager);
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
const dst_reg = try self.register_manager.allocReg(inst);
const flags: u2 = switch (mcv) {
@@ -3229,12 +3333,18 @@ fn genBinMathOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_rhs:
const dst_ty = self.air.typeOf(op_lhs);
const lhs = try self.resolveInst(op_lhs);
- lhs.freezeIfRegister(&self.register_manager);
- defer lhs.unfreezeIfRegister(&self.register_manager);
+ const lhs_lock: ?RegisterLock = switch (lhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const rhs = try self.resolveInst(op_rhs);
- rhs.freezeIfRegister(&self.register_manager);
- defer rhs.unfreezeIfRegister(&self.register_manager);
+ const rhs_lock: ?RegisterLock = switch (rhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (rhs_lock) |reg| self.register_manager.unfreezeReg(reg);
var flipped: bool = false;
const dst_mcv = blk: {
@@ -3247,16 +3357,22 @@ fn genBinMathOp(self: *Self, inst: Air.Inst.Index, op_lhs: Air.Inst.Ref, op_rhs:
}
break :blk try self.copyToRegisterWithInstTracking(inst, dst_ty, lhs);
};
- dst_mcv.freezeIfRegister(&self.register_manager);
- defer dst_mcv.unfreezeIfRegister(&self.register_manager);
+ const dst_mcv_lock: ?RegisterLock = switch (dst_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (dst_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const src_mcv = blk: {
const mcv = if (flipped) lhs else rhs;
if (mcv.isRegister() or mcv.isMemory()) break :blk mcv;
break :blk MCValue{ .register = try self.copyToTmpRegister(dst_ty, mcv) };
};
- src_mcv.freezeIfRegister(&self.register_manager);
- defer src_mcv.unfreezeIfRegister(&self.register_manager);
+ const src_mcv_lock: ?RegisterLock = switch (src_mcv) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (src_mcv_lock) |reg| self.register_manager.unfreezeReg(reg);
const tag = self.air.instructions.items(.tag)[inst];
switch (tag) {
@@ -3287,8 +3403,9 @@ fn genBinMathOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MC
.register_overflow_unsigned => unreachable,
.register_overflow_signed => unreachable,
.ptr_stack_offset => {
- self.register_manager.freezeRegs(&.{dst_reg});
- defer self.register_manager.unfreezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeReg(dst_reg);
+ defer if (dst_reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
+
const reg = try self.copyToTmpRegister(dst_ty, src_mcv);
return self.genBinMathOpMir(mir_tag, dst_ty, dst_mcv, .{ .register = reg });
},
@@ -3318,8 +3435,9 @@ fn genBinMathOpMir(self: *Self, mir_tag: Mir.Inst.Tag, dst_ty: Type, dst_mcv: MC
.compare_flags_unsigned,
=> {
assert(abi_size <= 8);
- self.register_manager.freezeRegs(&.{dst_reg});
- defer self.register_manager.unfreezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeReg(dst_reg);
+ defer if (dst_reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
+
const reg = try self.copyToTmpRegister(dst_ty, src_mcv);
return self.genBinMathOpMir(mir_tag, dst_ty, dst_mcv, .{ .register = reg });
},
@@ -3659,7 +3777,7 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
try self.register_manager.getReg(reg, null);
}
- if (info.return_value == .stack_offset) {
+ const rdi_lock: ?RegisterLock = if (info.return_value == .stack_offset) blk: {
const ret_ty = fn_ty.fnReturnType();
const ret_abi_size = @intCast(u32, ret_ty.abiSize(self.target.*));
const ret_abi_align = @intCast(u32, ret_ty.abiAlignment(self.target.*));
@@ -3668,11 +3786,13 @@ fn airCall(self: *Self, inst: Air.Inst.Index, modifier: std.builtin.CallOptions.
try self.register_manager.getReg(.rdi, null);
try self.genSetReg(Type.usize, .rdi, .{ .ptr_stack_offset = stack_offset });
- self.register_manager.freezeRegs(&.{.rdi});
+ const rdi_lock = self.register_manager.freezeRegAssumeUnused(.rdi);
info.return_value.stack_offset = stack_offset;
- }
- defer if (info.return_value == .stack_offset) self.register_manager.unfreezeRegs(&.{.rdi});
+
+ break :blk rdi_lock;
+ } else null;
+ defer if (rdi_lock) |reg| self.register_manager.unfreezeReg(reg);
for (args) |arg, arg_i| {
const mc_arg = info.args[arg_i];
@@ -3891,11 +4011,16 @@ fn airRet(self: *Self, inst: Air.Inst.Index) !void {
const ret_ty = self.fn_type.fnReturnType();
switch (self.ret_mcv) {
.stack_offset => {
- self.register_manager.freezeRegs(&.{ .rax, .rcx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rcx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rcx }, ®_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
+
const reg = try self.copyToTmpRegister(Type.usize, self.ret_mcv);
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
+
try self.genSetStack(ret_ty, 0, operand, .{
.source_stack_base = .rbp,
.dest_stack_base = reg,
@@ -3926,11 +4051,16 @@ fn airRetLoad(self: *Self, inst: Air.Inst.Index) !void {
const elem_ty = ptr_ty.elemType();
switch (self.ret_mcv) {
.stack_offset => {
- self.register_manager.freezeRegs(&.{ .rax, .rcx });
- defer self.register_manager.unfreezeRegs(&.{ .rax, .rcx });
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rcx }, ®_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
+
const reg = try self.copyToTmpRegister(Type.usize, self.ret_mcv);
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
+
try self.genInlineMemcpy(.{ .stack_offset = 0 }, ptr, .{ .immediate = elem_ty.abiSize(self.target.*) }, .{
.source_stack_base = .rbp,
.dest_stack_base = reg,
@@ -3980,12 +4110,15 @@ fn airCmp(self: *Self, inst: Air.Inst.Index, op: math.CompareOperator) !void {
// Source operand can be an immediate, 8 bits or 32 bits.
// TODO look into reusing the operand
const lhs = try self.resolveInst(bin_op.lhs);
- lhs.freezeIfRegister(&self.register_manager);
- defer lhs.unfreezeIfRegister(&self.register_manager);
+ const lhs_lock: ?RegisterLock = switch (lhs) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (lhs_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_reg = try self.copyToTmpRegister(ty, lhs);
- self.register_manager.freezeRegs(&.{dst_reg});
- defer self.register_manager.unfreezeRegs(&.{dst_reg});
+ const dst_reg_lock = self.register_manager.freezeRegAssumeUnused(dst_reg);
+ defer self.register_manager.unfreezeReg(dst_reg_lock);
const dst_mcv = MCValue{ .register = dst_reg };
@@ -4448,8 +4581,13 @@ fn airIsNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
- operand_ptr.freezeIfRegister(&self.register_manager);
- defer operand_ptr.unfreezeIfRegister(&self.register_manager);
+
+ const operand_ptr_lock: ?RegisterLock = switch (operand_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
+
const operand: MCValue = blk: {
if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
@@ -4479,8 +4617,13 @@ fn airIsNonNullPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
- operand_ptr.freezeIfRegister(&self.register_manager);
- defer operand_ptr.unfreezeIfRegister(&self.register_manager);
+
+ const operand_ptr_lock: ?RegisterLock = switch (operand_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
+
const operand: MCValue = blk: {
if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
@@ -4510,8 +4653,13 @@ fn airIsErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
- operand_ptr.freezeIfRegister(&self.register_manager);
- defer operand_ptr.unfreezeIfRegister(&self.register_manager);
+
+ const operand_ptr_lock: ?RegisterLock = switch (operand_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
+
const operand: MCValue = blk: {
if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
@@ -4541,8 +4689,13 @@ fn airIsNonErrPtr(self: *Self, inst: Air.Inst.Index) !void {
const un_op = self.air.instructions.items(.data)[inst].un_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
const operand_ptr = try self.resolveInst(un_op);
- operand_ptr.freezeIfRegister(&self.register_manager);
- defer operand_ptr.unfreezeIfRegister(&self.register_manager);
+
+ const operand_ptr_lock: ?RegisterLock = switch (operand_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (operand_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
+
const operand: MCValue = blk: {
if (self.reuseOperand(inst, un_op, 0, operand_ptr)) {
// The MCValue that holds the pointer can be re-used as the value.
@@ -4610,8 +4763,8 @@ fn genCondSwitchMir(self: *Self, ty: Type, condition: MCValue, case: MCValue) !u
.register => |cond_reg| {
try self.spillCompareFlagsIfOccupied();
- self.register_manager.freezeRegs(&.{cond_reg});
- defer self.register_manager.unfreezeRegs(&.{cond_reg});
+ const cond_reg_lock = self.register_manager.freezeReg(cond_reg);
+ defer if (cond_reg_lock) |reg| self.register_manager.unfreezeReg(reg);
switch (case) {
.none => unreachable,
@@ -4670,8 +4823,8 @@ fn genCondSwitchMir(self: *Self, ty: Type, condition: MCValue, case: MCValue) !u
if (abi_size <= 8) {
const reg = try self.copyToTmpRegister(ty, condition);
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeRegAssumeUnused(reg);
+ defer self.register_manager.unfreezeReg(reg_lock);
return self.genCondSwitchMir(ty, .{ .register = reg }, case);
}
@@ -5158,8 +5311,8 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
.register_overflow_unsigned,
.register_overflow_signed,
=> |reg| {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
const wrapped_ty = ty.structFieldType(0);
try self.genSetStack(wrapped_ty, stack_offset, .{ .register = reg }, .{});
@@ -5260,8 +5413,8 @@ fn genSetStack(self: *Self, ty: Type, stack_offset: i32, mcv: MCValue, opts: Inl
const base_reg = opts.dest_stack_base orelse .rbp;
if (!math.isPowerOfTwo(abi_size)) {
- self.register_manager.freezeRegs(&.{reg});
- defer self.register_manager.unfreezeRegs(&.{reg});
+ const reg_lock = self.register_manager.freezeReg(reg);
+ defer if (reg_lock) |reg_locked| self.register_manager.unfreezeReg(reg_locked);
const tmp_reg = try self.copyToTmpRegister(ty, mcv);
@@ -5350,13 +5503,26 @@ fn genInlineMemcpy(
len: MCValue,
opts: InlineMemcpyOpts,
) InnerError!void {
- self.register_manager.freezeRegs(&.{ .rax, .rcx });
+ try self.register_manager.getReg(.rax, null);
+ try self.register_manager.getReg(.rcx, null);
- if (opts.source_stack_base) |reg| self.register_manager.freezeRegs(&.{reg});
- defer if (opts.source_stack_base) |reg| self.register_manager.unfreezeRegs(&.{reg});
+ var reg_locks: [2]RegisterLock = undefined;
+ self.register_manager.freezeRegsAssumeUnused(2, .{ .rax, .rcx }, ®_locks);
+ defer for (reg_locks) |reg| {
+ self.register_manager.unfreezeReg(reg);
+ };
- if (opts.dest_stack_base) |reg| self.register_manager.freezeRegs(&.{reg});
- defer if (opts.dest_stack_base) |reg| self.register_manager.unfreezeRegs(&.{reg});
+ const ssbase_lock: ?RegisterLock = if (opts.source_stack_base) |reg|
+ self.register_manager.freezeReg(reg)
+ else
+ null;
+ defer if (ssbase_lock) |reg| self.register_manager.unfreezeReg(reg);
+
+ const dsbase_lock: ?RegisterLock = if (opts.dest_stack_base) |reg|
+ self.register_manager.freezeReg(reg)
+ else
+ null;
+ defer if (dsbase_lock) |reg| self.register_manager.unfreezeReg(reg);
const dst_addr_reg = try self.register_manager.allocReg(null);
switch (dst_ptr) {
@@ -5390,8 +5556,8 @@ fn genInlineMemcpy(
return self.fail("TODO implement memcpy for setting stack when dest is {}", .{dst_ptr});
},
}
- self.register_manager.freezeRegs(&.{dst_addr_reg});
- defer self.register_manager.unfreezeRegs(&.{dst_addr_reg});
+ const dst_addr_reg_lock = self.register_manager.freezeRegAssumeUnused(dst_addr_reg);
+ defer self.register_manager.unfreezeReg(dst_addr_reg_lock);
const src_addr_reg = try self.register_manager.allocReg(null);
switch (src_ptr) {
@@ -5425,18 +5591,13 @@ fn genInlineMemcpy(
return self.fail("TODO implement memcpy for setting stack when src is {}", .{src_ptr});
},
}
- self.register_manager.freezeRegs(&.{src_addr_reg});
- defer self.register_manager.unfreezeRegs(&.{src_addr_reg});
+ const src_addr_reg_lock = self.register_manager.freezeRegAssumeUnused(src_addr_reg);
+ defer self.register_manager.unfreezeReg(src_addr_reg_lock);
const regs = try self.register_manager.allocRegs(2, .{ null, null });
const count_reg = regs[0].to64();
const tmp_reg = regs[1].to8();
- self.register_manager.unfreezeRegs(&.{ .rax, .rcx });
-
- try self.register_manager.getReg(.rax, null);
- try self.register_manager.getReg(.rcx, null);
-
try self.genSetReg(Type.usize, count_reg, len);
// mov rcx, 0
@@ -5540,7 +5701,9 @@ fn genInlineMemset(
len: MCValue,
opts: InlineMemcpyOpts,
) InnerError!void {
- self.register_manager.freezeRegs(&.{.rax});
+ try self.register_manager.getReg(.rax, null);
+ const rax_lock = self.register_manager.freezeRegAssumeUnused(.rax);
+ defer self.register_manager.unfreezeReg(rax_lock);
const addr_reg = try self.register_manager.allocReg(null);
switch (dst_ptr) {
@@ -5574,11 +5737,8 @@ fn genInlineMemset(
return self.fail("TODO implement memcpy for setting stack when dest is {}", .{dst_ptr});
},
}
- self.register_manager.freezeRegs(&.{addr_reg});
- defer self.register_manager.unfreezeRegs(&.{addr_reg});
-
- self.register_manager.unfreezeRegs(&.{.rax});
- try self.register_manager.getReg(.rax, null);
+ const addr_reg_lock = self.register_manager.freezeRegAssumeUnused(addr_reg);
+ defer self.register_manager.unfreezeReg(addr_reg_lock);
try self.genSetReg(Type.usize, .rax, len);
try self.genBinMathOpMir(.sub, Type.usize, .{ .register = .rax }, .{ .immediate = 1 });
@@ -6017,16 +6177,25 @@ fn airMemset(self: *Self, inst: Air.Inst.Index) !void {
const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
const dst_ptr = try self.resolveInst(pl_op.operand);
- dst_ptr.freezeIfRegister(&self.register_manager);
- defer dst_ptr.unfreezeIfRegister(&self.register_manager);
+ const dst_ptr_lock: ?RegisterLock = switch (dst_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (dst_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const src_val = try self.resolveInst(extra.lhs);
- src_val.freezeIfRegister(&self.register_manager);
- defer src_val.unfreezeIfRegister(&self.register_manager);
+ const src_val_lock: ?RegisterLock = switch (src_val) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (src_val_lock) |reg| self.register_manager.unfreezeReg(reg);
const len = try self.resolveInst(extra.rhs);
- len.freezeIfRegister(&self.register_manager);
- defer len.unfreezeIfRegister(&self.register_manager);
+ const len_lock: ?RegisterLock = switch (len) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (len_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genInlineMemset(dst_ptr, src_val, len, .{});
@@ -6038,17 +6207,26 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
const extra = self.air.extraData(Air.Bin, pl_op.payload).data;
const dst_ptr = try self.resolveInst(pl_op.operand);
- dst_ptr.freezeIfRegister(&self.register_manager);
- defer dst_ptr.unfreezeIfRegister(&self.register_manager);
+ const dst_ptr_lock: ?RegisterLock = switch (dst_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (dst_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const src_ty = self.air.typeOf(extra.lhs);
const src_ptr = try self.resolveInst(extra.lhs);
- src_ptr.freezeIfRegister(&self.register_manager);
- defer src_ptr.unfreezeIfRegister(&self.register_manager);
+ const src_ptr_lock: ?RegisterLock = switch (src_ptr) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (src_ptr_lock) |reg| self.register_manager.unfreezeReg(reg);
const len = try self.resolveInst(extra.rhs);
- len.freezeIfRegister(&self.register_manager);
- defer len.unfreezeIfRegister(&self.register_manager);
+ const len_lock: ?RegisterLock = switch (len) {
+ .register => |reg| self.register_manager.freezeRegAssumeUnused(reg),
+ else => null,
+ };
+ defer if (len_lock) |reg| self.register_manager.unfreezeReg(reg);
// TODO Is this the only condition for pointer dereference for memcpy?
const src: MCValue = blk: {
@@ -6070,8 +6248,11 @@ fn airMemcpy(self: *Self, inst: Air.Inst.Index) !void {
else => break :blk src_ptr,
}
};
- src.freezeIfRegister(&self.register_manager);
- defer src.unfreezeIfRegister(&self.register_manager);
+ const src_lock: ?RegisterLock = switch (src) {
+ .register => |reg| self.register_manager.freezeReg(reg),
+ else => null,
+ };
+ defer if (src_lock) |reg| self.register_manager.unfreezeReg(reg);
try self.genInlineMemcpy(dst_ptr, src, len, .{});
src/register_manager.zig
@@ -116,23 +116,52 @@ pub fn RegisterManager(
return self.frozen_registers & mask != 0;
}
- /// Prevents the registers from being allocated until they are
- /// unfrozen again
- pub fn freezeRegs(self: *Self, regs: []const Register) void {
- for (regs) |reg| {
- const mask = getRegisterMask(reg) orelse continue;
- self.frozen_registers |= mask;
- }
+ pub const RegisterLock = struct {
+ register: Register,
+ };
+
+ /// Prevents the register from being allocated until it is
+ /// unfrozen again.
+ /// Returns `RegisterLock` if the register was not already
+ /// frozen, or `null` otherwise.
+ /// Only the owner of the `RegisterLock` can unfreeze the
+ /// register later.
+ pub fn freezeReg(self: *Self, reg: Register) ?RegisterLock {
+ if (self.isRegFrozen(reg)) return null;
+ const mask = getRegisterMask(reg) orelse return null;
+ self.frozen_registers |= mask;
+ return RegisterLock{ .register = reg };
+ }
+
+ /// Like `freezeReg`, but asserts that the register is not
+ /// already frozen, always returning a valid lock.
+ pub fn freezeRegAssumeUnused(self: *Self, reg: Register) RegisterLock {
+ assert(!self.isRegFrozen(reg));
+ const mask = getRegisterMask(reg) orelse unreachable;
+ self.frozen_registers |= mask;
+ return RegisterLock{ .register = reg };
}
- /// Enables the allocation of the registers
- pub fn unfreezeRegs(self: *Self, regs: []const Register) void {
- for (regs) |reg| {
- const mask = getRegisterMask(reg) orelse continue;
- self.frozen_registers &= ~mask;
+ /// Like `freezeRegAssumeUnused` but locks multiple registers.
+ pub fn freezeRegsAssumeUnused(
+ self: *Self,
+ comptime count: comptime_int,
+ regs: [count]Register,
+ buf: *[count]RegisterLock,
+ ) void {
+ for (®s) |reg, i| {
+ buf[i] = self.freezeRegAssumeUnused(reg);
}
}
+ /// Unfreezes the register allowing its re-allocation and re-use.
+ /// Requires `RegisterLock` to unfreeze a register.
+ /// Call `freezeReg` to obtain the lock first.
+ pub fn unfreezeReg(self: *Self, lock: RegisterLock) void {
+ const mask = getRegisterMask(lock.register) orelse return;
+ self.frozen_registers &= ~mask;
+ }
+
/// Returns true when at least one register is frozen
pub fn frozenRegsExist(self: Self) bool {
return self.frozen_registers != 0;
@@ -419,8 +448,8 @@ test "allocReg: spilling" {
// Frozen registers
function.register_manager.freeReg(.r3);
{
- function.register_manager.freezeRegs(&.{.r2});
- defer function.register_manager.unfreezeRegs(&.{.r2});
+ const lock = function.register_manager.freezeReg(.r2);
+ defer if (lock) |reg| function.register_manager.unfreezeReg(reg);
try expectEqual(@as(?MockRegister1, .r3), try function.register_manager.allocReg(mock_instruction));
}
@@ -447,8 +476,8 @@ test "tryAllocRegs" {
function.register_manager.freeReg(.r2);
function.register_manager.freeReg(.r3);
{
- function.register_manager.freezeRegs(&.{.r1});
- defer function.register_manager.unfreezeRegs(&.{.r1});
+ const lock = function.register_manager.freezeReg(.r1);
+ defer if (lock) |reg| function.register_manager.unfreezeReg(reg);
try expectEqual([_]MockRegister2{ .r0, .r2, .r3 }, function.register_manager.tryAllocRegs(3, .{ null, null, null }).?);
}
@@ -486,8 +515,8 @@ test "allocRegs: normal usage" {
// contain any valuable data anymore and can be reused. For an
// example of that, see `selectively reducing register
// pressure`.
- function.register_manager.freezeRegs(&.{result_reg});
- defer function.register_manager.unfreezeRegs(&.{result_reg});
+ const lock = function.register_manager.freezeReg(result_reg);
+ defer if (lock) |reg| function.register_manager.unfreezeReg(reg);
const regs = try function.register_manager.allocRegs(2, .{ null, null });
try function.genAdd(result_reg, regs[0], regs[1]);
@@ -507,16 +536,14 @@ test "allocRegs: selectively reducing register pressure" {
{
const result_reg: MockRegister2 = .r1;
- function.register_manager.freezeRegs(&.{result_reg});
- defer function.register_manager.unfreezeRegs(&.{result_reg});
+ const lock = function.register_manager.freezeReg(result_reg);
// Here, we don't defer unfreeze because we manually unfreeze
// after genAdd
const regs = try function.register_manager.allocRegs(2, .{ null, null });
- function.register_manager.freezeRegs(&.{result_reg});
try function.genAdd(result_reg, regs[0], regs[1]);
- function.register_manager.unfreezeRegs(®s);
+ function.register_manager.unfreezeReg(lock.?);
const extra_summand_reg = try function.register_manager.allocReg(null);
try function.genAdd(result_reg, result_reg, extra_summand_reg);
test/behavior/align.zig
@@ -7,6 +7,7 @@ var foo: u8 align(4) = 100;
test "global variable alignment" {
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest; // TODO
+ if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest; // TODO
comptime try expect(@typeInfo(@TypeOf(&foo)).Pointer.alignment == 4);
comptime try expect(@TypeOf(&foo) == *align(4) u8);