//! Register allocation bookkeeping: tracks which registers are free,
//! allocated over the course of a function, and locked against allocation.
const std = @import("std");
const math = std.math;
const mem = std.mem;
const assert = std.debug.assert;
const Allocator = std.mem.Allocator;
const Air = @import("Air.zig");
const StaticBitSet = std.bit_set.StaticBitSet;
const Type = @import("Type.zig");
const Zcu = @import("Zcu.zig");
const expect = std.testing.expect;
const expectEqual = std.testing.expectEqual;
const expectEqualSlices = std.testing.expectEqualSlices;
const link = @import("link.zig");

const log = std.log.scoped(.register_manager);

pub const AllocationError = @import("codegen.zig").CodeGenError || error{OutOfRegisters};
18
pub fn RegisterManager(
    comptime Function: type,
    comptime Register: type,
    comptime tracked_registers: []const Register,
) type {
    // architectures which do not have a concept of registers should
    // refrain from using RegisterManager
    assert(tracked_registers.len > 0); // see note above

    return struct {
        /// Tracks the AIR instruction allocated to every register. If
        /// no instruction is allocated to a register (i.e. the
        /// register is free), the value in that slot is undefined.
        ///
        /// The key must be canonical register.
        registers: TrackedRegisters = undefined,
        /// Tracks which registers are free (in which case the
        /// corresponding bit is set to 1)
        free_registers: RegisterBitSet = .initFull(),
        /// Tracks all registers allocated in the course of this
        /// function
        allocated_registers: RegisterBitSet = .initEmpty(),
        /// Tracks registers which are locked from being allocated
        locked_registers: RegisterBitSet = .initEmpty(),

        const Self = @This();

        pub const TrackedRegisters = [tracked_registers.len]Air.Inst.Index;
        pub const TrackedIndex = std.math.IntFittingRange(0, tracked_registers.len - 1);
        pub const RegisterBitSet = StaticBitSet(tracked_registers.len);

        /// Recovers the enclosing `Function` from its `register_manager` field.
        fn getFunction(self: *Self) *Function {
            return @alignCast(@fieldParentPtr("register_manager", self));
        }

        fn markRegIndexAllocated(self: *Self, tracked_index: TrackedIndex) void {
            self.allocated_registers.set(tracked_index);
        }
        /// No-op when `reg` is not tracked.
        fn markRegAllocated(self: *Self, reg: Register) void {
            self.markRegIndexAllocated(indexOfRegIntoTracked(reg) orelse return);
        }

        fn markRegIndexUsed(self: *Self, tracked_index: TrackedIndex) void {
            self.free_registers.unset(tracked_index);
        }
        /// No-op when `reg` is not tracked.
        fn markRegUsed(self: *Self, reg: Register) void {
            self.markRegIndexUsed(indexOfRegIntoTracked(reg) orelse return);
        }

        fn markRegIndexFree(self: *Self, tracked_index: TrackedIndex) void {
            self.free_registers.set(tracked_index);
        }
        /// No-op when `reg` is not tracked.
        fn markRegFree(self: *Self, reg: Register) void {
            self.markRegIndexFree(indexOfRegIntoTracked(reg) orelse return);
        }

        /// Returns the position of `reg` within `set`, or `null` if it
        /// is not a member. Builds a comptime id->index lookup table
        /// spanning the id range of `set`.
        pub fn indexOfReg(
            comptime set: []const Register,
            reg: Register,
        ) ?std.math.IntFittingRange(0, set.len - 1) {
            @setEvalBranchQuota(3000);

            const Id = @TypeOf(reg.id());
            comptime var min_id: Id = std.math.maxInt(Id);
            comptime var max_id: Id = std.math.minInt(Id);
            inline for (set) |elem| {
                const elem_id = comptime elem.id();
                min_id = @min(elem_id, min_id);
                max_id = @max(elem_id, max_id);
            }

            // `set.len` is used as the "not a member" sentinel.
            comptime var map: [max_id - min_id + 1]std.math.IntFittingRange(0, set.len) = @splat(set.len);
            inline for (set, 0..) |elem, elem_index| map[comptime elem.id() - min_id] = elem_index;

            const id_index = reg.id() -% min_id;
            if (id_index >= map.len) return null;
            const set_index = map[id_index];
            return if (set_index < set.len) @intCast(set_index) else null;
        }

        pub fn indexOfRegIntoTracked(reg: Register) ?TrackedIndex {
            return indexOfReg(tracked_registers, reg);
        }
        pub inline fn indexOfKnownRegIntoTracked(comptime reg: Register) ?TrackedIndex {
            return comptime indexOfRegIntoTracked(reg);
        }

        pub fn regAtTrackedIndex(tracked_index: TrackedIndex) Register {
            return tracked_registers[tracked_index];
        }

        pub fn isRegIndexFree(self: Self, tracked_index: TrackedIndex) bool {
            return self.free_registers.isSet(tracked_index);
        }
        /// Returns true when this register is not tracked
        pub fn isRegFree(self: Self, reg: Register) bool {
            return self.isRegIndexFree(indexOfRegIntoTracked(reg) orelse return true);
        }
        /// Returns true when this register is not tracked
        pub fn isKnownRegFree(self: Self, comptime reg: Register) bool {
            return self.isRegIndexFree(indexOfKnownRegIntoTracked(reg) orelse return true);
        }

        /// Returns whether this register was allocated in the course
        /// of this function.
        ///
        /// Returns false when this register is not tracked
        pub fn isRegAllocated(self: Self, reg: Register) bool {
            const index = indexOfRegIntoTracked(reg) orelse return false;
            return self.allocated_registers.isSet(index);
        }

        fn isRegIndexLocked(self: Self, tracked_index: TrackedIndex) bool {
            return self.locked_registers.isSet(tracked_index);
        }
        /// Returns whether this register is locked
        ///
        /// Returns false when this register is not tracked
        pub fn isRegLocked(self: Self, reg: Register) bool {
            return self.isRegIndexLocked(indexOfRegIntoTracked(reg) orelse return false);
        }
        /// Returns whether this register is locked
        ///
        /// Returns false when this register is not tracked
        pub fn isKnownRegLocked(self: Self, comptime reg: Register) bool {
            return self.isRegIndexLocked(indexOfKnownRegIntoTracked(reg) orelse return false);
        }

        pub const RegisterLock = struct { tracked_index: TrackedIndex };

        /// Prevents the register from being allocated until they are
        /// unlocked again.
        /// Returns `RegisterLock` if the register was not already
        /// locked, or `null` otherwise.
        /// Only the owner of the `RegisterLock` can unlock the
        /// register later.
        pub fn lockRegIndex(self: *Self, tracked_index: TrackedIndex) ?RegisterLock {
            log.debug("locking {}", .{regAtTrackedIndex(tracked_index)});
            if (self.isRegIndexLocked(tracked_index)) {
                log.debug("  register already locked", .{});
                return null;
            }
            self.locked_registers.set(tracked_index);
            return RegisterLock{ .tracked_index = tracked_index };
        }
        /// Returns `null` when `reg` is not tracked or already locked.
        pub fn lockReg(self: *Self, reg: Register) ?RegisterLock {
            return self.lockRegIndex(indexOfRegIntoTracked(reg) orelse return null);
        }

        /// Like `lockReg` but asserts the register was unused always
        /// returning a valid lock.
        pub fn lockRegIndexAssumeUnused(self: *Self, tracked_index: TrackedIndex) RegisterLock {
            log.debug("locking asserting free {}", .{regAtTrackedIndex(tracked_index)});
            assert(!self.isRegIndexLocked(tracked_index));
            self.locked_registers.set(tracked_index);
            return RegisterLock{ .tracked_index = tracked_index };
        }
        /// Asserts `reg` is tracked and not already locked.
        pub fn lockRegAssumeUnused(self: *Self, reg: Register) RegisterLock {
            return self.lockRegIndexAssumeUnused(indexOfRegIntoTracked(reg) orelse unreachable);
        }
        /// Asserts `reg` is tracked and not already locked.
        pub fn lockKnownRegAssumeUnused(self: *Self, comptime reg: Register) RegisterLock {
            return self.lockRegIndexAssumeUnused(indexOfKnownRegIntoTracked(reg) orelse unreachable);
        }

        /// Like `lockReg` but locks multiple registers.
        pub fn lockRegs(
            self: *Self,
            comptime count: comptime_int,
            regs: [count]Register,
        ) [count]?RegisterLock {
            var results: [count]?RegisterLock = undefined;
            for (&results, regs) |*result, reg| result.* = self.lockReg(reg);
            return results;
        }

        /// Like `lockRegAssumeUnused` but locks multiple registers.
        pub fn lockRegsAssumeUnused(
            self: *Self,
            comptime count: comptime_int,
            regs: [count]Register,
        ) [count]RegisterLock {
            var results: [count]RegisterLock = undefined;
            for (&results, regs) |*result, reg| result.* = self.lockRegAssumeUnused(reg);
            return results;
        }

        /// Unlocks the register allowing its re-allocation and re-use.
        /// Requires `RegisterLock` to unlock a register.
        /// Call `lockReg` to obtain the lock first.
        pub fn unlockReg(self: *Self, lock: RegisterLock) void {
            log.debug("unlocking {}", .{regAtTrackedIndex(lock.tracked_index)});
            self.locked_registers.unset(lock.tracked_index);
        }

        /// Returns true when at least one register is locked
        pub fn lockedRegsExist(self: Self) bool {
            return self.locked_registers.count() > 0;
        }

        /// Allocates a specified number of registers, optionally
        /// tracking them. Returns `null` if not enough registers are
        /// free.
        pub fn tryAllocRegs(
            self: *Self,
            comptime count: comptime_int,
            insts: [count]?Air.Inst.Index,
            register_class: RegisterBitSet,
        ) ?[count]Register {
            comptime assert(count > 0 and count <= tracked_registers.len);

            // Candidates = free AND not locked AND in the requested class.
            var free_and_unlocked_registers = self.locked_registers;
            free_and_unlocked_registers.toggleAll();
            free_and_unlocked_registers.setIntersection(self.free_registers);
            free_and_unlocked_registers.setIntersection(register_class);

            var regs: [count]Register = undefined;
            var i: usize = 0;
            var it = free_and_unlocked_registers.iterator(.{});
            while (it.next()) |reg_index| {
                regs[i] = regAtTrackedIndex(@intCast(reg_index));
                i += 1;
                if (i >= count) break;
            }
            if (i < count) return null;

            for (regs, insts) |reg, inst| {
                log.debug("tryAllocReg {} for inst {?f}", .{ reg, inst });
                self.markRegAllocated(reg);

                if (inst) |tracked_inst| {
                    // Track the register
                    const index = indexOfRegIntoTracked(reg).?; // indexOfReg() on a callee-preserved reg should never return null
                    self.registers[index] = tracked_inst;
                    self.markRegUsed(reg);
                }
            }

            return regs;
        }

        /// Allocates a register and optionally tracks it with a
        /// corresponding instruction. Returns `null` if all registers
        /// are allocated.
        pub fn tryAllocReg(self: *Self, inst: ?Air.Inst.Index, register_class: RegisterBitSet) ?Register {
            return if (tryAllocRegs(self, 1, .{inst}, register_class)) |regs| regs[0] else null;
        }

        /// Allocates a specified number of registers, optionally
        /// tracking them. Asserts that count is not
        /// larger than the total number of registers available.
        pub fn allocRegs(
            self: *Self,
            comptime count: comptime_int,
            insts: [count]?Air.Inst.Index,
            register_class: RegisterBitSet,
        ) AllocationError![count]Register {
            comptime assert(count > 0 and count <= tracked_registers.len);

            const result = self.tryAllocRegs(count, insts, register_class) orelse blk: {
                var unlocked_registers = self.locked_registers;
                unlocked_registers.toggleAll();
                unlocked_registers.setIntersection(register_class);

                // We'll take over the first count registers. Spill
                // the instructions that were previously there to
                // stack allocations.
                var regs: [count]Register = undefined;
                var i: usize = 0;
                var it = unlocked_registers.iterator(.{});
                while (it.next()) |reg_index| {
                    const tracked_index: TrackedIndex = @intCast(reg_index);
                    if (!self.isRegIndexFree(tracked_index) and
                        self.registers[tracked_index].unwrap() == .target) continue;
                    try self.getRegIndex(tracked_index, insts[i]);
                    regs[i] = regAtTrackedIndex(tracked_index);
                    i += 1;
                    if (i >= count) break;
                }
                if (i < count) return error.OutOfRegisters;

                break :blk regs;
            };

            log.debug("allocated registers {any} for insts {any}", .{ result, insts });
            return result;
        }

        /// Allocates a register and optionally tracks it with a
        /// corresponding instruction.
        pub fn allocReg(
            self: *Self,
            inst: ?Air.Inst.Index,
            register_class: RegisterBitSet,
        ) AllocationError!Register {
            return (try self.allocRegs(1, .{inst}, register_class))[0];
        }

        /// Spills the register if it is currently allocated. If a
        /// corresponding instruction is passed, will also track this
        /// register.
        pub fn getRegIndex(
            self: *Self,
            tracked_index: TrackedIndex,
            inst: ?Air.Inst.Index,
        ) AllocationError!void {
            log.debug("getReg {} for inst {?f}", .{ regAtTrackedIndex(tracked_index), inst });
            if (!self.isRegIndexFree(tracked_index)) {
                // Move the instruction that was previously there to a
                // stack allocation.
                try self.getFunction().spillInstruction(
                    regAtTrackedIndex(tracked_index),
                    self.registers[tracked_index],
                );
                self.freeRegIndex(tracked_index);
            }
            self.getRegIndexAssumeFree(tracked_index, inst);
        }
        /// No-op when `reg` is not tracked.
        pub fn getReg(self: *Self, reg: Register, inst: ?Air.Inst.Index) AllocationError!void {
            log.debug("getting reg: {}", .{reg});
            return self.getRegIndex(indexOfRegIntoTracked(reg) orelse return, inst);
        }
        /// No-op when `reg` is not tracked.
        pub fn getKnownReg(
            self: *Self,
            comptime reg: Register,
            inst: ?Air.Inst.Index,
        ) AllocationError!void {
            return self.getRegIndex(indexOfKnownRegIntoTracked(reg) orelse return, inst);
        }

        /// Allocates the specified register with the specified
        /// instruction. Asserts that the register is free and no
        /// spilling is necessary.
        pub fn getRegIndexAssumeFree(
            self: *Self,
            tracked_index: TrackedIndex,
            inst: ?Air.Inst.Index,
        ) void {
            log.debug("getRegAssumeFree {} for inst {?f}", .{ regAtTrackedIndex(tracked_index), inst });
            self.markRegIndexAllocated(tracked_index);

            assert(self.isRegIndexFree(tracked_index));
            if (inst) |tracked_inst| {
                self.registers[tracked_index] = tracked_inst;
                self.markRegIndexUsed(tracked_index);
            }
        }
        /// No-op when `reg` is not tracked.
        pub fn getRegAssumeFree(self: *Self, reg: Register, inst: ?Air.Inst.Index) void {
            self.getRegIndexAssumeFree(indexOfRegIntoTracked(reg) orelse return, inst);
        }

        /// Marks the specified register as free
        pub fn freeRegIndex(self: *Self, tracked_index: TrackedIndex) void {
            log.debug("freeing register {}", .{regAtTrackedIndex(tracked_index)});
            self.registers[tracked_index] = undefined;
            self.markRegIndexFree(tracked_index);
        }
        /// No-op when `reg` is not tracked.
        pub fn freeReg(self: *Self, reg: Register) void {
            self.freeRegIndex(indexOfRegIntoTracked(reg) orelse return);
        }
    };
}
376
/// Test-only register type with two allocatable registers (r2, r3).
const MockRegister1 = enum(u2) {
    r0,
    r1,
    r2,
    r3,

    pub fn id(reg: MockRegister1) u2 {
        return @intFromEnum(reg);
    }

    const allocatable_registers = [_]MockRegister1{ .r2, .r3 };

    const RM = RegisterManager(
        MockFunction1,
        MockRegister1,
        &MockRegister1.allocatable_registers,
    );

    // Register class covering every allocatable register.
    const gp = blk: {
        var set: RM.RegisterBitSet = .initEmpty();
        set.setRangeValue(.{
            .start = 0,
            .end = allocatable_registers.len,
        }, true);
        break :blk set;
    };
};
404
/// Test-only register type where all four registers are allocatable.
const MockRegister2 = enum(u2) {
    r0,
    r1,
    r2,
    r3,

    pub fn id(reg: MockRegister2) u2 {
        return @intFromEnum(reg);
    }

    const allocatable_registers = [_]MockRegister2{ .r0, .r1, .r2, .r3 };

    const RM = RegisterManager(
        MockFunction2,
        MockRegister2,
        &MockRegister2.allocatable_registers,
    );

    // Register class covering every allocatable register.
    const gp = blk: {
        var set: RM.RegisterBitSet = .initEmpty();
        set.setRangeValue(.{
            .start = 0,
            .end = allocatable_registers.len,
        }, true);
        break :blk set;
    };
};
432
/// Test-only register type with two non-overlapping register classes:
/// general-purpose (r0-r3) and extended (x0-x3).
const MockRegister3 = enum(u3) {
    r0,
    r1,
    r2,
    r3,
    x0,
    x1,
    x2,
    x3,

    pub fn id(reg: MockRegister3) u3 {
        return switch (@intFromEnum(reg)) {
            0...3 => @as(u3, @as(u2, @truncate(@intFromEnum(reg)))),
            4...7 => @intFromEnum(reg),
        };
    }

    /// Encoding shared between rN and xN (low two bits).
    pub fn enc(reg: MockRegister3) u2 {
        return @as(u2, @truncate(@intFromEnum(reg)));
    }

    const gp_regs = [_]MockRegister3{ .r0, .r1, .r2, .r3 };
    const ext_regs = [_]MockRegister3{ .x0, .x1, .x2, .x3 };
    const allocatable_registers = gp_regs ++ ext_regs;

    const RM = RegisterManager(
        MockFunction3,
        MockRegister3,
        &MockRegister3.allocatable_registers,
    );

    // Register class: general-purpose registers only.
    const gp = blk: {
        var set: RM.RegisterBitSet = .initEmpty();
        set.setRangeValue(.{
            .start = 0,
            .end = gp_regs.len,
        }, true);
        break :blk set;
    };
    // Register class: extended registers only.
    const ext = blk: {
        var set: RM.RegisterBitSet = .initEmpty();
        set.setRangeValue(.{
            .start = gp_regs.len,
            .end = allocatable_registers.len,
        }, true);
        break :blk set;
    };
};
481
/// Test-only function type hosting a register manager; records every
/// register that gets spilled so tests can assert on spill behavior.
fn MockFunction(comptime Register: type) type {
    return struct {
        allocator: Allocator,
        register_manager: Register.RM = .{},
        spilled: std.ArrayList(Register) = .empty,

        const Self = @This();

        pub fn deinit(self: *Self) void {
            self.spilled.deinit(self.allocator);
        }

        pub fn spillInstruction(self: *Self, reg: Register, inst: Air.Inst.Index) !void {
            _ = inst;
            try self.spilled.append(self.allocator, reg);
        }

        pub fn genAdd(self: *Self, res: Register, lhs: Register, rhs: Register) !void {
            _ = self;
            _ = res;
            _ = lhs;
            _ = rhs;
        }
    };
}
507
const MockFunction1 = MockFunction(MockRegister1);
const MockFunction2 = MockFunction(MockRegister2);
const MockFunction3 = MockFunction(MockRegister3);
511
test "default state" {
    const allocator = std.testing.allocator;

    var function = MockFunction1{
        .allocator = allocator,
    };
    defer function.deinit();

    try expect(!function.register_manager.isRegAllocated(.r2));
    try expect(!function.register_manager.isRegAllocated(.r3));
    try expect(function.register_manager.isRegFree(.r2));
    try expect(function.register_manager.isRegFree(.r3));
}
525
test "tryAllocReg: no spilling" {
    const allocator = std.testing.allocator;

    var function = MockFunction1{
        .allocator = allocator,
    };
    defer function.deinit();

    const mock_instruction: Air.Inst.Index = 1;
    const gp = MockRegister1.gp;

    try expectEqual(@as(?MockRegister1, .r2), function.register_manager.tryAllocReg(mock_instruction, gp));
    try expectEqual(@as(?MockRegister1, .r3), function.register_manager.tryAllocReg(mock_instruction, gp));
    try expectEqual(@as(?MockRegister1, null), function.register_manager.tryAllocReg(mock_instruction, gp));

    try expect(function.register_manager.isRegAllocated(.r2));
    try expect(function.register_manager.isRegAllocated(.r3));
    try expect(!function.register_manager.isRegFree(.r2));
    try expect(!function.register_manager.isRegFree(.r3));

    function.register_manager.freeReg(.r2);
    function.register_manager.freeReg(.r3);

    // Freeing keeps the "was allocated at some point" bits set.
    try expect(function.register_manager.isRegAllocated(.r2));
    try expect(function.register_manager.isRegAllocated(.r3));
    try expect(function.register_manager.isRegFree(.r2));
    try expect(function.register_manager.isRegFree(.r3));
}
554
test "allocReg: spilling" {
    const allocator = std.testing.allocator;

    var function = MockFunction1{
        .allocator = allocator,
    };
    defer function.deinit();

    const mock_instruction: Air.Inst.Index = 1;
    const gp = MockRegister1.gp;

    try expectEqual(@as(?MockRegister1, .r2), try function.register_manager.allocReg(mock_instruction, gp));
    try expectEqual(@as(?MockRegister1, .r3), try function.register_manager.allocReg(mock_instruction, gp));

    // Spill a register
    try expectEqual(@as(?MockRegister1, .r2), try function.register_manager.allocReg(mock_instruction, gp));
    try expectEqualSlices(MockRegister1, &[_]MockRegister1{.r2}, function.spilled.items);

    // No spilling necessary
    function.register_manager.freeReg(.r3);
    try expectEqual(@as(?MockRegister1, .r3), try function.register_manager.allocReg(mock_instruction, gp));
    try expectEqualSlices(MockRegister1, &[_]MockRegister1{.r2}, function.spilled.items);

    // Locked registers
    function.register_manager.freeReg(.r3);
    {
        const lock = function.register_manager.lockReg(.r2);
        defer if (lock) |reg| function.register_manager.unlockReg(reg);

        try expectEqual(@as(?MockRegister1, .r3), try function.register_manager.allocReg(mock_instruction, gp));
    }
    try expect(!function.register_manager.lockedRegsExist());
}
588
test "tryAllocRegs" {
    const allocator = std.testing.allocator;

    var function = MockFunction2{
        .allocator = allocator,
    };
    defer function.deinit();

    const gp = MockRegister2.gp;

    try expectEqual([_]MockRegister2{ .r0, .r1, .r2 }, function.register_manager.tryAllocRegs(3, .{
        null,
        null,
        null,
    }, gp).?);

    try expect(function.register_manager.isRegAllocated(.r0));
    try expect(function.register_manager.isRegAllocated(.r1));
    try expect(function.register_manager.isRegAllocated(.r2));
    try expect(!function.register_manager.isRegAllocated(.r3));

    // Locked registers
    function.register_manager.freeReg(.r0);
    function.register_manager.freeReg(.r2);
    function.register_manager.freeReg(.r3);
    {
        const lock = function.register_manager.lockReg(.r1);
        defer if (lock) |reg| function.register_manager.unlockReg(reg);

        // r1 is locked, so allocation skips over it.
        try expectEqual([_]MockRegister2{ .r0, .r2, .r3 }, function.register_manager.tryAllocRegs(3, .{
            null,
            null,
            null,
        }, gp).?);
    }
    try expect(!function.register_manager.lockedRegsExist());

    try expect(function.register_manager.isRegAllocated(.r0));
    try expect(function.register_manager.isRegAllocated(.r1));
    try expect(function.register_manager.isRegAllocated(.r2));
    try expect(function.register_manager.isRegAllocated(.r3));
}
631
test "allocRegs: normal usage" {
    // TODO: convert this into a decltest once that is supported

    const allocator = std.testing.allocator;

    var function = MockFunction2{
        .allocator = allocator,
    };
    defer function.deinit();

    const gp = MockRegister2.gp;

    {
        const result_reg: MockRegister2 = .r1;

        // The result register is known and fixed at this point, we
        // don't want to accidentally allocate lhs or rhs to the
        // result register, this is why we lock it.
        //
        // Using defer unlock right after lock is a good idea in
        // most cases as you probably are using the locked registers
        // in the remainder of this scope and don't need to use it
        // after the end of this scope. However, in some situations,
        // it may make sense to manually unlock registers before the
        // end of the scope when you are certain that they don't
        // contain any valuable data anymore and can be reused. For an
        // example of that, see `selectively reducing register
        // pressure`.
        const lock = function.register_manager.lockReg(result_reg);
        defer if (lock) |reg| function.register_manager.unlockReg(reg);

        const regs = try function.register_manager.allocRegs(2, .{ null, null }, gp);
        try function.genAdd(result_reg, regs[0], regs[1]);
    }
}
667
test "allocRegs: selectively reducing register pressure" {
    // TODO: convert this into a decltest once that is supported

    const allocator = std.testing.allocator;

    var function = MockFunction2{
        .allocator = allocator,
    };
    defer function.deinit();

    const gp = MockRegister2.gp;

    {
        const result_reg: MockRegister2 = .r1;

        const lock = function.register_manager.lockReg(result_reg);

        // Here, we don't defer unlock because we manually unlock
        // after genAdd
        const regs = try function.register_manager.allocRegs(2, .{ null, null }, gp);

        try function.genAdd(result_reg, regs[0], regs[1]);
        function.register_manager.unlockReg(lock.?);

        const extra_summand_reg = try function.register_manager.allocReg(null, gp);
        try function.genAdd(result_reg, result_reg, extra_summand_reg);
    }
}
696
test "getReg" {
    const allocator = std.testing.allocator;

    var function = MockFunction1{
        .allocator = allocator,
    };
    defer function.deinit();

    const mock_instruction: Air.Inst.Index = 1;

    try function.register_manager.getReg(.r3, mock_instruction);

    try expect(!function.register_manager.isRegAllocated(.r2));
    try expect(function.register_manager.isRegAllocated(.r3));
    try expect(function.register_manager.isRegFree(.r2));
    try expect(!function.register_manager.isRegFree(.r3));

    // Spill r3
    try function.register_manager.getReg(.r3, mock_instruction);

    try expect(!function.register_manager.isRegAllocated(.r2));
    try expect(function.register_manager.isRegAllocated(.r3));
    try expect(function.register_manager.isRegFree(.r2));
    try expect(!function.register_manager.isRegFree(.r3));
    try expectEqualSlices(MockRegister1, &[_]MockRegister1{.r3}, function.spilled.items);
}
723
test "allocReg with multiple, non-overlapping register classes" {
    const allocator = std.testing.allocator;

    var function = MockFunction3{
        .allocator = allocator,
    };
    defer function.deinit();

    const gp = MockRegister3.gp;
    const ext = MockRegister3.ext;

    const gp_reg = try function.register_manager.allocReg(null, gp);

    try expect(function.register_manager.isRegAllocated(.r0));
    try expect(!function.register_manager.isRegAllocated(.x0));

    const ext_reg = try function.register_manager.allocReg(null, ext);

    try expect(function.register_manager.isRegAllocated(.r0));
    try expect(!function.register_manager.isRegAllocated(.r1));
    try expect(function.register_manager.isRegAllocated(.x0));
    try expect(!function.register_manager.isRegAllocated(.x1));
    try expect(gp_reg.enc() == ext_reg.enc());

    const ext_lock = function.register_manager.lockRegAssumeUnused(ext_reg);
    defer function.register_manager.unlockReg(ext_lock);

    const ext_reg2 = try function.register_manager.allocReg(null, ext);

    try expect(function.register_manager.isRegAllocated(.r0));
    try expect(function.register_manager.isRegAllocated(.x0));
    try expect(!function.register_manager.isRegAllocated(.r1));
    try expect(function.register_manager.isRegAllocated(.x1));
    try expect(ext_reg2.enc() == MockRegister3.r1.enc());
}