1//! This file contains the functionality for lowering x86_64 MIR to Instructions
2
/// Target machine; consulted when encoding instructions.
target: *const std.Target,
/// Used only for allocating the error message in `fail`.
allocator: std.mem.Allocator,
/// The MIR being lowered.
mir: Mir,
/// Calling convention of the function being lowered; selects the
/// callee-preserved register set in `pushPopRegList`.
cc: std.builtin.CallingConvention,
/// Set by `fail`; remains null while no lowering error has occurred.
err_msg: ?*Zcu.ErrorMsg = null,
src_loc: Zcu.LazySrcLoc,
/// Number of valid entries in `result_insts`; only meaningful during `lowerMir`.
result_insts_len: ResultInstIndex = undefined,
/// Scratch output buffer, overwritten by each `lowerMir` call.
result_insts: [max_result_insts]Instruction = undefined,
/// Number of valid entries in `result_relocs`; only meaningful during `lowerMir`.
result_relocs_len: ResultRelocIndex = undefined,
/// Scratch relocation buffer, overwritten by each `lowerMir` call.
result_relocs: [max_result_relocs]Reloc = undefined,
13
/// Upper bound on how many machine instructions a single MIR instruction can
/// lower to; sizes the `result_insts` scratch buffer so `encode` never overflows.
const max_result_insts = @max(
    1, // non-pseudo instructions
    2, // cmovcc: cmovcc \ cmovcc
    3, // setcc: setcc \ setcc \ logicop
    2, // jcc: jcc \ jcc
    pseudo_probe_align_insts,
    pseudo_probe_adjust_unrolled_max_insts,
    pseudo_probe_adjust_setup_insts,
    pseudo_probe_adjust_loop_insts,
    abi.zigcc.callee_preserved_regs.len * 2, // push_regs/pop_regs
    abi.Win64.callee_preserved_regs.len * 2, // push_regs/pop_regs
    abi.SysV.callee_preserved_regs.len * 2, // push_regs/pop_regs
);
/// Upper bound on how many relocations a single MIR instruction can produce;
/// sizes the `result_relocs` scratch buffer so `reloc` never overflows.
const max_result_relocs = @max(
    1, // jmp/jcc/call/mov/lea: jmp/jcc/call/mov/lea
    2, // jcc: jcc \ jcc
    2, // test \ jcc \ probe \ sub \ jmp
    1, // probe \ sub \ jcc
);
33
/// Smallest unsigned integer type that can count up to `max_result_insts`.
const ResultInstIndex = std.math.IntFittingRange(0, max_result_insts);
/// Smallest unsigned integer type that can count up to `max_result_relocs`.
const ResultRelocIndex = std.math.IntFittingRange(0, max_result_relocs);
/// Index of an operand within a lowered `Instruction`'s `ops` array; used by
/// `Reloc.op_index` to identify which operand the relocation applies to.
pub const InstOpIndex = std.math.IntFittingRange(
    0,
    @typeInfo(@FieldType(Instruction, "ops")).array.len,
);
40
/// Instruction counts emitted by the stack-probe pseudo instructions below;
/// the corresponding `lowerMir` cases assert these exact (or maximum) lengths.
pub const pseudo_probe_align_insts = 5; // test \ jcc \ probe \ sub \ jmp
pub const pseudo_probe_adjust_unrolled_max_insts =
    pseudo_probe_adjust_setup_insts + pseudo_probe_adjust_loop_insts;
pub const pseudo_probe_adjust_setup_insts = 2; // mov \ sub
pub const pseudo_probe_adjust_loop_insts = 3; // probe \ sub \ jcc
46
/// Errors produced while lowering. `LowerFail` is paired with `err_msg`
/// being populated by `fail`.
pub const Error = error{
    OutOfMemory,
    LowerFail,
    InvalidInstruction,
    CannotEncode,
    CodegenFail,
} || codegen.GenerateSymbolError;
54
/// A relocation to be applied to one operand of one lowered instruction.
pub const Reloc = struct {
    /// Index into the `insts` slice returned by `lowerMir`.
    lowered_inst_index: ResultInstIndex,
    /// Which operand of that instruction the relocation targets.
    op_index: InstOpIndex,
    target: Target,
    /// Addend applied to the resolved target address.
    off: i32,

    /// What the relocation resolves against.
    const Target = union(enum) {
        /// Another MIR instruction (branch target).
        inst: Mir.Inst.Index,
        table,
        nav: InternPool.Nav.Index,
        uav: InternPool.Key.Ptr.BaseAddr.Uav,
        lazy_sym: link.File.LazySymbol,
        extern_func: Mir.NullTerminatedString,
    };
};
70
// NOTE(review): not referenced anywhere in this file's visible code —
// presumably consumed by callers of the lowering pass; confirm before removal.
const Options = struct { allow_frame_locs: bool };
72
/// Lowers a single MIR instruction into one or more machine `Instruction`s
/// plus any relocations referencing their operands.
/// The returned slice is overwritten by the next call to lowerMir.
pub fn lowerMir(lower: *Lower, index: Mir.Inst.Index) Error!struct {
    insts: []Instruction,
    relocs: []const Reloc,
} {
    // Reset the scratch buffers; on error or after returning they are
    // poisoned again so stale results are never observed.
    lower.result_insts = undefined;
    lower.result_relocs = undefined;
    errdefer lower.result_insts = undefined;
    errdefer lower.result_relocs = undefined;
    lower.result_insts_len = 0;
    lower.result_relocs_len = 0;
    defer lower.result_insts_len = undefined;
    defer lower.result_relocs_len = undefined;

    const inst = lower.mir.instructions.get(index);
    switch (inst.tag) {
        // Non-pseudo instructions lower via the generic fixes/mnemonic path.
        else => try lower.generic(inst),
        .pseudo => switch (inst.ops) {
            // ZF=1 and PF=0 ("ordered equal" after ucomis*): emulate with two
            // cmovs in opposite directions since no single cc expresses it.
            .pseudo_cmov_z_and_np_rr => {
                assert(inst.data.rr.fixes == ._);
                try lower.encode(.none, .cmovnz, &.{
                    .{ .reg = inst.data.rr.r2 },
                    .{ .reg = inst.data.rr.r1 },
                });
                try lower.encode(.none, .cmovnp, &.{
                    .{ .reg = inst.data.rr.r1 },
                    .{ .reg = inst.data.rr.r2 },
                });
            },
            // ZF=0 or PF=1: two cmovs in the same direction.
            .pseudo_cmov_nz_or_p_rr => {
                assert(inst.data.rr.fixes == ._);
                try lower.encode(.none, .cmovnz, &.{
                    .{ .reg = inst.data.rr.r1 },
                    .{ .reg = inst.data.rr.r2 },
                });
                try lower.encode(.none, .cmovp, &.{
                    .{ .reg = inst.data.rr.r1 },
                    .{ .reg = inst.data.rr.r2 },
                });
            },
            .pseudo_cmov_nz_or_p_rm => {
                assert(inst.data.rx.fixes == ._);
                try lower.encode(.none, .cmovnz, &.{
                    .{ .reg = inst.data.rx.r1 },
                    .{ .mem = lower.mem(1, inst.data.rx.payload) },
                });
                try lower.encode(.none, .cmovp, &.{
                    .{ .reg = inst.data.rx.r1 },
                    .{ .mem = lower.mem(1, inst.data.rx.payload) },
                });
            },
            // setcc for "ZF=1 and PF=0": set both conditions then AND them.
            .pseudo_set_z_and_np_r => {
                assert(inst.data.rr.fixes == ._);
                try lower.encode(.none, .setz, &.{
                    .{ .reg = inst.data.rr.r1 },
                });
                try lower.encode(.none, .setnp, &.{
                    .{ .reg = inst.data.rr.r2 },
                });
                try lower.encode(.none, .@"and", &.{
                    .{ .reg = inst.data.rr.r1 },
                    .{ .reg = inst.data.rr.r2 },
                });
            },
            .pseudo_set_z_and_np_m => {
                assert(inst.data.rx.fixes == ._);
                try lower.encode(.none, .setz, &.{
                    .{ .mem = lower.mem(0, inst.data.rx.payload) },
                });
                try lower.encode(.none, .setnp, &.{
                    .{ .reg = inst.data.rx.r1 },
                });
                try lower.encode(.none, .@"and", &.{
                    .{ .mem = lower.mem(0, inst.data.rx.payload) },
                    .{ .reg = inst.data.rx.r1 },
                });
            },
            // setcc for "ZF=0 or PF=1": set both conditions then OR them.
            .pseudo_set_nz_or_p_r => {
                assert(inst.data.rr.fixes == ._);
                try lower.encode(.none, .setnz, &.{
                    .{ .reg = inst.data.rr.r1 },
                });
                try lower.encode(.none, .setp, &.{
                    .{ .reg = inst.data.rr.r2 },
                });
                try lower.encode(.none, .@"or", &.{
                    .{ .reg = inst.data.rr.r1 },
                    .{ .reg = inst.data.rr.r2 },
                });
            },
            .pseudo_set_nz_or_p_m => {
                assert(inst.data.rx.fixes == ._);
                try lower.encode(.none, .setnz, &.{
                    .{ .mem = lower.mem(0, inst.data.rx.payload) },
                });
                try lower.encode(.none, .setp, &.{
                    .{ .reg = inst.data.rx.r1 },
                });
                try lower.encode(.none, .@"or", &.{
                    .{ .mem = lower.mem(0, inst.data.rx.payload) },
                    .{ .reg = inst.data.rx.r1 },
                });
            },
            // Branch if "ZF=1 and PF=0": jump over the real branch when ZF=0,
            // then take it only if PF=0. The first jnz targets `index + 1`,
            // i.e. the instruction following this pseudo.
            .pseudo_j_z_and_np_inst => {
                assert(inst.data.inst.fixes == ._);
                try lower.encode(.none, .jnz, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = index + 1 }, 0) },
                });
                try lower.encode(.none, .jnp, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = inst.data.inst.inst }, 0) },
                });
            },
            // Branch if "ZF=0 or PF=1": both jumps share the same target.
            .pseudo_j_nz_or_p_inst => {
                assert(inst.data.inst.fixes == ._);
                try lower.encode(.none, .jnz, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = inst.data.inst.inst }, 0) },
                });
                try lower.encode(.none, .jp, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = inst.data.inst.inst }, 0) },
                });
            },

            // Stack probing while aligning: loop touching one page at a time
            // until r1 reaches the requested alignment mask. The final jmp
            // relocates back to `index` (this pseudo), forming the loop.
            .pseudo_probe_align_ri_s => {
                try lower.encode(.none, .@"test", &.{
                    .{ .reg = inst.data.ri.r1 },
                    .{ .imm = .s(@bitCast(inst.data.ri.i)) },
                });
                try lower.encode(.none, .jz, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = index + 1 }, 0) },
                });
                try lower.encode(.none, .lea, &.{
                    .{ .reg = inst.data.ri.r1 },
                    .{ .mem = Memory.initSib(.qword, .{
                        .base = .{ .reg = inst.data.ri.r1 },
                        .disp = -page_size,
                    }) },
                });
                // The probe: a load from the newly exposed page.
                try lower.encode(.none, .@"test", &.{
                    .{ .mem = Memory.initSib(.dword, .{
                        .base = .{ .reg = inst.data.ri.r1 },
                    }) },
                    .{ .reg = inst.data.ri.r1.to32() },
                });
                try lower.encode(.none, .jmp, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = index }, 0) },
                });
                assert(lower.result_insts_len == pseudo_probe_align_insts);
            },
            // Small fixed-size adjustment: emit one probe per page below the
            // current stack pointer, then subtract the full amount at once.
            .pseudo_probe_adjust_unrolled_ri_s => {
                var offset = page_size;
                while (offset < @as(i32, @bitCast(inst.data.ri.i))) : (offset += page_size) {
                    try lower.encode(.none, .@"test", &.{
                        .{ .mem = Memory.initSib(.dword, .{
                            .base = .{ .reg = inst.data.ri.r1 },
                            .disp = -offset,
                        }) },
                        .{ .reg = inst.data.ri.r1.to32() },
                    });
                }
                try lower.encode(.none, .sub, &.{
                    .{ .reg = inst.data.ri.r1 },
                    .{ .imm = .s(@bitCast(inst.data.ri.i)) },
                });
                assert(lower.result_insts_len <= pseudo_probe_adjust_unrolled_max_insts);
            },
            // Large adjustment, setup half: load remaining byte count into r2
            // and pre-subtract it from r1; the loop pseudo below walks back up.
            .pseudo_probe_adjust_setup_rri_s => {
                try lower.encode(.none, .mov, &.{
                    .{ .reg = inst.data.rri.r2.to32() },
                    .{ .imm = .s(@bitCast(inst.data.rri.i)) },
                });
                try lower.encode(.none, .sub, &.{
                    .{ .reg = inst.data.rri.r1 },
                    .{ .reg = inst.data.rri.r2 },
                });
                assert(lower.result_insts_len == pseudo_probe_adjust_setup_insts);
            },
            // Large adjustment, loop half: probe [r1 + r2 - page], decrement
            // the counter by a page, and loop (jae back to this pseudo) while
            // pages remain.
            .pseudo_probe_adjust_loop_rr => {
                try lower.encode(.none, .@"test", &.{
                    .{ .mem = Memory.initSib(.dword, .{
                        .base = .{ .reg = inst.data.rr.r1 },
                        .scale_index = .{ .scale = 1, .index = inst.data.rr.r2 },
                        .disp = -page_size,
                    }) },
                    .{ .reg = inst.data.rr.r1.to32() },
                });
                try lower.encode(.none, .sub, &.{
                    .{ .reg = inst.data.rr.r2 },
                    .{ .imm = .s(page_size) },
                });
                try lower.encode(.none, .jae, &.{
                    .{ .imm = lower.reloc(0, .{ .inst = index }, 0) },
                });
                assert(lower.result_insts_len == pseudo_probe_adjust_loop_insts);
            },
            .pseudo_push_reg_list => try lower.pushPopRegList(.push, inst),
            .pseudo_pop_reg_list => try lower.pushPopRegList(.pop, inst),

            // CFI pseudos lower 1:1 to assembler directives.
            .pseudo_cfi_def_cfa_ri_s => try lower.encode(.directive, .@".cfi_def_cfa", &.{
                .{ .reg = inst.data.ri.r1 },
                .{ .imm = lower.imm(.ri_s, inst.data.ri.i) },
            }),
            .pseudo_cfi_def_cfa_register_r => try lower.encode(.directive, .@".cfi_def_cfa_register", &.{
                .{ .reg = inst.data.r.r1 },
            }),
            .pseudo_cfi_def_cfa_offset_i_s => try lower.encode(.directive, .@".cfi_def_cfa_offset", &.{
                .{ .imm = lower.imm(.i_s, inst.data.i.i) },
            }),
            .pseudo_cfi_adjust_cfa_offset_i_s => try lower.encode(.directive, .@".cfi_adjust_cfa_offset", &.{
                .{ .imm = lower.imm(.i_s, inst.data.i.i) },
            }),
            .pseudo_cfi_offset_ri_s => try lower.encode(.directive, .@".cfi_offset", &.{
                .{ .reg = inst.data.ri.r1 },
                .{ .imm = lower.imm(.ri_s, inst.data.ri.i) },
            }),
            .pseudo_cfi_val_offset_ri_s => try lower.encode(.directive, .@".cfi_val_offset", &.{
                .{ .reg = inst.data.ri.r1 },
                .{ .imm = lower.imm(.ri_s, inst.data.ri.i) },
            }),
            .pseudo_cfi_rel_offset_ri_s => try lower.encode(.directive, .@".cfi_rel_offset", &.{
                .{ .reg = inst.data.ri.r1 },
                .{ .imm = lower.imm(.ri_s, inst.data.ri.i) },
            }),
            .pseudo_cfi_register_rr => try lower.encode(.directive, .@".cfi_register", &.{
                .{ .reg = inst.data.rr.r1 },
                .{ .reg = inst.data.rr.r2 },
            }),
            .pseudo_cfi_restore_r => try lower.encode(.directive, .@".cfi_restore", &.{
                .{ .reg = inst.data.r.r1 },
            }),
            .pseudo_cfi_undefined_r => try lower.encode(.directive, .@".cfi_undefined", &.{
                .{ .reg = inst.data.r.r1 },
            }),
            .pseudo_cfi_same_value_r => try lower.encode(.directive, .@".cfi_same_value", &.{
                .{ .reg = inst.data.r.r1 },
            }),
            .pseudo_cfi_remember_state_none => try lower.encode(.directive, .@".cfi_remember_state", &.{}),
            .pseudo_cfi_restore_state_none => try lower.encode(.directive, .@".cfi_restore_state", &.{}),
            .pseudo_cfi_escape_bytes => try lower.encode(.directive, .@".cfi_escape", &.{
                .{ .bytes = inst.data.bytes.get(lower.mir) },
            }),

            // Debug-info pseudos produce no machine code here.
            .pseudo_dbg_prologue_end_none,
            .pseudo_dbg_line_stmt_line_column,
            .pseudo_dbg_line_line_column,
            .pseudo_dbg_epilogue_begin_none,
            .pseudo_dbg_enter_block_none,
            .pseudo_dbg_leave_block_none,
            .pseudo_dbg_enter_inline_func,
            .pseudo_dbg_leave_inline_func,
            .pseudo_dbg_arg_none,
            .pseudo_dbg_arg_i_s,
            .pseudo_dbg_arg_i_u,
            .pseudo_dbg_arg_i_64,
            .pseudo_dbg_arg_ro,
            .pseudo_dbg_arg_fa,
            .pseudo_dbg_arg_m,
            .pseudo_dbg_arg_val,
            .pseudo_dbg_var_args_none,
            .pseudo_dbg_var_none,
            .pseudo_dbg_var_i_s,
            .pseudo_dbg_var_i_u,
            .pseudo_dbg_var_i_64,
            .pseudo_dbg_var_ro,
            .pseudo_dbg_var_fa,
            .pseudo_dbg_var_m,
            .pseudo_dbg_var_val,

            .pseudo_dead_none,
            => {},
            else => unreachable,
        },
    }

    return .{
        .insts = lower.result_insts[0..lower.result_insts_len],
        .relocs = lower.result_relocs[0..lower.result_relocs_len],
    };
}
351
/// Records a formatted lowering error in `lower.err_msg` (which must not
/// already be set) and returns `error.LowerFail`.
pub fn fail(lower: *Lower, comptime format: []const u8, args: anytype) Error {
    @branchHint(.cold);
    assert(lower.err_msg == null);
    const msg = try Zcu.ErrorMsg.create(lower.allocator, lower.src_loc, format, args);
    lower.err_msg = msg;
    return error.LowerFail;
}
358
/// Converts a raw MIR immediate payload into an `Immediate`, interpreting it
/// as signed, unsigned, or a 64-bit extra-data reference depending on the
/// operand layout `ops`.
pub fn imm(lower: *const Lower, ops: Mir.Inst.Ops, i: u32) Immediate {
    return switch (ops) {
        // `_s` layouts store a signed 32-bit value bit-cast into the u32 field.
        .rri_s,
        .ri_s,
        .i_s,
        .mi_s,
        .rmi_s,
        .pseudo_dbg_arg_i_s,
        .pseudo_dbg_var_i_s,
        => .s(@bitCast(i)),

        // `_u` and small-immediate layouts store the value unsigned as-is.
        .ii,
        .ir,
        .rrri,
        .rri_u,
        .ri_u,
        .i_u,
        .mi_u,
        .rmi,
        .rmi_u,
        .mri,
        .rrm,
        .rrmi,
        .pseudo_dbg_arg_i_u,
        .pseudo_dbg_var_i_u,
        => .u(i),

        // `i` is an index into MIR extra data holding a split 64-bit value.
        .ri_64,
        => .u(lower.mir.extraData(Mir.Imm64, i).data.decode()),

        .pseudo_dbg_arg_i_64,
        .pseudo_dbg_var_i_64,
        => unreachable,

        else => unreachable,
    };
}
396
/// Decodes a memory operand from MIR extra data. For symbolic SIB bases a
/// relocation is recorded (via `reloc`, attached to operand `op_index` of the
/// instruction currently being emitted) and the displacement is reset to the
/// relocation's placeholder value.
fn mem(lower: *Lower, op_index: InstOpIndex, payload: u32) Memory {
    var m = lower.mir.resolveMemoryExtra(payload).decode();
    switch (m) {
        .sib => |*sib| switch (sib.base) {
            // Concrete bases need no relocation.
            .none, .reg, .frame => {},
            .table => sib.disp = lower.reloc(op_index, .table, sib.disp).signed,
            .rip_inst => |inst_index| sib.disp = lower.reloc(op_index, .{ .inst = inst_index }, sib.disp).signed,
            .nav => |nav| sib.disp = lower.reloc(op_index, .{ .nav = nav }, sib.disp).signed,
            .uav => |uav| sib.disp = lower.reloc(op_index, .{ .uav = uav }, sib.disp).signed,
            .lazy_sym => |lazy_sym| sib.disp = lower.reloc(op_index, .{ .lazy_sym = lazy_sym }, sib.disp).signed,
            .extern_func => |extern_func| sib.disp = lower.reloc(op_index, .{ .extern_func = extern_func }, sib.disp).signed,
        },
        else => {},
    }
    return m;
}
413
/// Records a relocation against operand `op_index` of the instruction about
/// to be emitted (the one at the current `result_insts_len`), and returns a
/// zero immediate to stand in for the not-yet-resolved value.
fn reloc(lower: *Lower, op_index: InstOpIndex, target: Reloc.Target, off: i32) Immediate {
    const slot = &lower.result_relocs[lower.result_relocs_len];
    slot.* = .{
        .lowered_inst_index = lower.result_insts_len,
        .op_index = op_index,
        .target = target,
        .off = off,
    };
    lower.result_relocs_len += 1;
    return .s(0);
}
424
/// Encodes one machine instruction and appends it to the result buffer.
fn encode(lower: *Lower, prefix: Prefix, mnemonic: Mnemonic, ops: []const Operand) Error!void {
    const new_inst = try Instruction.new(prefix, mnemonic, ops, lower.target);
    lower.result_insts[lower.result_insts_len] = new_inst;
    lower.result_insts_len += 1;
}
429
const inst_tags_len = @typeInfo(Mir.Inst.Tag).@"enum".fields.len;
const inst_fixes_len = @typeInfo(Mir.Inst.Fixes).@"enum".fields.len;
/// Lookup table, indexed by `@intFromEnum(inst.tag) * inst_fixes_len + @intFromEnum(fixes)`.
/// The value is the resulting `Mnemonic`, or `null` if the combination is not valid.
/// Built at comptime by splicing each tag name into each fixes pattern: a
/// fixes tag reads as an optional "<prefix> " segment before a space, then a
/// pattern containing exactly one `_` wildcard where the tag name goes.
const mnemonic_table: [inst_tags_len * inst_fixes_len]?Mnemonic = table: {
    @setEvalBranchQuota(80_000);
    var table: [inst_tags_len * inst_fixes_len]?Mnemonic = undefined;
    for (0..inst_fixes_len) |fixes_i| {
        const fixes: Mir.Inst.Fixes = @enumFromInt(fixes_i);
        const prefix, const suffix = affix: {
            // Drop the instruction-prefix segment (before the space), if any.
            const pattern = if (std.mem.indexOfScalar(u8, @tagName(fixes), ' ')) |i|
                @tagName(fixes)[i + 1 ..]
            else
                @tagName(fixes);
            // Split the remainder around the `_` wildcard.
            const wildcard_idx = std.mem.indexOfScalar(u8, pattern, '_').?;
            break :affix .{ pattern[0..wildcard_idx], pattern[wildcard_idx + 1 ..] };
        };
        for (0..inst_tags_len) |inst_tag_i| {
            const inst_tag: Mir.Inst.Tag = @enumFromInt(inst_tag_i);
            const name = prefix ++ @tagName(inst_tag) ++ suffix;
            const idx = inst_tag_i * inst_fixes_len + fixes_i;
            table[idx] = if (@hasField(Mnemonic, name)) @field(Mnemonic, name) else null;
        }
    }
    break :table table;
};
456
/// Lowers a non-pseudo MIR instruction: extracts the `fixes` for its operand
/// layout, resolves (prefix, mnemonic) via `mnemonic_table`, and builds the
/// operand list from the instruction's data payload.
fn generic(lower: *Lower, inst: Mir.Inst) Error!void {
    @setEvalBranchQuota(2_000);
    // Which data union field carries `fixes` depends on the operand layout.
    const fixes = switch (inst.ops) {
        .none => inst.data.none.fixes,
        .inst => inst.data.inst.fixes,
        .i_s, .i_u => inst.data.i.fixes,
        .ii => inst.data.ii.fixes,
        .r => inst.data.r.fixes,
        .rr => inst.data.rr.fixes,
        .rrr => inst.data.rrr.fixes,
        .rrrr => inst.data.rrrr.fixes,
        .rrri => inst.data.rrri.fixes,
        .rri_s, .rri_u => inst.data.rri.fixes,
        .ri_s, .ri_u, .ri_64, .ir => inst.data.ri.fixes,
        .rm, .rmi_s, .rmi_u, .mr => inst.data.rx.fixes,
        .mrr, .rrm, .rmr => inst.data.rrx.fixes,
        .rmi, .mri => inst.data.rix.fixes,
        .rrmr => inst.data.rrrx.fixes,
        .rrmi => inst.data.rrix.fixes,
        .mi_u, .mi_s => inst.data.x.fixes,
        .m => inst.data.x.fixes,
        .nav, .uav, .lazy_sym, .extern_func => ._,
        else => return lower.fail("TODO lower .{s}", .{@tagName(inst.ops)}),
    };
    try lower.encode(switch (fixes) {
        // A space in the fixes tag name means it starts with an instruction
        // prefix (e.g. a rep-style prefix); otherwise no prefix.
        inline else => |tag| comptime if (std.mem.indexOfScalar(u8, @tagName(tag), ' ')) |space|
            @field(Prefix, @tagName(tag)[0..space])
        else
            .none,
    }, mnemonic: {
        if (mnemonic_table[@intFromEnum(inst.tag) * inst_fixes_len + @intFromEnum(fixes)]) |mnemonic| {
            break :mnemonic mnemonic;
        }
        // This combination is invalid; make the theoretical mnemonic name and emit an error with it.
        const fixes_name = @tagName(fixes);
        const pattern = fixes_name[if (std.mem.indexOfScalar(u8, fixes_name, ' ')) |i| i + " ".len else 0..];
        const wildcard_index = std.mem.indexOfScalar(u8, pattern, '_').?;
        return lower.fail("unsupported mnemonic: '{s}{s}{s}'", .{
            pattern[0..wildcard_index],
            @tagName(inst.tag),
            pattern[wildcard_index + "_".len ..],
        });
    }, switch (inst.ops) {
        // Operand lists per layout. `lower.mem`'s first argument is the index
        // of the memory operand within this list, so memory relocations attach
        // to the right operand slot.
        .none => &.{},
        .inst => &.{
            .{ .imm = lower.reloc(0, .{ .inst = inst.data.inst.inst }, 0) },
        },
        .i_s, .i_u => &.{
            .{ .imm = lower.imm(inst.ops, inst.data.i.i) },
        },
        .ii => &.{
            .{ .imm = lower.imm(inst.ops, inst.data.ii.i1) },
            .{ .imm = lower.imm(inst.ops, inst.data.ii.i2) },
        },
        .ir => &.{
            .{ .imm = lower.imm(inst.ops, inst.data.ri.i) },
            .{ .reg = inst.data.ri.r1 },
        },
        .r => &.{
            .{ .reg = inst.data.r.r1 },
        },
        .rr => &.{
            .{ .reg = inst.data.rr.r1 },
            .{ .reg = inst.data.rr.r2 },
        },
        .rrr => &.{
            .{ .reg = inst.data.rrr.r1 },
            .{ .reg = inst.data.rrr.r2 },
            .{ .reg = inst.data.rrr.r3 },
        },
        .rrrr => &.{
            .{ .reg = inst.data.rrrr.r1 },
            .{ .reg = inst.data.rrrr.r2 },
            .{ .reg = inst.data.rrrr.r3 },
            .{ .reg = inst.data.rrrr.r4 },
        },
        .rrri => &.{
            .{ .reg = inst.data.rrri.r1 },
            .{ .reg = inst.data.rrri.r2 },
            .{ .reg = inst.data.rrri.r3 },
            .{ .imm = lower.imm(inst.ops, inst.data.rrri.i) },
        },
        .ri_s, .ri_u, .ri_64 => &.{
            .{ .reg = inst.data.ri.r1 },
            .{ .imm = lower.imm(inst.ops, inst.data.ri.i) },
        },
        .rri_s, .rri_u => &.{
            .{ .reg = inst.data.rri.r1 },
            .{ .reg = inst.data.rri.r2 },
            .{ .imm = lower.imm(inst.ops, inst.data.rri.i) },
        },
        .m => &.{
            .{ .mem = lower.mem(0, inst.data.x.payload) },
        },
        // The immediate lives at `payload`, the memory extra data right after.
        .mi_s, .mi_u => &.{
            .{ .mem = lower.mem(0, inst.data.x.payload + 1) },
            .{ .imm = lower.imm(
                inst.ops,
                lower.mir.extraData(Mir.Imm32, inst.data.x.payload).data.imm,
            ) },
        },
        .rm => &.{
            .{ .reg = inst.data.rx.r1 },
            .{ .mem = lower.mem(1, inst.data.rx.payload) },
        },
        .rmr => &.{
            .{ .reg = inst.data.rrx.r1 },
            .{ .mem = lower.mem(1, inst.data.rrx.payload) },
            .{ .reg = inst.data.rrx.r2 },
        },
        .rmi => &.{
            .{ .reg = inst.data.rix.r1 },
            .{ .mem = lower.mem(1, inst.data.rix.payload) },
            .{ .imm = lower.imm(inst.ops, inst.data.rix.i) },
        },
        .rmi_s, .rmi_u => &.{
            .{ .reg = inst.data.rx.r1 },
            .{ .mem = lower.mem(1, inst.data.rx.payload + 1) },
            .{ .imm = lower.imm(
                inst.ops,
                lower.mir.extraData(Mir.Imm32, inst.data.rx.payload).data.imm,
            ) },
        },
        .mr => &.{
            .{ .mem = lower.mem(0, inst.data.rx.payload) },
            .{ .reg = inst.data.rx.r1 },
        },
        .mrr => &.{
            .{ .mem = lower.mem(0, inst.data.rrx.payload) },
            .{ .reg = inst.data.rrx.r1 },
            .{ .reg = inst.data.rrx.r2 },
        },
        .mri => &.{
            .{ .mem = lower.mem(0, inst.data.rix.payload) },
            .{ .reg = inst.data.rix.r1 },
            .{ .imm = lower.imm(inst.ops, inst.data.rix.i) },
        },
        .rrm => &.{
            .{ .reg = inst.data.rrx.r1 },
            .{ .reg = inst.data.rrx.r2 },
            .{ .mem = lower.mem(2, inst.data.rrx.payload) },
        },
        .rrmr => &.{
            .{ .reg = inst.data.rrrx.r1 },
            .{ .reg = inst.data.rrrx.r2 },
            .{ .mem = lower.mem(2, inst.data.rrrx.payload) },
            .{ .reg = inst.data.rrrx.r3 },
        },
        .rrmi => &.{
            .{ .reg = inst.data.rrix.r1 },
            .{ .reg = inst.data.rrix.r2 },
            .{ .mem = lower.mem(2, inst.data.rrix.payload) },
            .{ .imm = lower.imm(inst.ops, inst.data.rrix.i) },
        },
        // Symbolic operands lower to a zero immediate plus a relocation.
        .nav => &.{
            .{ .imm = lower.reloc(0, .{ .nav = inst.data.nav.index }, inst.data.nav.off) },
        },
        .uav => &.{
            .{ .imm = lower.reloc(0, .{ .uav = inst.data.uav }, 0) },
        },
        .lazy_sym => &.{
            .{ .imm = lower.reloc(0, .{ .lazy_sym = inst.data.lazy_sym }, 0) },
        },
        .extern_func => &.{
            .{ .imm = lower.reloc(0, .{ .extern_func = inst.data.extern_func }, 0) },
        },
        else => return lower.fail("TODO lower {s} {s}", .{ @tagName(inst.tag), @tagName(inst.ops) }),
    });
}
626
/// Lowers `pseudo_push_reg_list` / `pseudo_pop_reg_list`: one push/pop per
/// set bit in `reg_list`, indexed into the calling convention's
/// callee-preserved register set. Pushes iterate in reverse and pops forward
/// so the two sequences mirror each other. After pushing, a `.cfi_rel_offset`
/// directive is emitted per saved register.
fn pushPopRegList(lower: *Lower, comptime mnemonic: Mnemonic, inst: Mir.Inst) Error!void {
    const callee_preserved_regs = abi.getCalleePreservedRegs(lower.cc);
    // Running stack offset; only tracked for push (8 bytes per register).
    var off: i32 = switch (mnemonic) {
        .push => 0,
        .pop => undefined,
        else => unreachable,
    };
    {
        var it = inst.data.reg_list.iterator(.{ .direction = switch (mnemonic) {
            .push => .reverse,
            .pop => .forward,
            else => unreachable,
        } });
        while (it.next()) |i| {
            try lower.encode(.none, mnemonic, &.{.{
                .reg = callee_preserved_regs[i],
            }});
            switch (mnemonic) {
                .push => off -= 8,
                .pop => {},
                else => unreachable,
            }
        }
    }
    switch (mnemonic) {
        .push => {
            // Walk forward: the register pushed last sits at the most negative
            // offset, so forward iteration pairs each register with its slot.
            var it = inst.data.reg_list.iterator(.{});
            while (it.next()) |i| {
                try lower.encode(.directive, .@".cfi_rel_offset", &.{
                    .{ .reg = callee_preserved_regs[i] },
                    .{ .imm = .s(off) },
                });
                off += 8;
            }
            // Every pushed slot must have been accounted for.
            assert(off == 0);
        },
        .pop => {},
        else => unreachable,
    }
}
667
/// Page granularity (4 KiB) used by the stack-probe pseudo instructions above.
const page_size: i32 = 1 << 12;
669
670const abi = @import("abi.zig");
671const assert = std.debug.assert;
672const bits = @import("bits.zig");
673const codegen = @import("../../codegen.zig");
674const encoder = @import("encoder.zig");
675const link = @import("../../link.zig");
676const std = @import("std");
677
678const Immediate = Instruction.Immediate;
679const Instruction = encoder.Instruction;
680const InternPool = @import("../../InternPool.zig");
681const Lower = @This();
682const Memory = Instruction.Memory;
683const Mir = @import("Mir.zig");
684const Mnemonic = Instruction.Mnemonic;
685const Zcu = @import("../../Zcu.zig");
686const Operand = Instruction.Operand;
687const Prefix = Instruction.Prefix;
688const Register = bits.Register;
689const Type = @import("../../Type.zig");