// Standard library.
const std = @import("std");
const builtin = @import("builtin");
const assert = std.debug.assert;
const mem = std.mem;
const log = std.log.scoped(.c);
const Allocator = mem.Allocator;
const Writer = std.Io.Writer;

// Compiler-internal imports.
const dev = @import("../dev.zig");
const link = @import("../link.zig");
const Zcu = @import("../Zcu.zig");
const Module = @import("../Package/Module.zig");
const Compilation = @import("../Compilation.zig");
const Value = @import("../Value.zig");
const Type = @import("../Type.zig");
const C = link.File.C;
const Decl = Zcu.Decl;
const trace = @import("../tracy.zig").trace;
const Air = @import("../Air.zig");
const InternPool = @import("../InternPool.zig");
const Alignment = InternPool.Alignment;

const BigIntLimb = std.math.big.Limb;
const BigInt = std.math.big.int;
25
/// Selects which AIR legalization passes the C backend requests.
/// The target is irrelevant for this backend, hence the discarded parameter.
pub fn legalizeFeatures(_: *const std.Target) ?*const Air.Legalize.Features {
    // `dev.env.supports(.legalize)` is comptime-known; route it through an
    // inline prong so each configuration gets its own comptime feature set.
    return comptime switch (dev.env.supports(.legalize)) {
        inline else => |has_legalize| &.init(.{
            // we don't currently ask zig1 to use safe optimization modes
            .expand_intcast_safe = has_legalize,
            .expand_int_from_float_safe = has_legalize,
            .expand_int_from_float_optimized_safe = has_legalize,
            .expand_add_safe = has_legalize,
            .expand_sub_safe = has_legalize,
            .expand_mul_safe = has_legalize,

            .expand_packed_load = true,
            .expand_packed_store = true,
            .expand_packed_struct_field_val = true,
            .expand_packed_aggregate_init = true,
        }),
    };
}
44
/// For most backends, MIR is basically a sequence of machine code instructions, perhaps with some
/// "pseudo instructions" thrown in. For the C backend, it is instead the generated C code for a
/// single function. We also need to track some information to get merged into the global `link.C`
/// state, including:
/// * The UAVs used, so declarations can be emitted in `flush`
/// * The types used, so declarations can be emitted in `flush`
/// * The lazy functions used, so definitions can be emitted in `flush`
pub const Mir = struct {
    /// This map contains all the UAVs we saw generating this function.
    /// `link.C` will merge them into its `uavs`/`aligned_uavs` fields.
    /// Key is the value of the UAV; value is the UAV's alignment, or
    /// `.none` for natural alignment. The specified alignment is never
    /// less than the natural alignment.
    uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment),
    // These remaining fields are essentially just an owned version of `link.C.AvBlock`.
    /// Declarations emitted ahead of the function body (e.g. the `static`
    /// constants written by `Function.resolveInst`). Owned by this `Mir`.
    code_header: []u8,
    /// The generated C code for the function body. Owned by this `Mir`.
    code: []u8,
    /// Forward declarations this function requires. Owned by this `Mir`.
    fwd_decl: []u8,
    ctype_pool: CType.Pool,
    lazy_fns: LazyFnMap,

    /// Frees all owned buffers and containers.
    pub fn deinit(mir: *Mir, gpa: Allocator) void {
        mir.uavs.deinit(gpa);
        gpa.free(mir.code_header);
        gpa.free(mir.code);
        gpa.free(mir.fwd_decl);
        mir.ctype_pool.deinit(gpa);
        mir.lazy_fns.deinit(gpa);
    }
};
75
/// Errors that can occur while generating C code. `error.AnalysisFail`
/// indicates a compile error was recorded (see `DeclGen.fail`).
pub const Error = Writer.Error || std.mem.Allocator.Error || error{AnalysisFail};

/// The backend's C type representation; values are interned via `CType.Pool`.
pub const CType = @import("c/Type.zig");
79
/// How a lowered value is rendered in the emitted C source.
pub const CValue = union(enum) {
    none: void,
    /// A local that was freshly allocated for the current instruction and may
    /// still be reclaimed or moved (see `Function.moveCValue`/`freeCValue`).
    /// Otherwise interchangeable with `local` — see `eql` below.
    new_local: LocalIndex,
    local: LocalIndex,
    /// Address of a local.
    local_ref: LocalIndex,
    /// A constant instruction, to be rendered inline.
    constant: Value,
    /// Index into the parameters
    arg: usize,
    /// The array field of a parameter
    arg_array: usize,
    /// Index into a tuple's fields
    field: usize,
    /// By-value
    nav: InternPool.Nav.Index,
    nav_ref: InternPool.Nav.Index,
    /// An undefined value (cannot be dereferenced)
    undef: Type,
    /// Rendered as an identifier (using fmtIdent)
    identifier: []const u8,
    /// Rendered as "payload." followed by as identifier (using fmtIdent)
    payload_identifier: []const u8,
    /// Rendered with fmtCTypePoolString
    ctype_pool_string: CType.Pool.String,

    /// Structural equality. Note the deliberate asymmetry-free special case:
    /// `new_local` and `local` are treated as the same kind — they compare
    /// equal whenever their indices match, regardless of which tag each side
    /// carries.
    fn eql(lhs: CValue, rhs: CValue) bool {
        return switch (lhs) {
            .none => rhs == .none,
            .new_local, .local => |lhs_local| switch (rhs) {
                .new_local, .local => |rhs_local| lhs_local == rhs_local,
                else => false,
            },
            .local_ref => |lhs_local| switch (rhs) {
                .local_ref => |rhs_local| lhs_local == rhs_local,
                else => false,
            },
            // Values and types compare by their interned indices.
            .constant => |lhs_val| switch (rhs) {
                .constant => |rhs_val| lhs_val.toIntern() == rhs_val.toIntern(),
                else => false,
            },
            .arg => |lhs_arg_index| switch (rhs) {
                .arg => |rhs_arg_index| lhs_arg_index == rhs_arg_index,
                else => false,
            },
            .arg_array => |lhs_arg_index| switch (rhs) {
                .arg_array => |rhs_arg_index| lhs_arg_index == rhs_arg_index,
                else => false,
            },
            .field => |lhs_field_index| switch (rhs) {
                .field => |rhs_field_index| lhs_field_index == rhs_field_index,
                else => false,
            },
            .nav => |lhs_nav| switch (rhs) {
                .nav => |rhs_nav| lhs_nav == rhs_nav,
                else => false,
            },
            .nav_ref => |lhs_nav| switch (rhs) {
                .nav_ref => |rhs_nav| lhs_nav == rhs_nav,
                else => false,
            },
            .undef => |lhs_ty| switch (rhs) {
                .undef => |rhs_ty| lhs_ty.toIntern() == rhs_ty.toIntern(),
                else => false,
            },
            // Identifier strings compare by content, not pointer.
            .identifier => |lhs_id| switch (rhs) {
                .identifier => |rhs_id| std.mem.eql(u8, lhs_id, rhs_id),
                else => false,
            },
            .payload_identifier => |lhs_id| switch (rhs) {
                .payload_identifier => |rhs_id| std.mem.eql(u8, lhs_id, rhs_id),
                else => false,
            },
            .ctype_pool_string => |lhs_str| switch (rhs) {
                .ctype_pool_string => |rhs_str| lhs_str.index == rhs_str.index,
                else => false,
            },
        };
    }
};
160
/// Per-`block` instruction state tracked while its body is lowered.
const BlockData = struct {
    /// Sequential id assigned to this block (presumably from
    /// `Function.next_block_index`, used to label it in the C output —
    /// confirm at use sites).
    block_id: u32,
    /// Where the block's result value lives.
    result: CValue,
};

/// Maps AIR instruction refs to their lowered C values.
pub const CValueMap = std.AutoHashMap(Air.Inst.Ref, CValue);
167
/// Identifies a lazily-emitted helper function (see `Function.getLazyFnName`).
pub const LazyFnKey = union(enum) {
    /// Keyed by the enum type's `InternPool.Index`.
    tag_name: InternPool.Index,
    /// Keyed by the owner `Nav` whose calls must not be tail calls.
    never_tail: InternPool.Nav.Index,
    /// Keyed by the owner `Nav` whose calls must not be inlined.
    never_inline: InternPool.Nav.Index,
};
pub const LazyFnValue = struct {
    /// The helper's mangled C name, interned in the `CType.Pool`
    /// (built by `Function.getLazyFnName`).
    fn_name: CType.Pool.String,
};
pub const LazyFnMap = std.AutoArrayHashMapUnmanaged(LazyFnKey, LazyFnValue);
177
/// A C local variable declared at the top of the generated function.
const Local = struct {
    ctype: CType,
    flags: packed struct(u32) {
        /// Alignment of the local. Occupies the low 12 bits
        /// (u32 total minus the u20 padding below).
        alignas: CType.AlignAs,
        _: u20 = undefined,
    },

    /// The (ctype, alignment) pair used to key `LocalsMap` for local reuse.
    fn getType(local: Local) LocalType {
        return .{ .ctype = local.ctype, .alignas = local.flags.alignas };
    }
};

/// Index into `Function.locals`; rendered as `t<index>` in the C output.
const LocalIndex = u16;
const LocalType = struct { ctype: CType, alignas: CType.AlignAs };
/// Set of local indices all sharing one `LocalType`, available for reuse.
const LocalsList = std.AutoArrayHashMapUnmanaged(LocalIndex, void);
const LocalsMap = std.AutoArrayHashMapUnmanaged(LocalType, LocalsList);
194
/// The syntactic position a value is being rendered into; this affects both
/// the C expression form and which `CType.Kind` is required.
const ValueRenderLocation = enum {
    FunctionArgument,
    Initializer,
    StaticInitializer,
    Other,

    /// Whether this location is a C initializer context (static or not).
    fn isInitializer(loc: ValueRenderLocation) bool {
        return loc == .Initializer or loc == .StaticInitializer;
    }

    /// The `CType.Kind` needed to render a value at this location.
    fn toCTypeKind(loc: ValueRenderLocation) CType.Kind {
        return switch (loc) {
            .StaticInitializer => .global,
            .FunctionArgument => .parameter,
            .Initializer, .Other => .complete,
        };
    }
};
216
/// Extra info a builtin helper needs: nothing, or `bits` (presumably the
/// operand's bit width — confirm at use sites).
const BuiltinInfo = enum { none, bits };
218
/// Identifiers that must be escaped (prefixed `zig_e_` by
/// `formatIdentOptions`) because they collide with C keywords, standard
/// typedefs, or common platform-header macros.
const reserved_idents = std.StaticStringMap(void).initComptime(.{
    // C language
    .{ "alignas", {
        // Raise the comptime evaluation quota for building this large map.
        // The call is tucked into the first entry's (void) value expression so
        // it executes while `initComptime` evaluates its argument.
        @setEvalBranchQuota(4000);
    } },
    .{ "alignof", {} },
    .{ "asm", {} },
    .{ "atomic_bool", {} },
    .{ "atomic_char", {} },
    .{ "atomic_char16_t", {} },
    .{ "atomic_char32_t", {} },
    .{ "atomic_int", {} },
    .{ "atomic_int_fast16_t", {} },
    .{ "atomic_int_fast32_t", {} },
    .{ "atomic_int_fast64_t", {} },
    .{ "atomic_int_fast8_t", {} },
    .{ "atomic_int_least16_t", {} },
    .{ "atomic_int_least32_t", {} },
    .{ "atomic_int_least64_t", {} },
    .{ "atomic_int_least8_t", {} },
    .{ "atomic_intmax_t", {} },
    .{ "atomic_intptr_t", {} },
    .{ "atomic_llong", {} },
    .{ "atomic_long", {} },
    .{ "atomic_ptrdiff_t", {} },
    .{ "atomic_schar", {} },
    .{ "atomic_short", {} },
    .{ "atomic_size_t", {} },
    .{ "atomic_uchar", {} },
    .{ "atomic_uint", {} },
    .{ "atomic_uint_fast16_t", {} },
    .{ "atomic_uint_fast32_t", {} },
    .{ "atomic_uint_fast64_t", {} },
    .{ "atomic_uint_fast8_t", {} },
    .{ "atomic_uint_least16_t", {} },
    .{ "atomic_uint_least32_t", {} },
    .{ "atomic_uint_least64_t", {} },
    .{ "atomic_uint_least8_t", {} },
    .{ "atomic_uintmax_t", {} },
    .{ "atomic_uintptr_t", {} },
    .{ "atomic_ullong", {} },
    .{ "atomic_ulong", {} },
    .{ "atomic_ushort", {} },
    .{ "atomic_wchar_t", {} },
    .{ "auto", {} },
    .{ "break", {} },
    .{ "case", {} },
    .{ "char", {} },
    .{ "complex", {} },
    .{ "const", {} },
    .{ "continue", {} },
    .{ "default", {} },
    .{ "do", {} },
    .{ "double", {} },
    .{ "else", {} },
    .{ "enum", {} },
    .{ "extern", {} },
    .{ "float", {} },
    .{ "for", {} },
    .{ "fortran", {} },
    .{ "goto", {} },
    .{ "if", {} },
    .{ "imaginary", {} },
    .{ "inline", {} },
    .{ "int", {} },
    .{ "int16_t", {} },
    .{ "int32_t", {} },
    .{ "int64_t", {} },
    .{ "int8_t", {} },
    .{ "intptr_t", {} },
    .{ "long", {} },
    .{ "noreturn", {} },
    .{ "register", {} },
    .{ "restrict", {} },
    .{ "return", {} },
    .{ "short", {} },
    .{ "signed", {} },
    .{ "size_t", {} },
    .{ "sizeof", {} },
    .{ "ssize_t", {} },
    .{ "static", {} },
    .{ "static_assert", {} },
    .{ "struct", {} },
    .{ "switch", {} },
    .{ "thread_local", {} },
    .{ "typedef", {} },
    .{ "typeof", {} },
    .{ "uint16_t", {} },
    .{ "uint32_t", {} },
    .{ "uint64_t", {} },
    .{ "uint8_t", {} },
    .{ "uintptr_t", {} },
    .{ "union", {} },
    .{ "unsigned", {} },
    .{ "void", {} },
    .{ "volatile", {} },
    .{ "while", {} },

    // stdarg.h
    .{ "va_start", {} },
    .{ "va_arg", {} },
    .{ "va_end", {} },
    .{ "va_copy", {} },

    // stdbool.h
    .{ "bool", {} },
    .{ "false", {} },
    .{ "true", {} },

    // stddef.h
    .{ "offsetof", {} },

    // windows.h
    .{ "max", {} },
    .{ "min", {} },
});
335
/// Whether `ident` would collide with an identifier reserved by C, libc
/// headers, or windows.h, and therefore needs escaping.
fn isReservedIdent(ident: []const u8) bool {
    // The C standard reserves identifiers beginning with an underscore
    // followed by an uppercase letter or a second underscore.
    if (ident.len >= 2 and ident[0] == '_') return switch (ident[1]) {
        'A'...'Z', '_' => true,
        else => false,
    };
    // windows.h reserves these anonymous-member dummy names (any suffix).
    if (mem.startsWith(u8, ident, "DUMMYSTRUCTNAME")) return true;
    if (mem.startsWith(u8, ident, "DUMMYUNIONNAME")) return true;
    // Everything else: consult the keyword/typedef blocklist.
    return reserved_idents.has(ident);
}
348
/// Formats `ident` as a standalone C identifier (reserved names are escaped).
fn formatIdentSolo(ident: []const u8, w: *Writer) Writer.Error!void {
    return formatIdentOptions(ident, w, true);
}
352
/// Formats `ident` as a fragment of a larger C identifier
/// (no reserved-name escaping).
fn formatIdentUnsolo(ident: []const u8, w: *Writer) Writer.Error!void {
    return formatIdentOptions(ident, w, false);
}
356
/// Writes `ident` mangled into a valid C identifier:
/// * when `solo`, reserved identifiers get a `zig_e_` prefix,
/// * `.` becomes `_`,
/// * a leading digit and any other byte are hex-escaped as `_<hex>`.
/// Must be kept in sync with `isMangledIdent`.
fn formatIdentOptions(ident: []const u8, w: *Writer, solo: bool) Writer.Error!void {
    if (solo and isReservedIdent(ident)) {
        try w.writeAll("zig_e_");
    }
    for (ident, 0..) |c, i| {
        switch (c) {
            'a'...'z', 'A'...'Z', '_' => try w.writeByte(c),
            '.' => try w.writeByte('_'),
            '0'...'9' => if (i == 0) {
                // C identifiers cannot start with a digit, so escape it.
                // NOTE(review): `{x:2}` requests width-2 hex — confirm the
                // default fill produces the intended zero-padded escape.
                try w.print("_{x:2}", .{c});
            } else {
                try w.writeByte(c);
            },
            else => try w.print("_{x:2}", .{c}),
        }
    }
}
374
/// Returns a `std.fmt.Alt` adapter so `ident` prints with `{f}` via
/// `formatIdentSolo` (reserved names escaped).
pub fn fmtIdentSolo(ident: []const u8) std.fmt.Alt([]const u8, formatIdentSolo) {
    return .{ .data = ident };
}
378
/// Returns a `std.fmt.Alt` adapter so `ident` prints with `{f}` via
/// `formatIdentUnsolo` (no reserved-name escaping).
pub fn fmtIdentUnsolo(ident: []const u8) std.fmt.Alt([]const u8, formatIdentUnsolo) {
    return .{ .data = ident };
}
382
/// Formatting context for `formatCTypePoolString`.
const CTypePoolStringFormatData = struct {
    ctype_pool_string: CType.Pool.String,
    ctype_pool: *const CType.Pool,
    /// Whether the string stands alone as a full identifier
    /// (enables reserved-identifier escaping).
    solo: bool,
};
/// Renders a pooled string as a (possibly mangled) C identifier when it is
/// available as a plain slice; otherwise defers to the pool string's own
/// formatter.
fn formatCTypePoolString(data: CTypePoolStringFormatData, w: *Writer) Writer.Error!void {
    if (data.ctype_pool_string.toSlice(data.ctype_pool)) |slice|
        try formatIdentOptions(slice, w, data.solo)
    else
        try w.print("{f}", .{data.ctype_pool_string.fmt(data.ctype_pool)});
}
/// Returns a `std.fmt.Alt` adapter so a pooled string can be printed with
/// `{f}` via `formatCTypePoolString`.
pub fn fmtCTypePoolString(
    ctype_pool_string: CType.Pool.String,
    ctype_pool: *const CType.Pool,
    solo: bool,
) std.fmt.Alt(CTypePoolStringFormatData, formatCTypePoolString) {
    return .{ .data = .{
        .ctype_pool_string = ctype_pool_string,
        .ctype_pool = ctype_pool,
        .solo = solo,
    } };
}
405
/// Returns true if `formatIdentOptions` would make any edits to `ident`.
/// This must be kept in sync with `formatIdentOptions`.
pub fn isMangledIdent(ident: []const u8, solo: bool) bool {
    if (solo and isReservedIdent(ident)) return true;
    for (ident, 0..) |byte, index| switch (byte) {
        'a'...'z', 'A'...'Z', '_' => {},
        // A leading digit would be escaped; later digits pass through as-is.
        '0'...'9' => if (index == 0) return true,
        // Every other byte (including '.') gets rewritten.
        else => return true,
    };
    return false;
}
419
/// This data is available when outputting .c code for a `InternPool.Index`
/// that corresponds to `func`.
/// It is not available when generating .h file.
pub const Function = struct {
    air: Air,
    liveness: Air.Liveness,
    /// Memoized mapping from AIR refs to the C values they lowered to.
    value_map: CValueMap,
    /// Per-`block` instruction state (label id and result value).
    blocks: std.AutoHashMapUnmanaged(Air.Inst.Index, BlockData) = .empty,
    next_arg_index: u32 = 0,
    next_block_index: u32 = 0,
    object: Object,
    lazy_fns: LazyFnMap,
    func_index: InternPool.Index,
    /// All the locals, to be emitted at the top of the function.
    locals: std.ArrayList(Local) = .empty,
    /// Which locals are available for reuse, based on Type.
    free_locals_map: LocalsMap = .{},
    /// Locals which will not be freed by Liveness. This is used after a
    /// Function body is lowered in order to make `free_locals_map` have
    /// 100% of the locals within so that it can be used to render the block
    /// of variable declarations at the top of a function, sorted descending
    /// by type alignment.
    /// The value is whether the alloc needs to be emitted in the header.
    allocs: std.AutoArrayHashMapUnmanaged(LocalIndex, bool) = .empty,
    /// Maps from `loop_switch_br` instructions to the allocated local used
    /// for the switch cond. Dispatches should set this local to the new cond.
    loop_switch_conds: std.AutoHashMapUnmanaged(Air.Inst.Index, LocalIndex) = .empty,

    /// Resolves `ref` to a renderable C value, memoizing the result in
    /// `value_map`. Values whose type lowers to a C array are emitted as
    /// `static` constants in the code header and referenced as locals; all
    /// other values render inline as constants.
    fn resolveInst(f: *Function, ref: Air.Inst.Ref) !CValue {
        const gop = try f.value_map.getOrPut(ref);
        if (gop.found_existing) return gop.value_ptr.*;

        const pt = f.object.dg.pt;
        const zcu = pt.zcu;
        // Anything not already in `value_map` must have a comptime-known
        // value, hence the non-null assertion.
        const val = (try f.air.value(ref, pt)).?;
        const ty = f.typeOf(ref);

        const result: CValue = if (lowersToArray(ty, zcu)) result: {
            const ch = &f.object.code_header.writer;
            const decl_c_value = try f.allocLocalValue(.{
                .ctype = try f.ctypeFromType(ty, .complete),
                .alignas = CType.AlignAs.fromAbiAlignment(ty.abiAlignment(zcu)),
            });
            const gpa = f.object.dg.gpa;
            // `false`: the declaration is written here, in the header itself.
            try f.allocs.put(gpa, decl_c_value.new_local, false);
            try ch.writeAll("static ");
            try f.object.dg.renderTypeAndName(ch, ty, decl_c_value, Const, .none, .complete);
            try ch.writeAll(" = ");
            try f.object.dg.renderValue(ch, val, .StaticInitializer);
            try ch.writeAll(";\n ");
            break :result .{ .local = decl_c_value.new_local };
        } else .{ .constant = val };

        gop.value_ptr.* = result;
        return result;
    }

    /// Whether runtime safety checks should be emitted, derived from the
    /// optimization mode.
    fn wantSafety(f: *Function) bool {
        return switch (f.object.dg.pt.zcu.optimizeMode()) {
            .Debug, .ReleaseSafe => true,
            .ReleaseFast, .ReleaseSmall => false,
        };
    }

    /// Skips the reuse logic. This function should be used for any persistent allocation, i.e.
    /// those which go into `allocs`. This function does not add the resulting local into `allocs`;
    /// that responsibility lies with the caller.
    fn allocLocalValue(f: *Function, local_type: LocalType) !CValue {
        try f.locals.ensureUnusedCapacity(f.object.dg.gpa, 1);
        // The `defer` runs after the return expression is evaluated, so the
        // returned index (the pre-append length) is exactly the index at
        // which the new local is appended.
        defer f.locals.appendAssumeCapacity(.{
            .ctype = local_type.ctype,
            .flags = .{ .alignas = local_type.alignas },
        });
        return .{ .new_local = @intCast(f.locals.items.len) };
    }

    /// Allocates (or reuses) a local with the natural C type and ABI
    /// alignment of `ty`.
    fn allocLocal(f: *Function, inst: ?Air.Inst.Index, ty: Type) !CValue {
        return f.allocAlignedLocal(inst, .{
            .ctype = try f.ctypeFromType(ty, .complete),
            .alignas = CType.AlignAs.fromAbiAlignment(ty.abiAlignment(f.object.dg.pt.zcu)),
        });
    }

    /// Only allocates the local; does not print anything. Will attempt to re-use locals, so should
    /// not be used for persistent locals (i.e. those in `allocs`).
    fn allocAlignedLocal(f: *Function, inst: ?Air.Inst.Index, local_type: LocalType) !CValue {
        const result: CValue = result: {
            // Prefer popping a previously freed local of the same type/alignment.
            if (f.free_locals_map.getPtr(local_type)) |locals_list| {
                if (locals_list.pop()) |local_entry| {
                    break :result .{ .new_local = local_entry.key };
                }
            }
            break :result try f.allocLocalValue(local_type);
        };
        if (inst) |i| {
            log.debug("%{d}: allocating t{d}", .{ i, result.new_local });
        } else {
            log.debug("allocating t{d}", .{result.new_local});
        }
        return result;
    }

    /// Renders `c_value` as a C expression. Locals print as `t<index>`,
    /// arguments as `a<index>`; cases needing no function-local state are
    /// delegated to `DeclGen`.
    fn writeCValue(f: *Function, w: *Writer, c_value: CValue, location: ValueRenderLocation) !void {
        switch (c_value) {
            .none => unreachable,
            .new_local, .local => |i| try w.print("t{d}", .{i}),
            .local_ref => |i| try w.print("&t{d}", .{i}),
            .constant => |val| try f.object.dg.renderValue(w, val, location),
            .arg => |i| try w.print("a{d}", .{i}),
            .arg_array => |i| try f.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" }),
            .undef => |ty| try f.object.dg.renderUndefValue(w, ty, location),
            else => try f.object.dg.writeCValue(w, c_value),
        }
    }

    /// Renders the dereference of `c_value` as a C expression.
    fn writeCValueDeref(f: *Function, w: *Writer, c_value: CValue) !void {
        switch (c_value) {
            .none => unreachable,
            .new_local, .local, .constant => {
                try w.writeAll("(*");
                try f.writeCValue(w, c_value, .Other);
                try w.writeByte(')');
            },
            // Dereferencing `&t<i>` simplifies to just `t<i>`.
            .local_ref => |i| try w.print("t{d}", .{i}),
            .arg => |i| try w.print("(*a{d})", .{i}),
            .arg_array => |i| {
                try w.writeAll("(*");
                try f.writeCValueMember(w, .{ .arg = i }, .{ .identifier = "array" });
                try w.writeByte(')');
            },
            else => try f.object.dg.writeCValueDeref(w, c_value),
        }
    }

    /// Renders `c_value.member` as a C expression.
    fn writeCValueMember(
        f: *Function,
        w: *Writer,
        c_value: CValue,
        member: CValue,
    ) Error!void {
        switch (c_value) {
            .new_local, .local, .local_ref, .constant, .arg, .arg_array => {
                try f.writeCValue(w, c_value, .Other);
                try w.writeByte('.');
                try f.writeCValue(w, member, .Other);
            },
            else => return f.object.dg.writeCValueMember(w, c_value, member),
        }
    }

    /// Renders `c_value->member` as a C expression.
    fn writeCValueDerefMember(f: *Function, w: *Writer, c_value: CValue, member: CValue) !void {
        switch (c_value) {
            .new_local, .local, .arg, .arg_array => {
                try f.writeCValue(w, c_value, .Other);
                try w.writeAll("->");
            },
            .constant => {
                // Parenthesize so `->` binds to the whole constant expression.
                try w.writeByte('(');
                try f.writeCValue(w, c_value, .Other);
                try w.writeAll(")->");
            },
            .local_ref => {
                // `(&t<i>)->member` simplifies to `t<i>.member`.
                try f.writeCValueDeref(w, c_value);
                try w.writeByte('.');
            },
            else => return f.object.dg.writeCValueDerefMember(w, c_value, member),
        }
        try f.writeCValue(w, member, .Other);
    }

    /// Records a compile error for this function; always returns
    /// `error.AnalysisFail`.
    fn fail(f: *Function, comptime format: []const u8, args: anytype) Error {
        return f.object.dg.fail(format, args);
    }

    fn ctypeFromType(f: *Function, ty: Type, kind: CType.Kind) !CType {
        return f.object.dg.ctypeFromType(ty, kind);
    }

    fn byteSize(f: *Function, ctype: CType) u64 {
        return f.object.dg.byteSize(ctype);
    }

    fn renderType(f: *Function, w: *Writer, ctype: Type) !void {
        return f.object.dg.renderType(w, ctype);
    }

    fn renderCType(f: *Function, w: *Writer, ctype: CType) !void {
        return f.object.dg.renderCType(w, ctype);
    }

    fn renderIntCast(f: *Function, w: *Writer, dest_ty: Type, src: CValue, v: Vectorize, src_ty: Type, location: ValueRenderLocation) !void {
        return f.object.dg.renderIntCast(w, dest_ty, .{ .c_value = .{ .f = f, .value = src, .v = v } }, src_ty, location);
    }

    fn fmtIntLiteralDec(f: *Function, val: Value) !std.fmt.Alt(FormatIntLiteralContext, formatIntLiteral) {
        return f.object.dg.fmtIntLiteralDec(val, .Other);
    }

    fn fmtIntLiteralHex(f: *Function, val: Value) !std.fmt.Alt(FormatIntLiteralContext, formatIntLiteral) {
        return f.object.dg.fmtIntLiteralHex(val, .Other);
    }

    /// Returns the mangled C name for the lazy helper identified by `key`,
    /// registering it in `lazy_fns` on first use so its definition can be
    /// emitted later (see `Mir.lazy_fns`).
    fn getLazyFnName(f: *Function, key: LazyFnKey) ![]const u8 {
        const gpa = f.object.dg.gpa;
        const pt = f.object.dg.pt;
        const zcu = pt.zcu;
        const ip = &zcu.intern_pool;
        const ctype_pool = &f.object.dg.ctype_pool;

        const gop = try f.lazy_fns.getOrPut(gpa, key);
        if (!gop.found_existing) {
            // Keep the map consistent if building the name fails.
            errdefer _ = f.lazy_fns.pop();

            gop.value_ptr.* = .{
                // Name shape: `zig_<kind>_<mangled name>__<intern index>`.
                .fn_name = switch (key) {
                    .tag_name,
                    => |enum_ty| try ctype_pool.fmt(gpa, "zig_{s}_{f}__{d}", .{
                        @tagName(key),
                        fmtIdentUnsolo(ip.loadEnumType(enum_ty).name.toSlice(ip)),
                        @intFromEnum(enum_ty),
                    }),
                    .never_tail,
                    .never_inline,
                    => |owner_nav| try ctype_pool.fmt(gpa, "zig_{s}_{f}__{d}", .{
                        @tagName(key),
                        fmtIdentUnsolo(ip.getNav(owner_nav).name.toSlice(ip)),
                        @intFromEnum(owner_nav),
                    }),
                },
            };
        }
        return gop.value_ptr.fn_name.toSlice(ctype_pool).?;
    }

    pub fn deinit(f: *Function) void {
        const gpa = f.object.dg.gpa;
        f.allocs.deinit(gpa);
        f.locals.deinit(gpa);
        deinitFreeLocalsMap(gpa, &f.free_locals_map);
        f.blocks.deinit(gpa);
        f.value_map.deinit();
        f.lazy_fns.deinit(gpa);
        f.loop_switch_conds.deinit(gpa);
    }

    fn typeOf(f: *Function, inst: Air.Inst.Ref) Type {
        return f.air.typeOf(inst, &f.object.dg.pt.zcu.intern_pool);
    }

    fn typeOfIndex(f: *Function, inst: Air.Inst.Index) Type {
        return f.air.typeOfIndex(inst, &f.object.dg.pt.zcu.intern_pool);
    }

    /// Emits `dst = src;`, unless both refer to the same local, in which
    /// case the assignment is a no-op and nothing is written.
    fn copyCValue(f: *Function, ctype: CType, dst: CValue, src: CValue) !void {
        switch (dst) {
            .new_local, .local => |dst_local_index| switch (src) {
                .new_local, .local => |src_local_index| if (dst_local_index == src_local_index) return,
                else => {},
            },
            else => {},
        }
        const w = &f.object.code.writer;
        const a = try Assignment.start(f, w, ctype);
        try f.writeCValue(w, dst, .Other);
        try a.assign(f, w);
        try f.writeCValue(w, src, .Other);
        try a.end(f, w);
    }

    /// Claims `src` as the result of `inst`: a freshly allocated local is
    /// handed over directly; anything else is copied into a new local.
    fn moveCValue(f: *Function, inst: Air.Inst.Index, ty: Type, src: CValue) !CValue {
        switch (src) {
            // Move the freshly allocated local to be owned by this instruction,
            // by returning it here instead of freeing it.
            .new_local => return src,
            else => {
                try freeCValue(f, inst, src);
                const dst = try f.allocLocal(inst, ty);
                try f.copyCValue(try f.ctypeFromType(ty, .complete), dst, src);
                return dst;
            },
        }
    }

    /// Returns a `new_local` to the free pool; every other kind is a no-op.
    fn freeCValue(f: *Function, inst: ?Air.Inst.Index, val: CValue) !void {
        switch (val) {
            .new_local => |local_index| try freeLocal(f, inst, local_index, null),
            else => {},
        }
    }
};
710
/// This data is available when outputting .c code for a `Zcu`.
/// It is not available when generating .h file.
pub const Object = struct {
    dg: DeclGen,
    /// Buffer for declarations emitted ahead of the function body
    /// (e.g. the `static` constants written by `Function.resolveInst`).
    code_header: Writer.Allocating,
    /// Buffer for the function body itself.
    code: Writer.Allocating,
    /// Current indentation depth in characters; maintained by
    /// `indent`/`outdent` and applied by `newline`.
    indent_counter: usize,

    const indent_width = 1;
    const indent_char = ' ';

    /// Emits a newline followed by indentation at the current depth.
    fn newline(o: *Object) !void {
        const w = &o.code.writer;
        try w.writeByte('\n');
        try w.splatByteAll(indent_char, o.indent_counter);
    }
    fn indent(o: *Object) void {
        o.indent_counter += indent_width;
    }
    /// Decreases the indentation depth and retroactively fixes up what was
    /// already written: either strips one indent char that was just emitted,
    /// or — immediately after a bare newline — emits indentation at the new
    /// depth. Any other trailing byte indicates a caller bug.
    fn outdent(o: *Object) !void {
        o.indent_counter -= indent_width;
        const written = o.code.written();
        switch (written[written.len - 1]) {
            indent_char => o.code.shrinkRetainingCapacity(written.len - indent_width),
            '\n' => try o.code.writer.splatByteAll(indent_char, o.indent_counter),
            else => {
                // Dump the buffer tail to aid debugging before crashing.
                std.debug.print("\"{f}\"\n", .{std.zig.fmtString(written[written.len -| 100..])});
                unreachable;
            },
        }
    }
};
743
744/// This data is available both when outputting .c code and when outputting an .h file.
745pub const DeclGen = struct {
746 gpa: Allocator,
747 pt: Zcu.PerThread,
748 mod: *Module,
749 pass: Pass,
750 is_naked_fn: bool,
751 expected_block: ?u32,
752 fwd_decl: Writer.Allocating,
753 error_msg: ?*Zcu.ErrorMsg,
754 ctype_pool: CType.Pool,
755 scratch: std.ArrayList(u32),
756 /// This map contains all the UAVs we saw generating this function.
757 /// `link.C` will merge them into its `uavs`/`aligned_uavs` fields.
758 /// Key is the value of the UAV; value is the UAV's alignment, or
759 /// `.none` for natural alignment. The specified alignment is never
760 /// less than the natural alignment.
761 uavs: std.AutoArrayHashMapUnmanaged(InternPool.Index, Alignment),
762
763 pub const Pass = union(enum) {
764 nav: InternPool.Nav.Index,
765 uav: InternPool.Index,
766 flush,
767 };
768
769 fn fail(dg: *DeclGen, comptime format: []const u8, args: anytype) Error {
770 @branchHint(.cold);
771 const zcu = dg.pt.zcu;
772 const src_loc = zcu.navSrcLoc(dg.pass.nav);
773 dg.error_msg = try Zcu.ErrorMsg.create(dg.gpa, src_loc, format, args);
774 return error.AnalysisFail;
775 }
776
777 fn renderUav(
778 dg: *DeclGen,
779 w: *Writer,
780 uav: InternPool.Key.Ptr.BaseAddr.Uav,
781 location: ValueRenderLocation,
782 ) Error!void {
783 const pt = dg.pt;
784 const zcu = pt.zcu;
785 const ip = &zcu.intern_pool;
786 const ctype_pool = &dg.ctype_pool;
787 const uav_val = Value.fromInterned(uav.val);
788 const uav_ty = uav_val.typeOf(zcu);
789
790 // Render an undefined pointer if we have a pointer to a zero-bit or comptime type.
791 const ptr_ty: Type = .fromInterned(uav.orig_ty);
792 if (ptr_ty.isPtrAtRuntime(zcu) and !uav_ty.isFnOrHasRuntimeBits(zcu)) {
793 return dg.writeCValue(w, .{ .undef = ptr_ty });
794 }
795
796 // Chase function values in order to be able to reference the original function.
797 switch (ip.indexToKey(uav.val)) {
798 .variable => unreachable,
799 .func => |func| return dg.renderNav(w, func.owner_nav, location),
800 .@"extern" => |@"extern"| return dg.renderNav(w, @"extern".owner_nav, location),
801 else => {},
802 }
803
804 // We shouldn't cast C function pointers as this is UB (when you call
805 // them). The analysis until now should ensure that the C function
806 // pointers are compatible. If they are not, then there is a bug
807 // somewhere and we should let the C compiler tell us about it.
808 const ptr_ctype = try dg.ctypeFromType(ptr_ty, .complete);
809 const elem_ctype = ptr_ctype.info(ctype_pool).pointer.elem_ctype;
810 const uav_ctype = try dg.ctypeFromType(uav_ty, .complete);
811 const need_cast = !elem_ctype.eql(uav_ctype) and
812 (elem_ctype.info(ctype_pool) != .function or uav_ctype.info(ctype_pool) != .function);
813 if (need_cast) {
814 try w.writeAll("((");
815 try dg.renderCType(w, ptr_ctype);
816 try w.writeByte(')');
817 }
818 try w.writeByte('&');
819 try renderUavName(w, uav_val);
820 if (need_cast) try w.writeByte(')');
821
822 // Indicate that the anon decl should be rendered to the output so that
823 // our reference above is not undefined.
824 const ptr_type = ip.indexToKey(uav.orig_ty).ptr_type;
825 const gop = try dg.uavs.getOrPut(dg.gpa, uav.val);
826 if (!gop.found_existing) gop.value_ptr.* = .none;
827 // If there is an explicit alignment, greater than the current one, use it.
828 // Note that we intentionally start at `.none`, so `gop.value_ptr.*` is never
829 // underaligned, so we don't need to worry about the `.none` case here.
830 if (ptr_type.flags.alignment != .none) {
831 // Resolve the current alignment so we can choose the bigger one.
832 const cur_alignment: Alignment = if (gop.value_ptr.* == .none) abi: {
833 break :abi Type.fromInterned(ptr_type.child).abiAlignment(zcu);
834 } else gop.value_ptr.*;
835 gop.value_ptr.* = cur_alignment.maxStrict(ptr_type.flags.alignment);
836 }
837 }
838
839 fn renderNav(
840 dg: *DeclGen,
841 w: *Writer,
842 nav_index: InternPool.Nav.Index,
843 location: ValueRenderLocation,
844 ) Error!void {
845 _ = location;
846 const pt = dg.pt;
847 const zcu = pt.zcu;
848 const ip = &zcu.intern_pool;
849 const ctype_pool = &dg.ctype_pool;
850
851 // Chase function values in order to be able to reference the original function.
852 const owner_nav = switch (ip.getNav(nav_index).status) {
853 .unresolved => unreachable,
854 .type_resolved => nav_index, // this can't be an extern or a function
855 .fully_resolved => |r| switch (ip.indexToKey(r.val)) {
856 .func => |f| f.owner_nav,
857 .@"extern" => |e| e.owner_nav,
858 else => nav_index,
859 },
860 };
861
862 // Render an undefined pointer if we have a pointer to a zero-bit or comptime type.
863 const nav_ty: Type = .fromInterned(ip.getNav(owner_nav).typeOf(ip));
864 const ptr_ty = try pt.navPtrType(owner_nav);
865 if (!nav_ty.isFnOrHasRuntimeBits(zcu)) {
866 return dg.writeCValue(w, .{ .undef = ptr_ty });
867 }
868
869 // We shouldn't cast C function pointers as this is UB (when you call
870 // them). The analysis until now should ensure that the C function
871 // pointers are compatible. If they are not, then there is a bug
872 // somewhere and we should let the C compiler tell us about it.
873 const ctype = try dg.ctypeFromType(ptr_ty, .complete);
874 const elem_ctype = ctype.info(ctype_pool).pointer.elem_ctype;
875 const nav_ctype = try dg.ctypeFromType(nav_ty, .complete);
876 const need_cast = !elem_ctype.eql(nav_ctype) and
877 (elem_ctype.info(ctype_pool) != .function or nav_ctype.info(ctype_pool) != .function);
878 if (need_cast) {
879 try w.writeAll("((");
880 try dg.renderCType(w, ctype);
881 try w.writeByte(')');
882 }
883 try w.writeByte('&');
884 try dg.renderNavName(w, owner_nav);
885 if (need_cast) try w.writeByte(')');
886 }
887
888 fn renderPointer(
889 dg: *DeclGen,
890 w: *Writer,
891 derivation: Value.PointerDeriveStep,
892 location: ValueRenderLocation,
893 ) Error!void {
894 const pt = dg.pt;
895 const zcu = pt.zcu;
896 switch (derivation) {
897 .comptime_alloc_ptr, .comptime_field_ptr => unreachable,
898 .int => |int| {
899 const ptr_ctype = try dg.ctypeFromType(int.ptr_ty, .complete);
900 const addr_val = try pt.intValue(.usize, int.addr);
901 try w.writeByte('(');
902 try dg.renderCType(w, ptr_ctype);
903 try w.print("){f}", .{try dg.fmtIntLiteralHex(addr_val, .Other)});
904 },
905
906 .nav_ptr => |nav| try dg.renderNav(w, nav, location),
907 .uav_ptr => |uav| try dg.renderUav(w, uav, location),
908
909 inline .eu_payload_ptr, .opt_payload_ptr => |info| {
910 try w.writeAll("&(");
911 try dg.renderPointer(w, info.parent.*, location);
912 try w.writeAll(")->payload");
913 },
914
915 .field_ptr => |field| {
916 const parent_ptr_ty = try field.parent.ptrType(pt);
917
918 // Ensure complete type definition is available before accessing fields.
919 _ = try dg.ctypeFromType(parent_ptr_ty.childType(zcu), .complete);
920
921 switch (fieldLocation(parent_ptr_ty, field.result_ptr_ty, field.field_idx, zcu)) {
922 .begin => {
923 const ptr_ctype = try dg.ctypeFromType(field.result_ptr_ty, .complete);
924 try w.writeByte('(');
925 try dg.renderCType(w, ptr_ctype);
926 try w.writeByte(')');
927 try dg.renderPointer(w, field.parent.*, location);
928 },
929 .field => |name| {
930 try w.writeAll("&(");
931 try dg.renderPointer(w, field.parent.*, location);
932 try w.writeAll(")->");
933 try dg.writeCValue(w, name);
934 },
935 .byte_offset => |byte_offset| {
936 const ptr_ctype = try dg.ctypeFromType(field.result_ptr_ty, .complete);
937 try w.writeByte('(');
938 try dg.renderCType(w, ptr_ctype);
939 try w.writeByte(')');
940 const offset_val = try pt.intValue(.usize, byte_offset);
941 try w.writeAll("((char *)");
942 try dg.renderPointer(w, field.parent.*, location);
943 try w.print(" + {f})", .{try dg.fmtIntLiteralDec(offset_val, .Other)});
944 },
945 }
946 },
947
948 .elem_ptr => |elem| if (!(try elem.parent.ptrType(pt)).childType(zcu).hasRuntimeBits(zcu)) {
949 // Element type is zero-bit, so lowers to `void`. The index is irrelevant; just cast the pointer.
950 const ptr_ctype = try dg.ctypeFromType(elem.result_ptr_ty, .complete);
951 try w.writeByte('(');
952 try dg.renderCType(w, ptr_ctype);
953 try w.writeByte(')');
954 try dg.renderPointer(w, elem.parent.*, location);
955 } else {
956 const index_val = try pt.intValue(.usize, elem.elem_idx);
957 // We want to do pointer arithmetic on a pointer to the element type.
958 // We might have a pointer-to-array. In this case, we must cast first.
959 const result_ctype = try dg.ctypeFromType(elem.result_ptr_ty, .complete);
960 const parent_ctype = try dg.ctypeFromType(try elem.parent.ptrType(pt), .complete);
961 if (result_ctype.eql(parent_ctype)) {
962 // The pointer already has an appropriate type - just do the arithmetic.
963 try w.writeByte('(');
964 try dg.renderPointer(w, elem.parent.*, location);
965 try w.print(" + {f})", .{try dg.fmtIntLiteralDec(index_val, .Other)});
966 } else {
967 // We probably have an array pointer `T (*)[n]`. Cast to an element pointer,
968 // and *then* apply the index.
969 try w.writeAll("((");
970 try dg.renderCType(w, result_ctype);
971 try w.writeByte(')');
972 try dg.renderPointer(w, elem.parent.*, location);
973 try w.print(" + {f})", .{try dg.fmtIntLiteralDec(index_val, .Other)});
974 }
975 },
976
977 .offset_and_cast => |oac| {
978 const ptr_ctype = try dg.ctypeFromType(oac.new_ptr_ty, .complete);
979 try w.writeByte('(');
980 try dg.renderCType(w, ptr_ctype);
981 try w.writeByte(')');
982 if (oac.byte_offset == 0) {
983 try dg.renderPointer(w, oac.parent.*, location);
984 } else {
985 const offset_val = try pt.intValue(.usize, oac.byte_offset);
986 try w.writeAll("((char *)");
987 try dg.renderPointer(w, oac.parent.*, location);
988 try w.print(" + {f})", .{try dg.fmtIntLiteralDec(offset_val, .Other)});
989 }
990 },
991 }
992 }
993
994 fn renderErrorName(dg: *DeclGen, w: *Writer, err_name: InternPool.NullTerminatedString) !void {
995 try w.print("zig_error_{f}", .{fmtIdentUnsolo(err_name.toSlice(&dg.pt.zcu.intern_pool))});
996 }
997
    /// Renders the C source representation of the comptime-known value `val` to `w`.
    ///
    /// `location` selects the expression context: outside initializer contexts an
    /// explicit `(T)` cast is emitted before aggregate literals, `.StaticInitializer`
    /// restricts output to forms legal in C static initializers, and
    /// `.FunctionArgument` forces a cast for array/vector values.
    ///
    /// Undefined values are delegated to `renderUndefValue`.
    fn renderValue(
        dg: *DeclGen,
        w: *Writer,
        val: Value,
        location: ValueRenderLocation,
    ) Error!void {
        const pt = dg.pt;
        const zcu = pt.zcu;
        const ip = &zcu.intern_pool;
        const target = &dg.mod.resolved_target.result;
        const ctype_pool = &dg.ctype_pool;

        // Location used for values nested inside this one: they are rendered as
        // initializers, preserving static-ness of the surrounding context.
        const initializer_type: ValueRenderLocation = switch (location) {
            .StaticInitializer => .StaticInitializer,
            else => .Initializer,
        };

        const ty = val.typeOf(zcu);
        if (val.isUndef(zcu)) return dg.renderUndefValue(w, ty, location);
        const ctype = try dg.ctypeFromType(ty, location.toCTypeKind());
        switch (ip.indexToKey(val.toIntern())) {
            // types, not values
            .int_type,
            .ptr_type,
            .array_type,
            .vector_type,
            .opt_type,
            .anyframe_type,
            .error_union_type,
            .simple_type,
            .struct_type,
            .tuple_type,
            .union_type,
            .opaque_type,
            .enum_type,
            .func_type,
            .error_set_type,
            .inferred_error_set_type,
            // memoization, not values
            .memoized_call,
            => unreachable,

            .undef => unreachable, // handled above
            .simple_value => |simple_value| switch (simple_value) {
                // non-runtime values
                .undefined => unreachable,
                .void => unreachable,
                .null => unreachable,
                .empty_tuple => unreachable,
                .@"unreachable" => unreachable,

                .false => try w.writeAll("false"),
                .true => try w.writeAll("true"),
            },
            .variable,
            .@"extern",
            .func,
            .enum_literal,
            .empty_enum_value,
            => unreachable, // non-runtime values
            .int => |int| switch (int.storage) {
                .u64, .i64, .big_int => try w.print("{f}", .{try dg.fmtIntLiteralDec(val, location)}),
                // Lazy alignment/size values: resolve to a concrete usize, then
                // cast to the value's C type.
                .lazy_align, .lazy_size => {
                    try w.writeAll("((");
                    try dg.renderCType(w, ctype);
                    try w.print("){f})", .{try dg.fmtIntLiteralHex(
                        try pt.intValue(.usize, val.toUnsignedInt(zcu)),
                        .Other,
                    )});
                },
            },
            .err => |err| try dg.renderErrorName(w, err.name),
            .error_union => |error_union| switch (ctype.info(ctype_pool)) {
                // Error union lowered to a bare error value (payload carries no bits).
                .basic => switch (error_union.val) {
                    .err_name => |err_name| try dg.renderErrorName(w, err_name),
                    .payload => try w.writeByte('0'),
                },
                .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable,
                // Error union lowered to a struct with `error` and `payload` fields.
                .aggregate => |aggregate| {
                    if (!location.isInitializer()) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }
                    try w.writeByte('{');
                    for (0..aggregate.fields.len) |field_index| {
                        if (field_index > 0) try w.writeByte(',');
                        switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
                            .@"error" => switch (error_union.val) {
                                .err_name => |err_name| try dg.renderErrorName(w, err_name),
                                .payload => try w.writeByte('0'),
                            },
                            .payload => switch (error_union.val) {
                                .err_name => try dg.renderUndefValue(
                                    w,
                                    ty.errorUnionPayload(zcu),
                                    initializer_type,
                                ),
                                .payload => |payload| try dg.renderValue(
                                    w,
                                    Value.fromInterned(payload),
                                    initializer_type,
                                ),
                            },
                            else => unreachable,
                        }
                    }
                    try w.writeByte('}');
                },
            },
            .enum_tag => |enum_tag| try dg.renderValue(w, Value.fromInterned(enum_tag.int), location),
            .float => {
                const bits = ty.floatBits(target);
                const f128_val = val.toFloat(f128, zcu);

                // All unsigned ints matching float types are pre-allocated.
                const repr_ty = pt.intType(.unsigned, bits) catch unreachable;

                // Bit-identical integer representation of the float, passed as the
                // final argument of the zig_make_*/zig_init_special_* macros.
                assert(bits <= 128);
                var repr_val_limbs: [BigInt.calcTwosCompLimbCount(128)]BigIntLimb = undefined;
                var repr_val_big = BigInt.Mutable{
                    .limbs = &repr_val_limbs,
                    .len = undefined,
                    .positive = undefined,
                };

                switch (bits) {
                    16 => repr_val_big.set(@as(u16, @bitCast(val.toFloat(f16, zcu)))),
                    32 => repr_val_big.set(@as(u32, @bitCast(val.toFloat(f32, zcu)))),
                    64 => repr_val_big.set(@as(u64, @bitCast(val.toFloat(f64, zcu)))),
                    80 => repr_val_big.set(@as(u80, @bitCast(val.toFloat(f80, zcu)))),
                    128 => repr_val_big.set(@as(u128, @bitCast(f128_val))),
                    else => unreachable,
                }

                var empty = true;
                if (std.math.isFinite(f128_val)) {
                    try w.writeAll("zig_make_");
                    try dg.renderTypeForBuiltinFnName(w, ty);
                    try w.writeByte('(');
                    switch (bits) {
                        16 => try w.print("{x}", .{val.toFloat(f16, zcu)}),
                        32 => try w.print("{x}", .{val.toFloat(f32, zcu)}),
                        64 => try w.print("{x}", .{val.toFloat(f64, zcu)}),
                        80 => try w.print("{x}", .{val.toFloat(f80, zcu)}),
                        128 => try w.print("{x}", .{f128_val}),
                        else => unreachable,
                    }
                    try w.writeAll(", ");
                    empty = false;
                } else {
                    // Non-finite: render via the zig_*_special_* macros.
                    // isSignalNan is equivalent to isNan currently, and MSVC doesn't have nans, so prefer nan
                    const operation = if (std.math.isNan(f128_val))
                        "nan"
                    else if (std.math.isSignalNan(f128_val))
                        "nans"
                    else if (std.math.isInf(f128_val))
                        "inf"
                    else
                        unreachable;

                    if (location == .StaticInitializer) {
                        if (!std.math.isNan(f128_val) and std.math.isSignalNan(f128_val))
                            return dg.fail("TODO: C backend: implement nans rendering in static initializers", .{});

                        // MSVC doesn't have a way to define a custom or signaling NaN value in a constant expression

                        // TODO: Re-enable this check, otherwise we're writing qnan bit patterns on msvc incorrectly
                        // if (std.math.isNan(f128_val) and f128_val != std.math.nan(f128))
                        //     return dg.fail("Only quiet nans are supported in global variable initializers", .{});
                    }

                    try w.writeAll("zig_");
                    try w.writeAll(if (location == .StaticInitializer) "init" else "make");
                    try w.writeAll("_special_");
                    try dg.renderTypeForBuiltinFnName(w, ty);
                    try w.writeByte('(');
                    if (std.math.signbit(f128_val)) try w.writeByte('-');
                    try w.writeAll(", ");
                    try w.writeAll(operation);
                    try w.writeAll(", ");
                    if (std.math.isNan(f128_val)) switch (bits) {
                        // We only actually need to pass the significand, but it will get
                        // properly masked anyway, so just pass the whole value.
                        16 => try w.print("\"0x{x}\"", .{@as(u16, @bitCast(val.toFloat(f16, zcu)))}),
                        32 => try w.print("\"0x{x}\"", .{@as(u32, @bitCast(val.toFloat(f32, zcu)))}),
                        64 => try w.print("\"0x{x}\"", .{@as(u64, @bitCast(val.toFloat(f64, zcu)))}),
                        80 => try w.print("\"0x{x}\"", .{@as(u80, @bitCast(val.toFloat(f80, zcu)))}),
                        128 => try w.print("\"0x{x}\"", .{@as(u128, @bitCast(f128_val))}),
                        else => unreachable,
                    };
                    try w.writeAll(", ");
                    empty = false;
                }
                try w.print("{f}", .{try dg.fmtIntLiteralHex(
                    try pt.intValue_big(repr_ty, repr_val_big.toConst()),
                    location,
                )});
                if (!empty) try w.writeByte(')');
            },
            .slice => |slice| {
                const aggregate = ctype.info(ctype_pool).aggregate;
                if (!location.isInitializer()) {
                    try w.writeByte('(');
                    try dg.renderCType(w, ctype);
                    try w.writeByte(')');
                }
                try w.writeByte('{');
                for (0..aggregate.fields.len) |field_index| {
                    if (field_index > 0) try w.writeByte(',');
                    try dg.renderValue(w, Value.fromInterned(
                        switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
                            .ptr => slice.ptr,
                            .len => slice.len,
                            else => unreachable,
                        },
                    ), initializer_type);
                }
                try w.writeByte('}');
            },
            // Pointers render from their derivation chain (base declaration plus
            // field/element/offset steps); see `renderPointer`. The derivation
            // only lives for the duration of this call, hence the local arena.
            .ptr => {
                var arena = std.heap.ArenaAllocator.init(zcu.gpa);
                defer arena.deinit();
                const derivation = try val.pointerDerivation(arena.allocator(), pt);
                try dg.renderPointer(w, derivation, location);
            },
            .opt => |opt| switch (ctype.info(ctype_pool)) {
                .basic => if (ctype.isBool()) try w.writeAll(switch (opt.val) {
                    .none => "true",
                    else => "false",
                }) else switch (opt.val) {
                    .none => try w.writeByte('0'),
                    else => |payload| switch (ip.indexToKey(payload)) {
                        .undef => |err_ty| try dg.renderUndefValue(
                            w,
                            .fromInterned(err_ty),
                            location,
                        ),
                        .err => |err| try dg.renderErrorName(w, err.name),
                        else => unreachable,
                    },
                },
                .pointer => switch (opt.val) {
                    .none => try w.writeAll("NULL"),
                    else => |payload| try dg.renderValue(w, Value.fromInterned(payload), location),
                },
                .aligned, .array, .vector, .fwd_decl, .function => unreachable,
                .aggregate => |aggregate| {
                    switch (opt.val) {
                        .none => {},
                        else => |payload| switch (aggregate.fields.at(0, ctype_pool).name.index) {
                            .is_null, .payload => {},
                            // Optional slice: the payload itself provides all fields.
                            .ptr, .len => return dg.renderValue(
                                w,
                                Value.fromInterned(payload),
                                location,
                            ),
                            else => unreachable,
                        },
                    }
                    if (!location.isInitializer()) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }
                    try w.writeByte('{');
                    for (0..aggregate.fields.len) |field_index| {
                        if (field_index > 0) try w.writeByte(',');
                        switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
                            .is_null => try w.writeAll(switch (opt.val) {
                                .none => "true",
                                else => "false",
                            }),
                            .payload => switch (opt.val) {
                                .none => try dg.renderUndefValue(
                                    w,
                                    ty.optionalChild(zcu),
                                    initializer_type,
                                ),
                                else => |payload| try dg.renderValue(
                                    w,
                                    Value.fromInterned(payload),
                                    initializer_type,
                                ),
                            },
                            .ptr => try w.writeAll("NULL"),
                            .len => try dg.renderUndefValue(w, .usize, initializer_type),
                            else => unreachable,
                        }
                    }
                    try w.writeByte('}');
                },
            },
            .aggregate => switch (ip.indexToKey(ty.toIntern())) {
                .array_type, .vector_type => {
                    if (location == .FunctionArgument) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }
                    const ai = ty.arrayInfo(zcu);
                    // u8 arrays render as C string literals; everything else as
                    // a brace-enclosed element list.
                    if (ai.elem_type.eql(.u8, zcu)) {
                        var literal: StringLiteral = .init(w, @intCast(ty.arrayLenIncludingSentinel(zcu)));
                        try literal.start();
                        var index: usize = 0;
                        while (index < ai.len) : (index += 1) {
                            const elem_val = try val.elemValue(pt, index);
                            const elem_val_u8: u8 = if (elem_val.isUndef(zcu))
                                undefPattern(u8)
                            else
                                @intCast(elem_val.toUnsignedInt(zcu));
                            try literal.writeChar(elem_val_u8);
                        }
                        if (ai.sentinel) |s| {
                            const s_u8: u8 = @intCast(s.toUnsignedInt(zcu));
                            // A zero sentinel is already provided by the C literal.
                            if (s_u8 != 0) try literal.writeChar(s_u8);
                        }
                        try literal.end();
                    } else {
                        try w.writeByte('{');
                        var index: usize = 0;
                        while (index < ai.len) : (index += 1) {
                            if (index > 0) try w.writeByte(',');
                            const elem_val = try val.elemValue(pt, index);
                            try dg.renderValue(w, elem_val, initializer_type);
                        }
                        if (ai.sentinel) |s| {
                            if (index > 0) try w.writeByte(',');
                            try dg.renderValue(w, s, initializer_type);
                        }
                        try w.writeByte('}');
                    }
                },
                .tuple_type => |tuple| {
                    if (!location.isInitializer()) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }

                    try w.writeByte('{');
                    var empty = true;
                    for (0..tuple.types.len) |field_index| {
                        // Skip comptime-known and zero-bit fields: they have no
                        // C representation.
                        const comptime_val = tuple.values.get(ip)[field_index];
                        if (comptime_val != .none) continue;
                        const field_ty: Type = .fromInterned(tuple.types.get(ip)[field_index]);
                        if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

                        if (!empty) try w.writeByte(',');

                        const field_val = Value.fromInterned(
                            switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
                                .bytes => |bytes| try pt.intern(.{ .int = .{
                                    .ty = field_ty.toIntern(),
                                    .storage = .{ .u64 = bytes.at(field_index, ip) },
                                } }),
                                .elems => |elems| elems[field_index],
                                .repeated_elem => |elem| elem,
                            },
                        );
                        try dg.renderValue(w, field_val, initializer_type);

                        empty = false;
                    }
                    try w.writeByte('}');
                },
                .struct_type => {
                    const loaded_struct = ip.loadStructType(ty.toIntern());
                    switch (loaded_struct.layout) {
                        .auto, .@"extern" => {
                            if (!location.isInitializer()) {
                                try w.writeByte('(');
                                try dg.renderCType(w, ctype);
                                try w.writeByte(')');
                            }

                            try w.writeByte('{');
                            var field_it = loaded_struct.iterateRuntimeOrder(ip);
                            var need_comma = false;
                            while (field_it.next()) |field_index| {
                                const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]);
                                if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

                                if (need_comma) try w.writeByte(',');
                                need_comma = true;
                                const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
                                    .bytes => |bytes| try pt.intern(.{ .int = .{
                                        .ty = field_ty.toIntern(),
                                        .storage = .{ .u64 = bytes.at(field_index, ip) },
                                    } }),
                                    .elems => |elems| elems[field_index],
                                    .repeated_elem => |elem| elem,
                                };
                                try dg.renderValue(w, Value.fromInterned(field_val), initializer_type);
                            }
                            try w.writeByte('}');
                        },
                        .@"packed" => {
                            // https://github.com/ziglang/zig/issues/24657 will eliminate most of the
                            // following logic, leaving only the recursive `renderValue` call. Once
                            // that proposal is implemented, a `packed struct` will literally be
                            // represented in the InternPool by its comptime-known backing integer.
                            var arena: std.heap.ArenaAllocator = .init(zcu.gpa);
                            defer arena.deinit();
                            const backing_ty: Type = .fromInterned(loaded_struct.backingIntTypeUnordered(ip));
                            const buf = try arena.allocator().alloc(u8, @intCast(ty.abiSize(zcu)));
                            val.writeToMemory(pt, buf) catch |err| switch (err) {
                                error.IllDefinedMemoryLayout => unreachable,
                                error.OutOfMemory => |e| return e,
                                error.ReinterpretDeclRef, error.Unimplemented => return dg.fail("TODO: C backend: lower packed struct value", .{}),
                            };
                            const backing_val: Value = try .readUintFromMemory(backing_ty, pt, buf, arena.allocator());
                            return dg.renderValue(w, backing_val, location);
                        },
                    }
                },
                else => unreachable,
            },
            .un => |un| {
                const loaded_union = ip.loadUnionType(ty.toIntern());
                if (loaded_union.flagsUnordered(ip).layout == .@"packed") {
                    // https://github.com/ziglang/zig/issues/24657 will eliminate most of the
                    // following logic, leaving only the recursive `renderValue` call. Once
                    // that proposal is implemented, a `packed union` will literally be
                    // represented in the InternPool by its comptime-known backing integer.
                    var arena: std.heap.ArenaAllocator = .init(zcu.gpa);
                    defer arena.deinit();
                    const backing_ty = try ty.unionBackingType(pt);
                    const buf = try arena.allocator().alloc(u8, @intCast(ty.abiSize(zcu)));
                    val.writeToMemory(pt, buf) catch |err| switch (err) {
                        error.IllDefinedMemoryLayout => unreachable,
                        error.OutOfMemory => |e| return e,
                        error.ReinterpretDeclRef, error.Unimplemented => return dg.fail("TODO: C backend: lower packed union value", .{}),
                    };
                    const backing_val: Value = try .readUintFromMemory(backing_ty, pt, buf, arena.allocator());
                    return dg.renderValue(w, backing_val, location);
                }
                if (un.tag == .none) {
                    // Untagged value of an extern union: build a compound literal of
                    // the backing type and reinterpret it through a pointer cast.
                    const backing_ty = try ty.unionBackingType(pt);
                    assert(loaded_union.flagsUnordered(ip).layout == .@"extern");
                    if (location == .StaticInitializer) {
                        return dg.fail("TODO: C backend: implement extern union backing type rendering in static initializers", .{});
                    }

                    const ptr_ty = try pt.singleConstPtrType(ty);
                    try w.writeAll("*((");
                    try dg.renderType(w, ptr_ty);
                    try w.writeAll(")(");
                    try dg.renderType(w, backing_ty);
                    try w.writeAll("){");
                    try dg.renderValue(w, Value.fromInterned(un.val), location);
                    try w.writeAll("})");
                } else {
                    if (!location.isInitializer()) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }

                    const field_index = zcu.unionTagFieldIndex(loaded_union, Value.fromInterned(un.tag)).?;
                    const field_ty: Type = .fromInterned(loaded_union.field_types.get(ip)[field_index]);
                    const field_name = loaded_union.loadTagType(ip).names.get(ip)[field_index];

                    const has_tag = loaded_union.hasTag(ip);
                    if (has_tag) try w.writeByte('{');
                    const aggregate = ctype.info(ctype_pool).aggregate;
                    for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| {
                        if (outer_field_index > 0) try w.writeByte(',');
                        switch (if (has_tag)
                            aggregate.fields.at(outer_field_index, ctype_pool).name.index
                        else
                            .payload) {
                            .tag => try dg.renderValue(
                                w,
                                Value.fromInterned(un.tag),
                                initializer_type,
                            ),
                            .payload => {
                                try w.writeByte('{');
                                if (field_ty.hasRuntimeBits(zcu)) {
                                    try w.print(" .{f} = ", .{fmtIdentSolo(field_name.toSlice(ip))});
                                    try dg.renderValue(
                                        w,
                                        Value.fromInterned(un.val),
                                        initializer_type,
                                    );
                                    try w.writeByte(' ');
                                } else for (0..loaded_union.field_types.len) |inner_field_index| {
                                    // Active field is zero-bit: initialize the first
                                    // runtime field with an undef pattern instead.
                                    const inner_field_ty: Type = .fromInterned(
                                        loaded_union.field_types.get(ip)[inner_field_index],
                                    );
                                    if (!inner_field_ty.hasRuntimeBits(zcu)) continue;
                                    try dg.renderUndefValue(w, inner_field_ty, initializer_type);
                                    break;
                                }
                                try w.writeByte('}');
                            },
                            else => unreachable,
                        }
                    }
                    if (has_tag) try w.writeByte('}');
                }
            },
        }
    }
1503
    /// Renders a well-defined stand-in bit pattern for an undefined value of type
    /// `ty` (for example `0xaa` for `bool` in safe builds, and `0xaa` bytes for
    /// `u8` arrays). Recurses through aggregates so each field receives a pattern
    /// of its own type. Mirrors the structure of `renderValue`.
    fn renderUndefValue(
        dg: *DeclGen,
        w: *Writer,
        ty: Type,
        location: ValueRenderLocation,
    ) Error!void {
        const pt = dg.pt;
        const zcu = pt.zcu;
        const ip = &zcu.intern_pool;
        const target = &dg.mod.resolved_target.result;
        const ctype_pool = &dg.ctype_pool;

        // Nested values are rendered as initializers, preserving static-ness.
        const initializer_type: ValueRenderLocation = switch (location) {
            .StaticInitializer => .StaticInitializer,
            else => .Initializer,
        };

        // In safe modes an undefined bool renders as 0xaa rather than `false`.
        const safety_on = switch (zcu.optimizeMode()) {
            .Debug, .ReleaseSafe => true,
            .ReleaseFast, .ReleaseSmall => false,
        };

        const ctype = try dg.ctypeFromType(ty, location.toCTypeKind());
        switch (ty.toIntern()) {
            .c_longdouble_type,
            .f16_type,
            .f32_type,
            .f64_type,
            .f80_type,
            .f128_type,
            => {
                const bits = ty.floatBits(target);
                // All unsigned ints matching float types are pre-allocated.
                const repr_ty = dg.pt.intType(.unsigned, bits) catch unreachable;

                // Emit a float whose bits are the undef pattern, plus the matching
                // undefined integer representation.
                try w.writeAll("zig_make_");
                try dg.renderTypeForBuiltinFnName(w, ty);
                try w.writeByte('(');
                switch (bits) {
                    16 => try w.print("{x}", .{@as(f16, @bitCast(undefPattern(i16)))}),
                    32 => try w.print("{x}", .{@as(f32, @bitCast(undefPattern(i32)))}),
                    64 => try w.print("{x}", .{@as(f64, @bitCast(undefPattern(i64)))}),
                    80 => try w.print("{x}", .{@as(f80, @bitCast(undefPattern(i80)))}),
                    128 => try w.print("{x}", .{@as(f128, @bitCast(undefPattern(i128)))}),
                    else => unreachable,
                }
                try w.writeAll(", ");
                try dg.renderUndefValue(w, repr_ty, .FunctionArgument);
                return w.writeByte(')');
            },
            .bool_type => try w.writeAll(if (safety_on) "0xaa" else "false"),
            else => switch (ip.indexToKey(ty.toIntern())) {
                .simple_type,
                .int_type,
                .enum_type,
                .error_set_type,
                .inferred_error_set_type,
                => return w.print("{f}", .{
                    try dg.fmtIntLiteralHex(try pt.undefValue(ty), location),
                }),
                .ptr_type => |ptr_type| switch (ptr_type.flags.size) {
                    // Single/many/C pointers: cast an undefined usize.
                    .one, .many, .c => {
                        try w.writeAll("((");
                        try dg.renderCType(w, ctype);
                        return w.print("){f})", .{
                            try dg.fmtIntLiteralHex(.undef_usize, .Other),
                        });
                    },
                    // Slices: undefined pointer and undefined length.
                    .slice => {
                        if (!location.isInitializer()) {
                            try w.writeByte('(');
                            try dg.renderCType(w, ctype);
                            try w.writeByte(')');
                        }

                        try w.writeAll("{(");
                        const ptr_ty = ty.slicePtrFieldType(zcu);
                        try dg.renderType(w, ptr_ty);
                        return w.print("){f}, {0f}}}", .{
                            try dg.fmtIntLiteralHex(.undef_usize, .Other),
                        });
                    },
                },
                .opt_type => |child_type| switch (ctype.info(ctype_pool)) {
                    .basic, .pointer => try dg.renderUndefValue(
                        w,
                        .fromInterned(if (ctype.isBool()) .bool_type else child_type),
                        location,
                    ),
                    .aligned, .array, .vector, .fwd_decl, .function => unreachable,
                    .aggregate => |aggregate| {
                        switch (aggregate.fields.at(0, ctype_pool).name.index) {
                            .is_null, .payload => {},
                            // Optional slice: delegate entirely to the child type.
                            .ptr, .len => return dg.renderUndefValue(
                                w,
                                .fromInterned(child_type),
                                location,
                            ),
                            else => unreachable,
                        }
                        if (!location.isInitializer()) {
                            try w.writeByte('(');
                            try dg.renderCType(w, ctype);
                            try w.writeByte(')');
                        }
                        try w.writeByte('{');
                        for (0..aggregate.fields.len) |field_index| {
                            if (field_index > 0) try w.writeByte(',');
                            try dg.renderUndefValue(w, .fromInterned(
                                switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
                                    .is_null => .bool_type,
                                    .payload => child_type,
                                    else => unreachable,
                                },
                            ), initializer_type);
                        }
                        try w.writeByte('}');
                    },
                },
                .struct_type => {
                    const loaded_struct = ip.loadStructType(ty.toIntern());
                    switch (loaded_struct.layout) {
                        .auto, .@"extern" => {
                            if (!location.isInitializer()) {
                                try w.writeByte('(');
                                try dg.renderCType(w, ctype);
                                try w.writeByte(')');
                            }

                            try w.writeByte('{');
                            var field_it = loaded_struct.iterateRuntimeOrder(ip);
                            var need_comma = false;
                            while (field_it.next()) |field_index| {
                                const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]);
                                if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

                                if (need_comma) try w.writeByte(',');
                                need_comma = true;
                                try dg.renderUndefValue(w, field_ty, initializer_type);
                            }
                            return w.writeByte('}');
                        },
                        // Packed structs are represented by their backing integer.
                        .@"packed" => return dg.renderUndefValue(
                            w,
                            .fromInterned(loaded_struct.backingIntTypeUnordered(ip)),
                            location,
                        ),
                    }
                },
                .tuple_type => |tuple_info| {
                    if (!location.isInitializer()) {
                        try w.writeByte('(');
                        try dg.renderCType(w, ctype);
                        try w.writeByte(')');
                    }

                    try w.writeByte('{');
                    var need_comma = false;
                    for (0..tuple_info.types.len) |field_index| {
                        // Comptime-known and zero-bit fields have no C representation.
                        if (tuple_info.values.get(ip)[field_index] != .none) continue;
                        const field_ty: Type = .fromInterned(tuple_info.types.get(ip)[field_index]);
                        if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

                        if (need_comma) try w.writeByte(',');
                        need_comma = true;
                        try dg.renderUndefValue(w, field_ty, initializer_type);
                    }
                    return w.writeByte('}');
                },
                .union_type => {
                    const loaded_union = ip.loadUnionType(ty.toIntern());
                    switch (loaded_union.flagsUnordered(ip).layout) {
                        .auto, .@"extern" => {
                            if (!location.isInitializer()) {
                                try w.writeByte('(');
                                try dg.renderCType(w, ctype);
                                try w.writeByte(')');
                            }

                            const has_tag = loaded_union.hasTag(ip);
                            if (has_tag) try w.writeByte('{');
                            const aggregate = ctype.info(ctype_pool).aggregate;
                            for (0..if (has_tag) aggregate.fields.len else 1) |outer_field_index| {
                                if (outer_field_index > 0) try w.writeByte(',');
                                switch (if (has_tag)
                                    aggregate.fields.at(outer_field_index, ctype_pool).name.index
                                else
                                    .payload) {
                                    .tag => try dg.renderUndefValue(
                                        w,
                                        .fromInterned(loaded_union.enum_tag_ty),
                                        initializer_type,
                                    ),
                                    .payload => {
                                        // Initialize only the first runtime field.
                                        try w.writeByte('{');
                                        for (0..loaded_union.field_types.len) |inner_field_index| {
                                            const inner_field_ty: Type = .fromInterned(
                                                loaded_union.field_types.get(ip)[inner_field_index],
                                            );
                                            if (!inner_field_ty.hasRuntimeBits(pt.zcu)) continue;
                                            try dg.renderUndefValue(
                                                w,
                                                inner_field_ty,
                                                initializer_type,
                                            );
                                            break;
                                        }
                                        try w.writeByte('}');
                                    },
                                    else => unreachable,
                                }
                            }
                            if (has_tag) try w.writeByte('}');
                        },
                        // Packed unions are represented by their backing type.
                        .@"packed" => return dg.renderUndefValue(
                            w,
                            try ty.unionBackingType(pt),
                            location,
                        ),
                    }
                },
                .error_union_type => |error_union_type| switch (ctype.info(ctype_pool)) {
                    .basic => try dg.renderUndefValue(
                        w,
                        .fromInterned(error_union_type.error_set_type),
                        location,
                    ),
                    .pointer, .aligned, .array, .vector, .fwd_decl, .function => unreachable,
                    .aggregate => |aggregate| {
                        if (!location.isInitializer()) {
                            try w.writeByte('(');
                            try dg.renderCType(w, ctype);
                            try w.writeByte(')');
                        }
                        try w.writeByte('{');
                        for (0..aggregate.fields.len) |field_index| {
                            if (field_index > 0) try w.writeByte(',');
                            try dg.renderUndefValue(
                                w,
                                .fromInterned(
                                    switch (aggregate.fields.at(field_index, ctype_pool).name.index) {
                                        .@"error" => error_union_type.error_set_type,
                                        .payload => error_union_type.payload_type,
                                        else => unreachable,
                                    },
                                ),
                                initializer_type,
                            );
                        }
                        try w.writeByte('}');
                    },
                },
                .array_type, .vector_type => {
                    const ai = ty.arrayInfo(zcu);
                    // u8 arrays render as a string literal of 0xaa bytes.
                    if (ai.elem_type.eql(.u8, zcu)) {
                        var literal: StringLiteral = .init(w, @intCast(ty.arrayLenIncludingSentinel(zcu)));
                        try literal.start();
                        var index: u64 = 0;
                        while (index < ai.len) : (index += 1) try literal.writeChar(0xaa);
                        if (ai.sentinel) |s| {
                            const s_u8: u8 = @intCast(s.toUnsignedInt(zcu));
                            if (s_u8 != 0) try literal.writeChar(s_u8);
                        }
                        return literal.end();
                    } else {
                        if (!location.isInitializer()) {
                            try w.writeByte('(');
                            try dg.renderCType(w, ctype);
                            try w.writeByte(')');
                        }

                        try w.writeByte('{');
                        var index: u64 = 0;
                        while (index < ai.len) : (index += 1) {
                            if (index > 0) try w.writeAll(", ");
                            try dg.renderUndefValue(w, ty.childType(zcu), initializer_type);
                        }
                        if (ai.sentinel) |s| {
                            if (index > 0) try w.writeAll(", ");
                            try dg.renderValue(w, s, location);
                        }
                        return w.writeByte('}');
                    }
                },
                .anyframe_type,
                .opaque_type,
                .func_type,
                => unreachable,

                .undef,
                .simple_value,
                .variable,
                .@"extern",
                .func,
                .int,
                .err,
                .error_union,
                .enum_literal,
                .enum_tag,
                .empty_enum_value,
                .float,
                .ptr,
                .slice,
                .opt,
                .aggregate,
                .un,
                .memoized_call,
                => unreachable, // values, not types
            },
        }
    }
1815
    /// Renders the C signature of the function value `fn_val`, either as a forward
    /// declaration (`kind == .forward`) or as the header of a definition
    /// (`kind == .complete`). `name` selects how the function is spelled: by nav,
    /// by a pre-rendered ctype-pool string, or as an exported symbol (which may
    /// additionally emit zig_mangled*/zig_export annotations on forward decls).
    fn renderFunctionSignature(
        dg: *DeclGen,
        w: *Writer,
        fn_val: Value,
        fn_align: InternPool.Alignment,
        kind: CType.Kind,
        name: union(enum) {
            nav: InternPool.Nav.Index,
            fmt_ctype_pool_string: std.fmt.Alt(CTypePoolStringFormatData, formatCTypePoolString),
            @"export": struct {
                main_name: InternPool.NullTerminatedString,
                extern_name: InternPool.NullTerminatedString,
            },
        },
    ) !void {
        const zcu = dg.pt.zcu;
        const ip = &zcu.intern_pool;

        const fn_ty = fn_val.typeOf(zcu);
        const fn_ctype = try dg.ctypeFromType(fn_ty, kind);

        const fn_info = zcu.typeToFunc(fn_ty).?;
        if (fn_info.cc == .naked) {
            switch (kind) {
                .forward => try w.writeAll("zig_naked_decl "),
                .complete => try w.writeAll("zig_naked "),
                else => unreachable,
            }
        }

        if (fn_val.getFunction(zcu)) |func| {
            const func_analysis = func.analysisUnordered(ip);

            if (func_analysis.branch_hint == .cold)
                try w.writeAll("zig_cold ");

            // NOTE(review): `and` binds tighter than `or`, so this reads
            // `(kind == .complete and disable_intrinsics) or no_builtin`;
            // a module-level `no_builtin` therefore applies to forward
            // declarations as well — confirm that is intended.
            if (kind == .complete and func_analysis.disable_intrinsics or dg.mod.no_builtin)
                try w.writeAll("zig_no_builtin ");
        }

        if (fn_info.return_type == .noreturn_type) try w.writeAll("zig_noreturn ");

        // Render the type prefix, then the optional calling convention and name.
        var trailing = try renderTypePrefix(dg.pass, &dg.ctype_pool, zcu, w, fn_ctype, .suffix, .{});

        if (toCallingConvention(fn_info.cc, zcu)) |call_conv| {
            try w.print("{f}zig_callconv({s})", .{ trailing, call_conv });
            trailing = .maybe_space;
        }

        try w.print("{f}", .{trailing});
        switch (name) {
            .nav => |nav| try dg.renderNavName(w, nav),
            .fmt_ctype_pool_string => |fmt| try w.print("{f}", .{fmt}),
            .@"export" => |@"export"| try w.print("{f}", .{fmtIdentSolo(@"export".extern_name.toSlice(ip))}),
        }

        // Render the remaining type syntax after the name; forward declarations
        // are rendered without the `const` qualifiers.
        try renderTypeSuffix(
            dg.pass,
            &dg.ctype_pool,
            zcu,
            w,
            fn_ctype,
            .suffix,
            CQualifiers.init(.{ .@"const" = switch (kind) {
                .forward => false,
                .complete => true,
                else => unreachable,
            } }),
        );

        switch (kind) {
            .forward => {
                if (fn_align.toByteUnits()) |a| try w.print(" zig_align_fn({})", .{a});
                switch (name) {
                    .nav, .fmt_ctype_pool_string => {},
                    .@"export" => |@"export"| {
                        const extern_name = @"export".extern_name.toSlice(ip);
                        const is_mangled = isMangledIdent(extern_name, true);
                        const is_export = @"export".extern_name != @"export".main_name;
                        if (is_mangled and is_export) {
                            try w.print(" zig_mangled_export({f}, {f}, {f})", .{
                                fmtIdentSolo(extern_name),
                                fmtStringLiteral(extern_name, null),
                                fmtStringLiteral(@"export".main_name.toSlice(ip), null),
                            });
                        } else if (is_mangled) {
                            try w.print(" zig_mangled({f}, {f})", .{
                                fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null),
                            });
                        } else if (is_export) {
                            try w.print(" zig_export({f}, {f})", .{
                                fmtStringLiteral(@"export".main_name.toSlice(ip), null),
                                fmtStringLiteral(extern_name, null),
                            });
                        }
                    },
                }
            },
            .complete => {},
            else => unreachable,
        }
    }
1918
1919 fn ctypeFromType(dg: *DeclGen, ty: Type, kind: CType.Kind) !CType {
1920 defer std.debug.assert(dg.scratch.items.len == 0);
1921 return dg.ctype_pool.fromType(dg.gpa, &dg.scratch, ty, dg.pt, dg.mod, kind);
1922 }
1923
    /// Returns the size in bytes of `ctype` as computed by the ctype pool
    /// for the current module.
    fn byteSize(dg: *DeclGen, ctype: CType) u64 {
        return ctype.byteSize(&dg.ctype_pool, dg.mod);
    }
1927
    /// Renders a type as a single identifier, generating intermediate typedefs
    /// if necessary.
    ///
    /// This is guaranteed to be valid in both typedefs and declarations/definitions.
    ///
    /// There are two type formats in total that we support rendering:
    /// | Function            | Example 1 (*u8) | Example 2 ([10]*u8) |
    /// |---------------------|-----------------|---------------------|
    /// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
    /// | `renderType`        | "uint8_t *"     | "uint8_t *[10]"     |
    ///
    fn renderType(dg: *DeclGen, w: *Writer, t: Type) Error!void {
        try dg.renderCType(w, try dg.ctypeFromType(t, .complete));
    }
1942
1943 fn renderCType(dg: *DeclGen, w: *Writer, ctype: CType) Error!void {
1944 _ = try renderTypePrefix(dg.pass, &dg.ctype_pool, dg.pt.zcu, w, ctype, .suffix, .{});
1945 try renderTypeSuffix(dg.pass, &dg.ctype_pool, dg.pt.zcu, w, ctype, .suffix, .{});
1946 }
1947
    /// The operand source for `renderIntCast`: either an in-function C value
    /// (possibly an element of a vectorized operation), or a `Value` rendered
    /// directly by the DeclGen.
    const IntCastContext = union(enum) {
        c_value: struct {
            f: *Function,
            value: CValue,
            v: Vectorize,
        },
        value: struct {
            value: Value,
        },

        /// Writes the underlying operand to `w` using the renderer matching
        /// the active variant.
        pub fn writeValue(self: *const IntCastContext, dg: *DeclGen, w: *Writer, location: ValueRenderLocation) !void {
            switch (self.*) {
                .c_value => |v| {
                    try v.f.writeCValue(w, v.value, location);
                    // Append the per-element accessor when vectorizing.
                    try v.v.elem(v.f, w);
                },
                .value => |v| try dg.renderValue(w, v.value, location),
            }
        }
    };
1968 fn intCastIsNoop(dg: *DeclGen, dest_ty: Type, src_ty: Type) bool {
1969 const pt = dg.pt;
1970 const zcu = pt.zcu;
1971 const dest_bits = dest_ty.bitSize(zcu);
1972 const dest_int_info = dest_ty.intInfo(pt.zcu);
1973
1974 const src_is_ptr = src_ty.isPtrAtRuntime(pt.zcu);
1975 const src_eff_ty: Type = if (src_is_ptr) switch (dest_int_info.signedness) {
1976 .unsigned => .usize,
1977 .signed => .isize,
1978 } else src_ty;
1979
1980 const src_bits = src_eff_ty.bitSize(zcu);
1981 const src_int_info = if (src_eff_ty.isAbiInt(pt.zcu)) src_eff_ty.intInfo(pt.zcu) else null;
1982 if (dest_bits <= 64 and src_bits <= 64) {
1983 const needs_cast = src_int_info == null or
1984 (toCIntBits(dest_int_info.bits) != toCIntBits(src_int_info.?.bits) or
1985 dest_int_info.signedness != src_int_info.?.signedness);
1986 return !needs_cast and !src_is_ptr;
1987 } else return false;
1988 }
    /// Renders a cast to an int type, from either an int or a pointer.
    ///
    /// Some platforms don't have 128 bit integers, so we need to use
    /// the zig_make_ and zig_lo_ macros in those cases.
    ///
    /// | Dest type bits   | Src type         | Result
    /// |------------------|------------------|---------------------------|
    /// | < 64 bit integer | pointer          | (zig_<dest_ty>)(zig_<u|i>size)src
    /// | < 64 bit integer | < 64 bit integer | (zig_<dest_ty>)src
    /// | < 64 bit integer | > 64 bit integer | zig_lo(src)
    /// | > 64 bit integer | pointer          | zig_make_<dest_ty>(0, (zig_<u|i>size)src)
    /// | > 64 bit integer | < 64 bit integer | zig_make_<dest_ty>(0, src)
    /// | > 64 bit integer | > 64 bit integer | zig_make_<dest_ty>(zig_hi_<src_ty>(src), zig_lo_<src_ty>(src))
    fn renderIntCast(
        dg: *DeclGen,
        w: *Writer,
        dest_ty: Type,
        context: IntCastContext,
        src_ty: Type,
        location: ValueRenderLocation,
    ) !void {
        const pt = dg.pt;
        const zcu = pt.zcu;
        const dest_bits = dest_ty.bitSize(zcu);
        const dest_int_info = dest_ty.intInfo(zcu);

        const src_is_ptr = src_ty.isPtrAtRuntime(zcu);
        // Pointers are first converted through the address-sized integer whose
        // signedness matches the destination.
        const src_eff_ty: Type = if (src_is_ptr) switch (dest_int_info.signedness) {
            .unsigned => .usize,
            .signed => .isize,
        } else src_ty;

        const src_bits = src_eff_ty.bitSize(zcu);
        const src_int_info = if (src_eff_ty.isAbiInt(zcu)) src_eff_ty.intInfo(zcu) else null;
        if (dest_bits <= 64 and src_bits <= 64) {
            // Both sides fit in native C integers; plain C casts suffice.
            const needs_cast = src_int_info == null or
                (toCIntBits(dest_int_info.bits) != toCIntBits(src_int_info.?.bits) or
                    dest_int_info.signedness != src_int_info.?.signedness);

            if (needs_cast) {
                try w.writeByte('(');
                try dg.renderType(w, dest_ty);
                try w.writeByte(')');
            }
            if (src_is_ptr) {
                try w.writeByte('(');
                try dg.renderType(w, src_eff_ty);
                try w.writeByte(')');
            }
            try context.writeValue(dg, w, location);
        } else if (dest_bits <= 64 and src_bits > 64) {
            // Narrowing from an emulated big int: take the low word.
            assert(!src_is_ptr);
            if (dest_bits < 64) {
                try w.writeByte('(');
                try dg.renderType(w, dest_ty);
                try w.writeByte(')');
            }
            try w.writeAll("zig_lo_");
            try dg.renderTypeForBuiltinFnName(w, src_eff_ty);
            try w.writeByte('(');
            try context.writeValue(dg, w, .FunctionArgument);
            try w.writeByte(')');
        } else if (dest_bits > 64 and src_bits <= 64) {
            // Widening to an emulated big int: high word is 0.
            try w.writeAll("zig_make_");
            try dg.renderTypeForBuiltinFnName(w, dest_ty);
            try w.writeAll("(0, ");
            if (src_is_ptr) {
                try w.writeByte('(');
                try dg.renderType(w, src_eff_ty);
                try w.writeByte(')');
            }
            try context.writeValue(dg, w, .FunctionArgument);
            try w.writeByte(')');
        } else {
            // Big int to big int: reassemble from the source's high and low words.
            assert(!src_is_ptr);
            try w.writeAll("zig_make_");
            try dg.renderTypeForBuiltinFnName(w, dest_ty);
            try w.writeAll("(zig_hi_");
            try dg.renderTypeForBuiltinFnName(w, src_eff_ty);
            try w.writeByte('(');
            try context.writeValue(dg, w, .FunctionArgument);
            try w.writeAll("), zig_lo_");
            try dg.renderTypeForBuiltinFnName(w, src_eff_ty);
            try w.writeByte('(');
            try context.writeValue(dg, w, .FunctionArgument);
            try w.writeAll("))");
        }
    }
2077
    /// Renders a type and name in field declaration/definition format.
    ///
    /// There are two type formats in total that we support rendering:
    /// | Function            | Example 1 (*u8) | Example 2 ([10]*u8) |
    /// |---------------------|-----------------|---------------------|
    /// | `renderTypeAndName` | "uint8_t *name" | "uint8_t *name[10]" |
    /// | `renderType`        | "uint8_t *"     | "uint8_t *[10]"     |
    ///
    fn renderTypeAndName(
        dg: *DeclGen,
        w: *Writer,
        ty: Type,
        name: CValue,
        qualifiers: CQualifiers,
        alignment: Alignment,
        kind: CType.Kind,
    ) !void {
        try dg.renderCTypeAndName(
            w,
            try dg.ctypeFromType(ty, kind),
            name,
            qualifiers,
            // Pair the requested alignment with the ABI alignment so that the
            // renderer can emit zig_align/zig_under_align only when they differ.
            CType.AlignAs.fromAlignment(.{
                .@"align" = alignment,
                .abi = ty.abiAlignment(dg.pt.zcu),
            }),
        );
    }
2106
    /// Renders `ctype` and `name` as a C declarator:
    /// `[align attr] prefix name suffix [zig_nonstring]`.
    fn renderCTypeAndName(
        dg: *DeclGen,
        w: *Writer,
        ctype: CType,
        name: CValue,
        qualifiers: CQualifiers,
        alignas: CType.AlignAs,
    ) !void {
        const zcu = dg.pt.zcu;
        // Emit an alignment attribute only when the requested alignment
        // differs from the ABI alignment.
        switch (alignas.abiOrder()) {
            .lt => try w.print("zig_under_align({}) ", .{alignas.toByteUnits()}),
            .eq => {},
            .gt => try w.print("zig_align({}) ", .{alignas.toByteUnits()}),
        }

        try w.print("{f}", .{
            try renderTypePrefix(dg.pass, &dg.ctype_pool, zcu, w, ctype, .suffix, qualifiers),
        });
        try dg.writeName(w, name);
        try renderTypeSuffix(dg.pass, &dg.ctype_pool, zcu, w, ctype, .suffix, .{});
        // Annotate ctypes flagged as non-strings (see `CType.isNonString`).
        if (ctype.isNonString(&dg.ctype_pool)) try w.writeAll(" zig_nonstring");
    }
2129
2130 fn writeName(dg: *DeclGen, w: *Writer, c_value: CValue) !void {
2131 switch (c_value) {
2132 .new_local, .local => |i| try w.print("t{d}", .{i}),
2133 .constant => |uav| try renderUavName(w, uav),
2134 .nav => |nav| try dg.renderNavName(w, nav),
2135 .identifier => |ident| try w.print("{f}", .{fmtIdentSolo(ident)}),
2136 else => unreachable,
2137 }
2138 }
2139
    /// Writes a usable C expression for `c_value`. Function-local value kinds
    /// (locals, args) are unreachable here and must be rendered elsewhere.
    fn writeCValue(dg: *DeclGen, w: *Writer, c_value: CValue) Error!void {
        switch (c_value) {
            .none, .new_local, .local, .local_ref => unreachable,
            .constant => |uav| try renderUavName(w, uav),
            .arg, .arg_array => unreachable,
            // Aggregate fields are named f0, f1, ...
            .field => |i| try w.print("f{d}", .{i}),
            .nav => |nav| try dg.renderNavName(w, nav),
            // A reference to a nav is its address.
            .nav_ref => |nav| {
                try w.writeByte('&');
                try dg.renderNavName(w, nav);
            },
            .undef => |ty| try dg.renderUndefValue(w, ty, .Other),
            .identifier => |ident| try w.print("{f}", .{fmtIdentSolo(ident)}),
            // A member of the local "payload" variable.
            .payload_identifier => |ident| try w.print("{f}.{f}", .{
                fmtIdentSolo("payload"),
                fmtIdentSolo(ident),
            }),
            .ctype_pool_string => |string| try w.print("{f}", .{
                fmtCTypePoolString(string, &dg.ctype_pool, true),
            }),
        }
    }
2162
    /// Writes a C expression for dereferencing `c_value`. Direct identifiers
    /// are wrapped as `(*expr)`; a `nav_ref` holds an address (`&nav`), so
    /// its dereference folds to the nav itself.
    fn writeCValueDeref(dg: *DeclGen, w: *Writer, c_value: CValue) !void {
        switch (c_value) {
            .none,
            .new_local,
            .local,
            .local_ref,
            .constant,
            .arg,
            .arg_array,
            .ctype_pool_string,
            => unreachable,
            .field => |i| try w.print("f{d}", .{i}),
            .nav => |nav| {
                try w.writeAll("(*");
                try dg.renderNavName(w, nav);
                try w.writeByte(')');
            },
            // `&nav` dereferenced is just `nav`.
            .nav_ref => |nav| try dg.renderNavName(w, nav),
            .undef => unreachable,
            .identifier => |ident| try w.print("(*{f})", .{fmtIdentSolo(ident)}),
            .payload_identifier => |ident| try w.print("(*{f}.{f})", .{
                fmtIdentSolo("payload"),
                fmtIdentSolo(ident),
            }),
        }
    }
2189
2190 fn writeCValueMember(
2191 dg: *DeclGen,
2192 w: *Writer,
2193 c_value: CValue,
2194 member: CValue,
2195 ) Error!void {
2196 try dg.writeCValue(w, c_value);
2197 try w.writeByte('.');
2198 try dg.writeCValue(w, member);
2199 }
2200
    /// Writes member access through a pointer-like `c_value`: `ptr->member`
    /// for values that render as pointers, or `nav.member` for a `nav_ref`
    /// (whose dereference folds away; see `writeCValueDeref`).
    fn writeCValueDerefMember(
        dg: *DeclGen,
        w: *Writer,
        c_value: CValue,
        member: CValue,
    ) !void {
        switch (c_value) {
            .none,
            .new_local,
            .local,
            .local_ref,
            .constant,
            .field,
            .undef,
            .arg,
            .arg_array,
            .ctype_pool_string,
            => unreachable,
            .nav, .identifier, .payload_identifier => {
                try dg.writeCValue(w, c_value);
                try w.writeAll("->");
            },
            .nav_ref => {
                try dg.writeCValueDeref(w, c_value);
                try w.writeByte('.');
            },
        }
        try dg.writeCValue(w, member);
    }
2230
    /// Emits the forward declaration of a global (nav) into the fwd_decl
    /// buffer: storage class, linkage, visibility, and threadlocal attributes
    /// followed by the declarator and a trailing `;`.
    fn renderFwdDecl(
        dg: *DeclGen,
        nav_index: InternPool.Nav.Index,
        flags: packed struct {
            is_const: bool,
            is_threadlocal: bool,
            linkage: std.builtin.GlobalLinkage,
            visibility: std.builtin.SymbolVisibility,
        },
    ) !void {
        const zcu = dg.pt.zcu;
        const ip = &zcu.intern_pool;
        const nav = ip.getNav(nav_index);
        const fwd = &dg.fwd_decl.writer;
        // Internal linkage maps to C `static`; anything else is an extern symbol.
        try fwd.writeAll(switch (flags.linkage) {
            .internal => "static ",
            .strong, .weak, .link_once => "zig_extern ",
        });
        switch (flags.linkage) {
            .internal, .strong => {},
            .weak => try fwd.writeAll("zig_weak_linkage "),
            .link_once => return dg.fail("TODO: CBE: implement linkonce linkage?", .{}),
        }
        // Visibility only applies to externally linked symbols.
        switch (flags.linkage) {
            .internal => {},
            .strong, .weak, .link_once => try fwd.print("zig_visibility({s}) ", .{@tagName(flags.visibility)}),
        }
        if (flags.is_threadlocal and !dg.mod.single_threaded) try fwd.writeAll("zig_threadlocal ");
        try dg.renderTypeAndName(
            fwd,
            .fromInterned(nav.typeOf(ip)),
            .{ .nav = nav_index },
            CQualifiers.init(.{ .@"const" = flags.is_const }),
            nav.getAlignment(),
            .complete,
        );
        try fwd.writeAll(";\n");
    }
2269
    /// Writes the C identifier for a nav. Externs use their declared name
    /// (mangled into a valid identifier); everything else gets a truncated
    /// fully-qualified name plus the nav index for uniqueness.
    fn renderNavName(dg: *DeclGen, w: *Writer, nav_index: InternPool.Nav.Index) !void {
        const zcu = dg.pt.zcu;
        const ip = &zcu.intern_pool;
        const nav = ip.getNav(nav_index);
        if (nav.getExtern(ip)) |@"extern"| {
            try w.print("{f}", .{
                fmtIdentSolo(ip.getNav(@"extern".owner_nav).name.toSlice(ip)),
            });
        } else {
            // MSVC has a limit of 4095 character token length limit, and fmtIdent can (worst case),
            // expand to 3x the length of its input, but let's cut it off at a much shorter limit.
            const fqn_slice = ip.getNav(nav_index).fqn.toSlice(ip);
            try w.print("{f}__{d}", .{
                fmtIdentUnsolo(fqn_slice[0..@min(fqn_slice.len, 100)]),
                @intFromEnum(nav_index),
            });
        }
    }
2288
2289 fn renderUavName(w: *Writer, uav: Value) !void {
2290 try w.print("__anon_{d}", .{@intFromEnum(uav.toIntern())});
2291 }
2292
    /// Renders the abbreviated type tag used inside zig_* builtin macro names
    /// for `ty` (see `renderCTypeForBuiltinFnName`).
    fn renderTypeForBuiltinFnName(dg: *DeclGen, w: *Writer, ty: Type) !void {
        try dg.renderCTypeForBuiltinFnName(w, try dg.ctypeFromType(ty, .complete));
    }
2296
    /// Writes the short type tag used in zig_* builtin macro names: a
    /// signedness abbreviation plus bit count for bools and integers,
    /// `f` plus bits for floats, `p` plus bits for pointers, and `big` for
    /// array-emulated wide integers.
    fn renderCTypeForBuiltinFnName(dg: *DeclGen, w: *Writer, ctype: CType) !void {
        switch (ctype.info(&dg.ctype_pool)) {
            else => |ctype_info| try w.print("{c}{d}", .{
                // Bools are treated as unsigned.
                if (ctype.isBool())
                    signAbbrev(.unsigned)
                else if (ctype.isInteger())
                    signAbbrev(ctype.signedness(dg.mod))
                else if (ctype.isFloat())
                    @as(u8, 'f')
                else if (ctype_info == .pointer)
                    @as(u8, 'p')
                else
                    return dg.fail("TODO: CBE: implement renderTypeForBuiltinFnName for {s} type", .{@tagName(ctype_info)}),
                // Floats use their active bit count; everything else uses
                // storage size in bits.
                if (ctype.isFloat()) ctype.floatActiveBits(dg.mod) else dg.byteSize(ctype) * 8,
            }),
            .array => try w.writeAll("big"),
        }
    }
2315
    /// Emits the trailing arguments for a zig_* builtin macro call: for "big"
    /// (array-emulated) integers a signedness flag first, then the bit count
    /// as an integer literal. For non-big types with `info == .none`, nothing
    /// is emitted.
    fn renderBuiltinInfo(dg: *DeclGen, w: *Writer, ty: Type, info: BuiltinInfo) !void {
        const ctype = try dg.ctypeFromType(ty, .complete);
        const is_big = ctype.info(&dg.ctype_pool) == .array;
        switch (info) {
            // Big ints always need the extra info, even when none was requested.
            .none => if (!is_big) return,
            .bits => {},
        }

        const pt = dg.pt;
        const zcu = pt.zcu;
        // Non-ABI-integer types are treated as unsigned with their bit size.
        const int_info: std.builtin.Type.Int = if (ty.isAbiInt(zcu)) ty.intInfo(zcu) else .{
            .signedness = .unsigned,
            .bits = @intCast(ty.bitSize(zcu)),
        };

        if (is_big) try w.print(", {}", .{int_info.signedness == .signed});
        try w.print(", {f}", .{try dg.fmtIntLiteralDec(
            try pt.intValue(if (is_big) .u16 else .u8, int_info.bits),
            .FunctionArgument,
        )});
    }
2337
    /// Returns a formatter that renders `val` as a C integer literal in the
    /// given `base` and digit `case`, using the ctype/kind appropriate for
    /// the render location `loc`.
    fn fmtIntLiteral(
        dg: *DeclGen,
        val: Value,
        loc: ValueRenderLocation,
        base: u8,
        case: std.fmt.Case,
    ) !std.fmt.Alt(FormatIntLiteralContext, formatIntLiteral) {
        const zcu = dg.pt.zcu;
        const kind = loc.toCTypeKind();
        const ty = val.typeOf(zcu);
        return .{ .data = .{
            .dg = dg,
            .int_info = ty.intInfo(zcu),
            .kind = kind,
            .ctype = try dg.ctypeFromType(ty, kind),
            .val = val,
            .base = base,
            .case = case,
        } };
    }
2358
2359 fn fmtIntLiteralDec(
2360 dg: *DeclGen,
2361 val: Value,
2362 loc: ValueRenderLocation,
2363 ) !std.fmt.Alt(FormatIntLiteralContext, formatIntLiteral) {
2364 return fmtIntLiteral(dg, val, loc, 10, .lower);
2365 }
2366
2367 fn fmtIntLiteralHex(
2368 dg: *DeclGen,
2369 val: Value,
2370 loc: ValueRenderLocation,
2371 ) !std.fmt.Alt(FormatIntLiteralContext, formatIntLiteral) {
2372 return fmtIntLiteral(dg, val, loc, 16, .lower);
2373 }
2374};
2375
/// Which part of the enclosing declarator the caller is rendering; controls
/// where grouping parentheses are opened/closed in renderTypePrefix/Suffix.
const CTypeFix = enum { prefix, suffix };
/// Set of C type qualifiers attachable to a declarator.
const CQualifiers = std.enums.EnumSet(enum { @"const", @"volatile", restrict });
/// Convenience qualifier set containing only `const`.
const Const = CQualifiers.init(.{ .@"const" = true });
/// Tracks whether a separating space may be needed between a just-rendered
/// type prefix and whatever follows (an identifier or qualifier).
const RenderCTypeTrailing = enum {
    no_space,
    maybe_space,

    // Formatting this value emits the pending space, if any.
    pub fn format(self: @This(), w: *Writer) Writer.Error!void {
        switch (self) {
            .no_space => {},
            .maybe_space => try w.writeByte(' '),
        }
    }
};
/// Writes the synthesized typedef name for an explicitly-aligned type,
/// keyed by its ctype pool index.
fn renderAlignedTypeName(w: *Writer, ctype: CType) !void {
    const index = @intFromEnum(ctype.index);
    try w.print("anon__aligned_{d}", .{index});
}
/// Renders the C tag and name of a forward-declared container type, e.g.
/// `struct <attributes><name>`. `attributes` is emitted verbatim between the
/// tag and the name (e.g. "zig_packed(", or "" for none).
fn renderFwdDeclTypeName(
    zcu: *Zcu,
    w: *Writer,
    ctype: CType,
    fwd_decl: CType.Info.FwdDecl,
    attributes: []const u8,
) !void {
    const ip = &zcu.intern_pool;
    try w.print("{s} {s}", .{ @tagName(fwd_decl.tag), attributes });
    switch (fwd_decl.name) {
        // Anonymous types are keyed by their unique ctype pool index.
        .anon => try w.print("anon__lazy_{d}", .{@intFromEnum(ctype.index)}),
        // Named types use their container name plus the interned type index
        // for uniqueness.
        .index => |index| try w.print("{f}__{d}", .{
            fmtIdentUnsolo(Type.fromInterned(index).containerTypeName(ip).toSlice(ip)),
            @intFromEnum(index),
        }),
    }
}
/// Renders everything that appears before the declared identifier in a C
/// declarator for `ctype`, recursing through pointer/array/function nesting.
/// `parent_fix` says whether the caller is rendering its own prefix or
/// suffix, which determines when grouping parentheses must be opened here
/// (and later closed by `renderTypeSuffix`). Returns whether a space is
/// needed before an identifier that immediately follows.
fn renderTypePrefix(
    pass: DeclGen.Pass,
    ctype_pool: *const CType.Pool,
    zcu: *Zcu,
    w: *Writer,
    ctype: CType,
    parent_fix: CTypeFix,
    qualifiers: CQualifiers,
) Writer.Error!RenderCTypeTrailing {
    var trailing = RenderCTypeTrailing.maybe_space;
    switch (ctype.info(ctype_pool)) {
        .basic => |basic_info| try w.writeAll(@tagName(basic_info)),

        // "<elem prefix>*"; the pointee's const/volatile qualifiers are
        // passed down to be rendered as part of the element's prefix.
        .pointer => |pointer_info| {
            try w.print("{f}*", .{try renderTypePrefix(
                pass,
                ctype_pool,
                zcu,
                w,
                pointer_info.elem_ctype,
                .prefix,
                CQualifiers.init(.{
                    .@"const" = pointer_info.@"const",
                    .@"volatile" = pointer_info.@"volatile",
                }),
            )});
            trailing = .no_space;
        },

        // Aligned types are referenced through typedefs whose names are
        // scoped to the current pass; at flush time the global name is used.
        .aligned => switch (pass) {
            .nav => |nav| try w.print("nav__{d}_{d}", .{
                @intFromEnum(nav), @intFromEnum(ctype.index),
            }),
            .uav => |uav| try w.print("uav__{d}_{d}", .{
                @intFromEnum(uav), @intFromEnum(ctype.index),
            }),
            .flush => try renderAlignedTypeName(w, ctype),
        },

        // The element count belongs to the suffix; only open a grouping
        // paren when the parent is a prefix (e.g. pointer-to-array).
        .array, .vector => |sequence_info| {
            const child_trailing = try renderTypePrefix(
                pass,
                ctype_pool,
                zcu,
                w,
                sequence_info.elem_ctype,
                .suffix,
                qualifiers,
            );
            switch (parent_fix) {
                .prefix => {
                    try w.print("{f}(", .{child_trailing});
                    return .no_space;
                },
                .suffix => return child_trailing,
            }
        },

        .fwd_decl => |fwd_decl_info| switch (fwd_decl_info.name) {
            // Anonymous fwd decls get pass-scoped typedef names, like .aligned.
            .anon => switch (pass) {
                .nav => |nav| try w.print("nav__{d}_{d}", .{
                    @intFromEnum(nav), @intFromEnum(ctype.index),
                }),
                .uav => |uav| try w.print("uav__{d}_{d}", .{
                    @intFromEnum(uav), @intFromEnum(ctype.index),
                }),
                .flush => try renderFwdDeclTypeName(zcu, w, ctype, fwd_decl_info, ""),
            },
            .index => try renderFwdDeclTypeName(zcu, w, ctype, fwd_decl_info, ""),
        },

        .aggregate => |aggregate_info| switch (aggregate_info.name) {
            // Anonymous aggregates are rendered inline with their field list.
            .anon => {
                try w.print("{s} {s}", .{
                    @tagName(aggregate_info.tag),
                    if (aggregate_info.@"packed") "zig_packed(" else "",
                });
                try renderFields(zcu, w, ctype_pool, aggregate_info, 1);
                if (aggregate_info.@"packed") try w.writeByte(')');
            },
            // Named aggregates render as their forward declaration.
            .fwd_decl => |fwd_decl| return renderTypePrefix(
                pass,
                ctype_pool,
                zcu,
                w,
                fwd_decl,
                parent_fix,
                qualifiers,
            ),
        },

        // Like arrays, the parameter list belongs to the suffix.
        .function => |function_info| {
            const child_trailing = try renderTypePrefix(
                pass,
                ctype_pool,
                zcu,
                w,
                function_info.return_ctype,
                .suffix,
                .{},
            );
            switch (parent_fix) {
                .prefix => {
                    try w.print("{f}(", .{child_trailing});
                    return .no_space;
                },
                .suffix => return child_trailing,
            }
        },
    }
    // Append the requested qualifiers after the base type.
    var qualifier_it = qualifiers.iterator();
    while (qualifier_it.next()) |qualifier| {
        try w.print("{f}{s}", .{ trailing, @tagName(qualifier) });
        trailing = .maybe_space;
    }
    return trailing;
}
/// Renders everything after the declared identifier: closing grouping parens,
/// array lengths, and function parameter lists, mirroring the recursion of
/// `renderTypePrefix`. For function types, a `const` in `qualifiers` causes
/// parameter names (`a0`, `a1`, ...) to be emitted — callers use this to
/// distinguish definitions from bare declarations (see
/// `renderFunctionSignature`).
fn renderTypeSuffix(
    pass: DeclGen.Pass,
    ctype_pool: *const CType.Pool,
    zcu: *Zcu,
    w: *Writer,
    ctype: CType,
    parent_fix: CTypeFix,
    qualifiers: CQualifiers,
) Writer.Error!void {
    switch (ctype.info(ctype_pool)) {
        .basic, .aligned, .fwd_decl, .aggregate => {},
        .pointer => |pointer_info| try renderTypeSuffix(
            pass,
            ctype_pool,
            zcu,
            w,
            pointer_info.elem_ctype,
            .prefix,
            .{},
        ),
        .array, .vector => |sequence_info| {
            // Close the grouping paren opened by renderTypePrefix, if any.
            switch (parent_fix) {
                .prefix => try w.writeByte(')'),
                .suffix => {},
            }

            try w.print("[{}]", .{sequence_info.len});
            try renderTypeSuffix(pass, ctype_pool, zcu, w, sequence_info.elem_ctype, .suffix, .{});
        },
        .function => |function_info| {
            // Close the grouping paren opened by renderTypePrefix, if any.
            switch (parent_fix) {
                .prefix => try w.writeByte(')'),
                .suffix => {},
            }

            try w.writeByte('(');
            var need_comma = false;
            for (0..function_info.param_ctypes.len) |param_index| {
                const param_type = function_info.param_ctypes.at(param_index, ctype_pool);
                if (need_comma) try w.writeAll(", ");
                need_comma = true;
                const trailing =
                    try renderTypePrefix(pass, ctype_pool, zcu, w, param_type, .suffix, qualifiers);
                // Parameter names are only emitted for definitions.
                if (qualifiers.contains(.@"const")) try w.print("{f}a{d}", .{ trailing, param_index });
                try renderTypeSuffix(pass, ctype_pool, zcu, w, param_type, .suffix, .{});
            }
            if (function_info.varargs) {
                if (need_comma) try w.writeAll(", ");
                need_comma = true;
                try w.writeAll("...");
            }
            // C requires `void` for an empty parameter list.
            if (!need_comma) try w.writeAll("void");
            try w.writeByte(')');

            try renderTypeSuffix(pass, ctype_pool, zcu, w, function_info.return_ctype, .suffix, .{});
        },
    }
}
/// Renders the braced field list of an aggregate: one declaration per field
/// (indented with `indent + 1` spaces, closing brace at `indent`), with
/// alignment attributes wherever a field's alignment differs from ABI.
fn renderFields(
    zcu: *Zcu,
    w: *Writer,
    ctype_pool: *const CType.Pool,
    aggregate_info: CType.Info.Aggregate,
    indent: usize,
) !void {
    try w.writeAll("{\n");
    for (0..aggregate_info.fields.len) |field_index| {
        const field_info = aggregate_info.fields.at(field_index, ctype_pool);
        try w.splatByteAll(' ', indent + 1);
        switch (field_info.alignas.abiOrder()) {
            .lt => {
                // Under-alignment only occurs inside packed aggregates.
                std.debug.assert(aggregate_info.@"packed");
                if (field_info.alignas.@"align" != .@"1") try w.print("zig_under_align({}) ", .{
                    field_info.alignas.toByteUnits(),
                });
            },
            // In a packed aggregate, non-1 alignment must still be spelled
            // out explicitly even when it matches ABI.
            .eq => if (aggregate_info.@"packed" and field_info.alignas.@"align" != .@"1")
                try w.print("zig_align({}) ", .{field_info.alignas.toByteUnits()}),
            .gt => {
                std.debug.assert(field_info.alignas.@"align" != .@"1");
                try w.print("zig_align({}) ", .{field_info.alignas.toByteUnits()});
            },
        }
        const trailing = try renderTypePrefix(
            .flush,
            ctype_pool,
            zcu,
            w,
            field_info.ctype,
            .suffix,
            .{},
        );
        try w.print("{f}{f}", .{ trailing, fmtCTypePoolString(field_info.name, ctype_pool, true) });
        try renderTypeSuffix(.flush, ctype_pool, zcu, w, field_info.ctype, .suffix, .{});
        if (field_info.ctype.isNonString(ctype_pool)) try w.writeAll(" zig_nonstring");
        try w.writeAll(";\n");
    }
    try w.splatByteAll(' ', indent);
    try w.writeByte('}');
}
2627
/// Emits any global declaration needed for `global_ctype` (typedefs for
/// aligned/anonymous types, forward declarations, aggregate definitions),
/// plus per-pass typedefs mapping the local (nav/uav) ctype name to the
/// global one. `found_existing` means the global type was already emitted by
/// an earlier call, so only the pass-local typedef is emitted.
pub fn genTypeDecl(
    zcu: *Zcu,
    w: *Writer,
    global_ctype_pool: *const CType.Pool,
    global_ctype: CType,
    pass: DeclGen.Pass,
    decl_ctype_pool: *const CType.Pool,
    decl_ctype: CType,
    found_existing: bool,
) !void {
    switch (global_ctype.info(global_ctype_pool)) {
        // These render inline; no standalone declaration is required.
        .basic, .pointer, .array, .vector, .function => {},
        .aligned => |aligned_info| {
            if (!found_existing) {
                // Aligned typedefs are only created to lower alignment.
                std.debug.assert(aligned_info.alignas.abiOrder().compare(.lt));
                try w.print("typedef zig_under_align({d}) ", .{aligned_info.alignas.toByteUnits()});
                try w.print("{f}", .{try renderTypePrefix(
                    .flush,
                    global_ctype_pool,
                    zcu,
                    w,
                    aligned_info.ctype,
                    .suffix,
                    .{},
                )});
                try renderAlignedTypeName(w, global_ctype);
                try renderTypeSuffix(.flush, global_ctype_pool, zcu, w, aligned_info.ctype, .suffix, .{});
                try w.writeAll(";\n");
            }
            switch (pass) {
                // Alias the pass-local name to the global typedef.
                .nav, .uav => {
                    try w.writeAll("typedef ");
                    _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, global_ctype, .suffix, .{});
                    try w.writeByte(' ');
                    _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, w, decl_ctype, .suffix, .{});
                    try w.writeAll(";\n");
                },
                .flush => {},
            }
        },
        .fwd_decl => |fwd_decl_info| switch (fwd_decl_info.name) {
            // Anonymous fwd decls: alias the pass-local name to the global one.
            .anon => switch (pass) {
                .nav, .uav => {
                    try w.writeAll("typedef ");
                    _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, global_ctype, .suffix, .{});
                    try w.writeByte(' ');
                    _ = try renderTypePrefix(pass, decl_ctype_pool, zcu, w, decl_ctype, .suffix, .{});
                    try w.writeAll(";\n");
                },
                .flush => {},
            },
            .index => |index| if (!found_existing) {
                const ip = &zcu.intern_pool;
                const ty: Type = .fromInterned(index);
                _ = try renderTypePrefix(.flush, global_ctype_pool, zcu, w, global_ctype, .suffix, .{});
                try w.writeByte(';');
                // Annotate with the Zig type name for readability, unless the
                // owning module is stripped.
                const file_scope = ty.typeDeclInstAllowGeneratedTag(zcu).?.resolveFile(ip);
                if (!zcu.fileByIndex(file_scope).mod.?.strip) try w.print(" /* {f} */", .{
                    ty.containerTypeName(ip).fmt(ip),
                });
                try w.writeByte('\n');
            },
        },
        .aggregate => |aggregate_info| switch (aggregate_info.name) {
            .anon => {},
            // Emit the full definition for the previously fwd-declared tag.
            .fwd_decl => |fwd_decl| if (!found_existing) {
                try renderFwdDeclTypeName(
                    zcu,
                    w,
                    fwd_decl,
                    fwd_decl.info(global_ctype_pool).fwd_decl,
                    if (aggregate_info.@"packed") "zig_packed(" else "",
                );
                try w.writeByte(' ');
                try renderFields(zcu, w, global_ctype_pool, aggregate_info, 0);
                if (aggregate_info.@"packed") try w.writeByte(')');
                try w.writeAll(";\n");
            },
        },
    }
}
2709
/// Emits each piece of module-level (global) assembly as a C `__asm`
/// declaration with the assembly source as a string literal.
pub fn genGlobalAsm(zcu: *Zcu, w: *Writer) !void {
    for (zcu.global_assembly.values()) |asm_source| {
        try w.print("__asm({f});\n", .{fmtStringLiteral(asm_source, null)});
    }
}
2715
/// Emits global declarations for the error set: an enum assigning each error
/// an integer value (starting at 1), per-error static name string constants,
/// and the `zig_errorName` table of name slices.
pub fn genErrDecls(o: *Object) Error!void {
    const pt = o.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const w = &o.code.writer;

    var max_name_len: usize = 0;
    // do not generate an invalid empty enum when the global error set is empty
    const names = ip.global_error_set.getNamesFromMainThread();
    if (names.len > 0) {
        try w.writeAll("enum {");
        o.indent();
        try o.newline();
        // Error values are assigned starting at 1.
        for (names, 1..) |name_nts, value| {
            const name = name_nts.toSlice(ip);
            max_name_len = @max(name.len, max_name_len);
            const err_val = try pt.intern(.{ .err = .{
                .ty = .anyerror_type,
                .name = name_nts,
            } });
            try o.dg.renderValue(w, Value.fromInterned(err_val), .Other);
            try w.print(" = {d}u,", .{value});
            try o.newline();
        }
        try o.outdent();
        try w.writeAll("};");
        try o.newline();
    }
    const array_identifier = "zig_errorName";
    const name_prefix = array_identifier ++ "_";
    // Reusable buffer for building "zig_errorName_<name>" identifiers.
    const name_buf = try o.dg.gpa.alloc(u8, name_prefix.len + max_name_len);
    defer o.dg.gpa.free(name_buf);

    @memcpy(name_buf[0..name_prefix.len], name_prefix);
    for (names) |name| {
        const name_slice = name.toSlice(ip);
        @memcpy(name_buf[name_prefix.len..][0..name_slice.len], name_slice);
        const identifier = name_buf[0 .. name_prefix.len + name_slice.len];

        // Each error name becomes a static NUL-terminated u8 array constant.
        const name_ty = try pt.arrayType(.{
            .len = name_slice.len,
            .child = .u8_type,
            .sentinel = .zero_u8,
        });
        const name_val = try pt.intern(.{ .aggregate = .{
            .ty = name_ty.toIntern(),
            .storage = .{ .bytes = name.toString() },
        } });

        try w.writeAll("static ");
        try o.dg.renderTypeAndName(
            w,
            name_ty,
            .{ .identifier = identifier },
            Const,
            .none,
            .complete,
        );
        try w.writeAll(" = ");
        try o.dg.renderValue(w, Value.fromInterned(name_val), .StaticInitializer);
        try w.writeByte(';');
        try o.newline();
    }

    // Table of {ptr, len} slices, one per error name.
    // NOTE(review): the array length is names.len + 1 but only names.len
    // initializers are emitted below; the remaining element is implicitly
    // zero-initialized in C — confirm the intended indexing at the use site.
    const name_array_ty = try pt.arrayType(.{
        .len = 1 + names.len,
        .child = .slice_const_u8_sentinel_0_type,
    });

    try w.writeAll("static ");
    try o.dg.renderTypeAndName(
        w,
        name_array_ty,
        .{ .identifier = array_identifier },
        Const,
        .none,
        .complete,
    );
    try w.writeAll(" = {");
    for (names, 1..) |name_nts, val| {
        const name = name_nts.toSlice(ip);
        if (val > 1) try w.writeAll(", ");
        try w.print("{{" ++ name_prefix ++ "{f}, {f}}}", .{
            fmtIdentUnsolo(name),
            try o.dg.fmtIntLiteralDec(try pt.intValue(.usize, name.len), .StaticInitializer),
        });
    }
    try w.writeAll("};");
    try o.newline();
}
2806
/// Emits the definition of one lazily-generated function: either a
/// `tag_name` helper mapping an enum value to its name slice, or a
/// `never_tail`/`never_inline` wrapper that forwards all arguments to the
/// real function.
pub fn genLazyFn(o: *Object, lazy_ctype_pool: *const CType.Pool, lazy_fn: LazyFnMap.Entry) Error!void {
    const pt = o.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ctype_pool = &o.dg.ctype_pool;
    const w = &o.code.writer;
    const key = lazy_fn.key_ptr.*;
    const val = lazy_fn.value_ptr;
    switch (key) {
        .tag_name => |enum_ty_ip| {
            const enum_ty: Type = .fromInterned(enum_ty_ip);
            const name_slice_ty: Type = .slice_const_u8_sentinel_0;

            // static <slice ty> <fn_name>(<enum ty> const tag) { switch (tag) { ... } }
            try w.writeAll("static ");
            try o.dg.renderType(w, name_slice_ty);
            try w.print(" {f}(", .{val.fn_name.fmt(lazy_ctype_pool)});
            try o.dg.renderTypeAndName(w, enum_ty, .{ .identifier = "tag" }, Const, .none, .complete);
            try w.writeAll(") {");
            o.indent();
            try o.newline();
            try w.writeAll("switch (tag) {");
            o.indent();
            try o.newline();
            const tag_names = enum_ty.enumFields(zcu);
            for (0..tag_names.len) |tag_index| {
                const tag_name = tag_names.get(ip)[tag_index];
                const tag_name_len = tag_name.length(ip);
                const tag_val = try pt.enumValueFieldIndex(enum_ty, @intCast(tag_index));

                // Each case returns a slice over a local static NUL-terminated
                // array containing the tag's name.
                const name_ty = try pt.arrayType(.{
                    .len = tag_name_len,
                    .child = .u8_type,
                    .sentinel = .zero_u8,
                });
                const name_val = try pt.intern(.{ .aggregate = .{
                    .ty = name_ty.toIntern(),
                    .storage = .{ .bytes = tag_name.toString() },
                } });

                try w.print("case {f}: {{", .{
                    try o.dg.fmtIntLiteralDec(try tag_val.intFromEnum(enum_ty, pt), .Other),
                });
                o.indent();
                try o.newline();
                try w.writeAll("static ");
                try o.dg.renderTypeAndName(w, name_ty, .{ .identifier = "name" }, Const, .none, .complete);
                try w.writeAll(" = ");
                try o.dg.renderValue(w, Value.fromInterned(name_val), .StaticInitializer);
                try w.writeByte(';');
                try o.newline();
                try w.writeAll("return (");
                try o.dg.renderType(w, name_slice_ty);
                try w.print("){{{f}, {f}}};", .{
                    fmtIdentUnsolo("name"),
                    try o.dg.fmtIntLiteralDec(try pt.intValue(.usize, tag_name_len), .Other),
                });
                try o.newline();
                try o.outdent();
                try w.writeByte('}');
                try o.newline();
            }
            try o.outdent();
            try w.writeByte('}');
            try o.newline();
            // A tag value outside the enum's fields is unreachable.
            try airUnreach(o);
            try o.outdent();
            try w.writeByte('}');
            try o.newline();
        },
        .never_tail, .never_inline => |fn_nav_index| {
            const fn_val = zcu.navValue(fn_nav_index);
            const fn_ctype = try o.dg.ctypeFromType(fn_val.typeOf(zcu), .complete);
            const fn_info = fn_ctype.info(ctype_pool).function;
            const fn_name = fmtCTypePoolString(val.fn_name, lazy_ctype_pool, true);

            // Forward declaration carrying the zig_never_tail/zig_never_inline
            // attribute macro, followed by the wrapper definition.
            const fwd = &o.dg.fwd_decl.writer;
            try fwd.print("static zig_{s} ", .{@tagName(key)});
            try o.dg.renderFunctionSignature(fwd, fn_val, ip.getNav(fn_nav_index).getAlignment(), .forward, .{
                .fmt_ctype_pool_string = fn_name,
            });
            try fwd.writeAll(";\n");

            try w.print("zig_{s} ", .{@tagName(key)});
            try o.dg.renderFunctionSignature(w, fn_val, .none, .complete, .{
                .fmt_ctype_pool_string = fn_name,
            });
            try w.writeAll(" {");
            o.indent();
            try o.newline();
            // Forward all parameters (a0, a1, ...) to the real function.
            try w.writeAll("return ");
            try o.dg.renderNavName(w, fn_nav_index);
            try w.writeByte('(');
            for (0..fn_info.param_ctypes.len) |arg| {
                if (arg > 0) try w.writeAll(", ");
                try w.print("a{d}", .{arg});
            }
            try w.writeAll(");");
            try o.newline();
            try o.outdent();
            try w.writeByte('}');
            try o.newline();
        },
    }
}
2911
/// Entry point for generating the C code of a single function. Builds the
/// temporary `Function`/`Object`/`DeclGen` state, runs `genFunc`, and
/// packages the resulting buffers and side tables into an owned `Mir`.
pub fn generate(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    func_index: InternPool.Index,
    air: *const Air,
    liveness: *const ?Air.Liveness,
) @import("../codegen.zig").CodeGenError!Mir {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;

    _ = src_loc;
    assert(lf.tag == .c);

    const func = zcu.funcInfo(func_index);

    var function: Function = .{
        .value_map = .init(gpa),
        .air = air.*,
        .liveness = liveness.*.?,
        .func_index = func_index,
        .object = .{
            .dg = .{
                .gpa = gpa,
                .pt = pt,
                .mod = zcu.navFileScope(func.owner_nav).mod.?,
                .error_msg = null,
                .pass = .{ .nav = func.owner_nav },
                .is_naked_fn = Type.fromInterned(func.ty).fnCallingConvention(zcu) == .naked,
                .expected_block = null,
                .fwd_decl = .init(gpa),
                .ctype_pool = .empty,
                .scratch = .empty,
                .uavs = .empty,
            },
            .code_header = .init(gpa),
            .code = .init(gpa),
            .indent_counter = 0,
        },
        .lazy_fns = .empty,
    };
    defer {
        // After the ownership transfer into `mir` below, `move()`/
        // `toOwnedSlice()` leave these containers reset, so these deinits
        // only free whatever was not transferred.
        function.object.code_header.deinit();
        function.object.code.deinit();
        function.object.dg.fwd_decl.deinit();
        function.object.dg.ctype_pool.deinit(gpa);
        function.object.dg.scratch.deinit(gpa);
        function.object.dg.uavs.deinit(gpa);
        function.deinit();
    }
    try function.object.dg.ctype_pool.init(gpa);

    genFunc(&function) catch |err| switch (err) {
        error.AnalysisFail => return zcu.codegenFailMsg(func.owner_nav, function.object.dg.error_msg.?),
        error.OutOfMemory => return error.OutOfMemory,
        // Output goes to in-memory buffers, so a writer failure indicates a
        // failed allocation.
        error.WriteFailed => return error.OutOfMemory,
    };

    var mir: Mir = .{
        .uavs = .empty,
        .code = &.{},
        .code_header = &.{},
        .fwd_decl = &.{},
        .ctype_pool = .empty,
        .lazy_fns = .empty,
    };
    errdefer mir.deinit(gpa);
    // Transfer ownership of all buffers and side tables to the Mir.
    mir.uavs = function.object.dg.uavs.move();
    mir.code_header = try function.object.code_header.toOwnedSlice();
    mir.code = try function.object.code.toOwnedSlice();
    mir.fwd_decl = try function.object.dg.fwd_decl.toOwnedSlice();
    mir.ctype_pool = function.object.dg.ctype_pool.move();
    mir.lazy_fns = function.lazy_fns.move();
    return mir;
}
2987
/// Renders the function selected by `f.object.dg.pass.nav`: a `static`
/// forward declaration into `fwd_decl`, the definition's signature into
/// `code_header`, and the body into `code`. After the body is generated, the
/// declarations of all locals are appended to the code header, bucketed by
/// type and sorted by alignment, descending.
pub fn genFunc(f: *Function) Error!void {
    const tracy = trace(@src());
    defer tracy.end();

    const o = &f.object;
    const zcu = o.dg.pt.zcu;
    const ip = &zcu.intern_pool;
    const gpa = o.dg.gpa;
    const nav_index = o.dg.pass.nav;
    const nav_val = zcu.navValue(nav_index);
    const nav = ip.getNav(nav_index);

    // Forward declaration, so emission order of function definitions does not
    // matter to callers.
    const fwd = &o.dg.fwd_decl.writer;
    try fwd.writeAll("static ");
    try o.dg.renderFunctionSignature(
        fwd,
        nav_val,
        nav.status.fully_resolved.alignment,
        .forward,
        .{ .nav = nav_index },
    );
    try fwd.writeAll(";\n");

    // The definition's signature and opening brace go into the code header;
    // local variable declarations are appended to it near the end of this
    // function, once the body has revealed which locals exist.
    const ch = &o.code_header.writer;
    if (nav.status.fully_resolved.@"linksection".toSlice(ip)) |s|
        try ch.print("zig_linksection_fn({f}) ", .{fmtStringLiteral(s, null)});
    try o.dg.renderFunctionSignature(
        ch,
        nav_val,
        .none,
        .complete,
        .{ .nav = nav_index },
    );
    try ch.writeAll(" {\n ");

    f.free_locals_map.clearRetainingCapacity();

    const main_body = f.air.getMainBody();
    o.indent();
    try genBodyResolveState(f, undefined, &.{}, main_body, true);
    try o.outdent();
    try o.code.writer.writeByte('}');
    try o.newline();
    // A pending `expected_block` at this point indicates runtime instructions
    // were emitted inside a naked function, which is not allowed.
    if (o.dg.expected_block) |_|
        return f.fail("runtime code not allowed in naked function", .{});

    // Take advantage of the free_locals map to bucket locals per type. All
    // locals corresponding to AIR instructions should be in there due to
    // Liveness analysis, however, locals from alloc instructions will be
    // missing. These are added now to complete the map. Then we can sort by
    // alignment, descending.
    const free_locals = &f.free_locals_map;
    assert(f.value_map.count() == 0); // there must not be any unfreed locals
    for (f.allocs.keys(), f.allocs.values()) |local_index, should_emit| {
        if (!should_emit) continue;
        const local = f.locals.items[local_index];
        log.debug("inserting local {d} into free_locals", .{local_index});
        const gop = try free_locals.getOrPut(gpa, local.getType());
        if (!gop.found_existing) gop.value_ptr.* = .{};
        try gop.value_ptr.putNoClobber(gpa, local_index, {});
    }

    // Orders the type buckets by descending alignment.
    const SortContext = struct {
        keys: []const LocalType,

        pub fn lessThan(ctx: @This(), lhs_index: usize, rhs_index: usize) bool {
            const lhs_ty = ctx.keys[lhs_index];
            const rhs_ty = ctx.keys[rhs_index];
            return lhs_ty.alignas.order(rhs_ty.alignas).compare(.gt);
        }
    };
    free_locals.sort(SortContext{ .keys = free_locals.keys() });

    // Emit the declaration of every local into the code header.
    for (free_locals.values()) |list| {
        for (list.keys()) |local_index| {
            const local = f.locals.items[local_index];
            try o.dg.renderCTypeAndName(ch, local.ctype, .{ .local = local_index }, .{}, local.flags.alignas);
            try ch.writeAll(";\n ");
        }
    }
}
3069
/// Renders the declaration/definition of a non-function global — the `Nav`
/// selected by `o.dg.pass`. Values with neither a function type nor runtime
/// bits are skipped entirely. Externs only get forward declarations,
/// variables get a forward declaration plus a definition with their
/// initializer, and all other values are emitted through `genDeclValue`.
pub fn genDecl(o: *Object) Error!void {
    const tracy = trace(@src());
    defer tracy.end();

    const pt = o.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const nav = ip.getNav(o.dg.pass.nav);
    const nav_ty: Type = .fromInterned(nav.typeOf(ip));

    if (!nav_ty.isFnOrHasRuntimeBitsIgnoreComptime(zcu)) return;
    switch (ip.indexToKey(nav.status.fully_resolved.val)) {
        .@"extern" => |@"extern"| {
            // Non-function externs need only a typed forward declaration.
            if (!ip.isFunctionType(nav_ty.toIntern())) return o.dg.renderFwdDecl(o.dg.pass.nav, .{
                .is_const = @"extern".is_const,
                .is_threadlocal = @"extern".is_threadlocal,
                .linkage = @"extern".linkage,
                .visibility = @"extern".visibility,
            });

            // Extern functions: forward-declare the full signature.
            const fwd = &o.dg.fwd_decl.writer;
            try fwd.writeAll("zig_extern ");
            try o.dg.renderFunctionSignature(
                fwd,
                Value.fromInterned(nav.status.fully_resolved.val),
                nav.status.fully_resolved.alignment,
                .forward,
                .{ .@"export" = .{
                    .main_name = nav.name,
                    .extern_name = nav.name,
                } },
            );
            try fwd.writeAll(";\n");
        },
        .variable => |variable| {
            try o.dg.renderFwdDecl(o.dg.pass.nav, .{
                .is_const = false,
                .is_threadlocal = variable.is_threadlocal,
                .linkage = .internal,
                .visibility = .default,
            });
            // Definition, including the variable's initializer value.
            const w = &o.code.writer;
            if (variable.is_threadlocal and !o.dg.mod.single_threaded) try w.writeAll("zig_threadlocal ");
            if (nav.status.fully_resolved.@"linksection".toSlice(&zcu.intern_pool)) |s|
                try w.print("zig_linksection({f}) ", .{fmtStringLiteral(s, null)});
            try o.dg.renderTypeAndName(
                w,
                nav_ty,
                .{ .nav = o.dg.pass.nav },
                .{},
                nav.status.fully_resolved.alignment,
                .complete,
            );
            try w.writeAll(" = ");
            try o.dg.renderValue(w, Value.fromInterned(variable.init), .StaticInitializer);
            try w.writeByte(';');
            try o.newline();
        },
        else => try genDeclValue(
            o,
            Value.fromInterned(nav.status.fully_resolved.val),
            .{ .nav = o.dg.pass.nav },
            nav.status.fully_resolved.alignment,
            nav.status.fully_resolved.@"linksection",
        ),
    }
}
3137
/// Emits a constant global: a `static` forward declaration into `fwd_decl`,
/// then the definition — with optional link section — initialized from `val`
/// as a static initializer, into the code buffer.
pub fn genDeclValue(
    o: *Object,
    val: Value,
    decl_c_value: CValue,
    alignment: Alignment,
    @"linksection": InternPool.OptionalNullTerminatedString,
) Error!void {
    const zcu = o.dg.pt.zcu;
    const val_ty = val.typeOf(zcu);

    // Forward declaration so emission order does not matter.
    const fwd_writer = &o.dg.fwd_decl.writer;
    try fwd_writer.writeAll("static ");
    try o.dg.renderTypeAndName(fwd_writer, val_ty, decl_c_value, Const, alignment, .complete);
    try fwd_writer.writeAll(";\n");

    // Definition with the rendered static initializer.
    const code_writer = &o.code.writer;
    if (@"linksection".toSlice(&zcu.intern_pool)) |section_name|
        try code_writer.print("zig_linksection({f}) ", .{fmtStringLiteral(section_name, null)});
    try o.dg.renderTypeAndName(code_writer, val_ty, decl_c_value, Const, alignment, .complete);
    try code_writer.writeAll(" = ");
    try o.dg.renderValue(code_writer, val, .StaticInitializer);
    try code_writer.writeByte(';');
    try o.newline();
}
3162
/// Renders export declarations for `exported` (a nav or a uav). First a
/// `#define` aliases the internal C name to the first export's name; then a
/// `zig_extern` declaration is emitted for every export index. Function
/// exports use `renderFunctionSignature`; other values get type-and-name
/// declarations with `zig_mangled*`/`zig_export` annotations as needed.
pub fn genExports(dg: *DeclGen, exported: Zcu.Exported, export_indices: []const Zcu.Export.Index) !void {
    const zcu = dg.pt.zcu;
    const ip = &zcu.intern_pool;
    const fwd = &dg.fwd_decl.writer;

    // The first export's name is the canonical symbol name; alias the
    // internal name to it with a #define.
    const main_name = export_indices[0].ptr(zcu).opts.name;
    try fwd.writeAll("#define ");
    switch (exported) {
        .nav => |nav| try dg.renderNavName(fwd, nav),
        .uav => |uav| try DeclGen.renderUavName(fwd, Value.fromInterned(uav)),
    }
    try fwd.writeByte(' ');
    try fwd.print("{f}", .{fmtIdentSolo(main_name.toSlice(ip))});
    try fwd.writeByte('\n');

    const exported_val = exported.getValue(zcu);
    // Function exports: one extern signature per export, then done.
    if (ip.isFunctionType(exported_val.typeOf(zcu).toIntern())) return for (export_indices) |export_index| {
        const @"export" = export_index.ptr(zcu);
        try fwd.writeAll("zig_extern ");
        if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage_fn ");
        try dg.renderFunctionSignature(
            fwd,
            exported.getValue(zcu),
            exported.getAlign(zcu),
            .forward,
            .{ .@"export" = .{
                .main_name = main_name,
                .extern_name = @"export".opts.name,
            } },
        );
        try fwd.writeAll(";\n");
    };
    // Data exports: determine constness from the exported value's kind.
    const is_const = switch (ip.indexToKey(exported_val.toIntern())) {
        .func => unreachable,
        .@"extern" => |@"extern"| @"extern".is_const,
        .variable => false,
        else => true,
    };
    for (export_indices) |export_index| {
        const @"export" = export_index.ptr(zcu);
        try fwd.writeAll("zig_extern ");
        if (@"export".opts.linkage == .weak) try fwd.writeAll("zig_weak_linkage ");
        if (@"export".opts.section.toSlice(ip)) |s| try fwd.print("zig_linksection({f}) ", .{
            fmtStringLiteral(s, null),
        });
        const extern_name = @"export".opts.name.toSlice(ip);
        const is_mangled = isMangledIdent(extern_name, true);
        // An export whose name differs from the canonical name needs an alias.
        const is_export = @"export".opts.name != main_name;
        try dg.renderTypeAndName(
            fwd,
            exported.getValue(zcu).typeOf(zcu),
            .{ .identifier = extern_name },
            CQualifiers.init(.{ .@"const" = is_const }),
            exported.getAlign(zcu),
            .complete,
        );
        if (is_mangled and is_export) {
            try fwd.print(" zig_mangled_export({f}, {f}, {f})", .{
                fmtIdentSolo(extern_name),
                fmtStringLiteral(extern_name, null),
                fmtStringLiteral(main_name.toSlice(ip), null),
            });
        } else if (is_mangled) {
            try fwd.print(" zig_mangled({f}, {f})", .{
                fmtIdentSolo(extern_name), fmtStringLiteral(extern_name, null),
            });
        } else if (is_export) {
            try fwd.print(" zig_export({f}, {f})", .{
                fmtStringLiteral(main_name.toSlice(ip), null),
                fmtStringLiteral(extern_name, null),
            });
        }
        try fwd.writeAll(";\n");
    }
}
3238
/// Generate code for an entire body which ends with a `noreturn` instruction,
/// wrapping it in braces. The states of `value_map` and `free_locals_map` are
/// undefined after the generation, and new locals may not have been added to
/// `free_locals_map`. For a version of this function that restores this
/// state, see `genBodyResolveState`.
fn genBody(f: *Function, body: []const Air.Inst.Index) Error!void {
    const writer = &f.object.code.writer;
    if (body.len == 0) {
        // An empty body still needs a syntactically valid C statement.
        try writer.writeAll("{}");
        return;
    }
    try writer.writeByte('{');
    f.object.indent();
    try f.object.newline();
    try genBodyInner(f, body);
    try f.object.outdent();
    try writer.writeByte('}');
}
3256
/// Generate code for an entire body which ends with a `noreturn` instruction. The states of
/// `value_map` and `free_locals_map` are restored to their original values, and any non-allocated
/// locals introduced within the body are correctly added to `free_locals_map`. Operands in
/// `leading_deaths` have their deaths processed before the body is generated.
/// A scope is introduced (using braces) only if `inner` is `false`.
/// If `leading_deaths` is empty, `inst` may be `undefined`.
fn genBodyResolveState(f: *Function, inst: Air.Inst.Index, leading_deaths: []const Air.Inst.Index, body: []const Air.Inst.Index, inner: bool) Error!void {
    if (body.len == 0) {
        // Don't go to the expense of cloning everything!
        if (!inner) try f.object.code.writer.writeAll("{}");
        return;
    }

    // TODO: we can probably avoid the copies in some other common cases too.

    const gpa = f.object.dg.gpa;

    // Save the original value_map and free_locals_map so that we can restore them after the body.
    var old_value_map = try f.value_map.clone();
    defer old_value_map.deinit();
    var old_free_locals = try cloneFreeLocalsMap(gpa, &f.free_locals_map);
    defer deinitFreeLocalsMap(gpa, &old_free_locals);

    // Remember how many locals there were before entering the body so that we can free any that
    // were newly introduced. Any new locals must necessarily be logically free after the body
    // is complete.
    const pre_locals_len: LocalIndex = @intCast(f.locals.items.len);

    // Process the deaths requested by the caller before generating the body.
    for (leading_deaths) |death| {
        try die(f, inst, death.toRef());
    }

    if (inner) {
        try genBodyInner(f, body);
    } else {
        try genBody(f, body);
    }

    // Discard whatever state the body produced and restore the saved maps.
    f.value_map.deinit();
    f.value_map = old_value_map.move();
    deinitFreeLocalsMap(gpa, &f.free_locals_map);
    f.free_locals_map = old_free_locals.move();

    // Now, use the lengths we stored earlier to detect any locals the body generated, and free
    // them, unless they were used to store allocs.

    for (pre_locals_len..f.locals.items.len) |local_i| {
        const local_index: LocalIndex = @intCast(local_i);
        if (f.allocs.contains(local_index)) {
            continue;
        }
        try freeLocal(f, inst, local_index, null);
    }
}
3311
/// Shared implementation of `genBody`/`genBodyResolveState`: lowers each AIR
/// instruction in `body` to C by dispatching on its tag, mapping each produced
/// value into `value_map`. A body always ends with a `noreturn` instruction,
/// which returns out of this function, so the end of the loop is unreachable.
fn genBodyInner(f: *Function, body: []const Air.Inst.Index) Error!void {
    const zcu = f.object.dg.pt.zcu;
    const ip = &zcu.intern_pool;
    const air_tags = f.air.instructions.items(.tag);
    const air_datas = f.air.instructions.items(.data);

    for (body) |inst| {
        // In a naked function, reaching any runtime instruction is an error.
        if (f.object.dg.expected_block) |_|
            return f.fail("runtime code not allowed in naked function", .{});
        // Skip unused instructions unless they must be lowered for side effects.
        if (f.liveness.isUnused(inst) and !f.air.mustLower(inst, ip))
            continue;

        const result_value = switch (air_tags[@intFromEnum(inst)]) {
            // zig fmt: off
            .inferred_alloc, .inferred_alloc_comptime => unreachable,

            // No "scalarize" legalizations are enabled, so these instructions never appear.
            .legalize_vec_elem_val => unreachable,
            .legalize_vec_store_elem => unreachable,
            // No soft float legalizations are enabled.
            .legalize_compiler_rt_call => unreachable,

            .arg => try airArg(f, inst),

            .breakpoint => try airBreakpoint(f),
            .ret_addr => try airRetAddr(f, inst),
            .frame_addr => try airFrameAddress(f, inst),

            .ptr_add => try airPtrAddSub(f, inst, '+'),
            .ptr_sub => try airPtrAddSub(f, inst, '-'),

            // TODO use a different strategy for add, sub, mul, div
            // that communicates to the optimizer that wrapping is UB.
            .add => try airBinOp(f, inst, "+", "add", .none),
            .sub => try airBinOp(f, inst, "-", "sub", .none),
            .mul => try airBinOp(f, inst, "*", "mul", .none),

            .neg => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "neg", .none),
            .div_float => try airBinBuiltinCall(f, inst, "div", .none),

            .div_trunc, .div_exact => try airBinOp(f, inst, "/", "div_trunc", .none),
            .rem => blk: {
                const bin_op = air_datas[@intFromEnum(inst)].bin_op;
                const lhs_scalar_ty = f.typeOf(bin_op.lhs).scalarType(zcu);
                // For binary operations @TypeOf(lhs)==@TypeOf(rhs),
                // so we only check one.
                break :blk if (lhs_scalar_ty.isInt(zcu))
                    try airBinOp(f, inst, "%", "rem", .none)
                else
                    try airBinBuiltinCall(f, inst, "fmod", .none);
            },
            .div_floor => try airBinBuiltinCall(f, inst, "div_floor", .none),
            .mod => try airBinBuiltinCall(f, inst, "mod", .none),
            .abs => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "abs", .none),

            .add_wrap => try airBinBuiltinCall(f, inst, "addw", .bits),
            .sub_wrap => try airBinBuiltinCall(f, inst, "subw", .bits),
            .mul_wrap => try airBinBuiltinCall(f, inst, "mulw", .bits),

            .add_sat => try airBinBuiltinCall(f, inst, "adds", .bits),
            .sub_sat => try airBinBuiltinCall(f, inst, "subs", .bits),
            .mul_sat => try airBinBuiltinCall(f, inst, "muls", .bits),
            .shl_sat => try airBinBuiltinCall(f, inst, "shls", .bits),

            .sqrt => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "sqrt", .none),
            .sin => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "sin", .none),
            .cos => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "cos", .none),
            .tan => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "tan", .none),
            .exp => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "exp", .none),
            .exp2 => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "exp2", .none),
            .log => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "log", .none),
            .log2 => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "log2", .none),
            .log10 => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "log10", .none),
            .floor => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "floor", .none),
            .ceil => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "ceil", .none),
            .round => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "round", .none),
            .trunc_float => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].un_op, "trunc", .none),

            .mul_add => try airMulAdd(f, inst),

            .add_with_overflow => try airOverflow(f, inst, "add", .bits),
            .sub_with_overflow => try airOverflow(f, inst, "sub", .bits),
            .mul_with_overflow => try airOverflow(f, inst, "mul", .bits),
            .shl_with_overflow => try airOverflow(f, inst, "shl", .bits),

            .min => try airMinMax(f, inst, '<', "min"),
            .max => try airMinMax(f, inst, '>', "max"),

            .slice => try airSlice(f, inst),

            .cmp_gt => try airCmpOp(f, inst, air_datas[@intFromEnum(inst)].bin_op, .gt),
            .cmp_gte => try airCmpOp(f, inst, air_datas[@intFromEnum(inst)].bin_op, .gte),
            .cmp_lt => try airCmpOp(f, inst, air_datas[@intFromEnum(inst)].bin_op, .lt),
            .cmp_lte => try airCmpOp(f, inst, air_datas[@intFromEnum(inst)].bin_op, .lte),

            .cmp_eq => try airEquality(f, inst, .eq),
            .cmp_neq => try airEquality(f, inst, .neq),

            .cmp_vector => blk: {
                const ty_pl = air_datas[@intFromEnum(inst)].ty_pl;
                const extra = f.air.extraData(Air.VectorCmp, ty_pl.payload).data;
                break :blk try airCmpOp(f, inst, extra, extra.compareOperator());
            },
            .cmp_lt_errors_len => try airCmpLtErrorsLen(f, inst),

            // bool_and and bool_or are non-short-circuit operations
            .bool_and, .bit_and => try airBinOp(f, inst, "&", "and", .none),
            .bool_or, .bit_or => try airBinOp(f, inst, "|", "or", .none),
            .xor => try airBinOp(f, inst, "^", "xor", .none),
            .shr, .shr_exact => try airBinBuiltinCall(f, inst, "shr", .none),
            .shl, => try airBinBuiltinCall(f, inst, "shlw", .bits),
            .shl_exact => try airBinOp(f, inst, "<<", "shl", .none),
            .not => try airNot (f, inst),

            .optional_payload => try airOptionalPayload(f, inst, false),
            .optional_payload_ptr => try airOptionalPayload(f, inst, true),
            .optional_payload_ptr_set => try airOptionalPayloadPtrSet(f, inst),
            .wrap_optional => try airWrapOptional(f, inst),

            .is_err => try airIsErr(f, inst, false, "!="),
            .is_non_err => try airIsErr(f, inst, false, "=="),
            .is_err_ptr => try airIsErr(f, inst, true, "!="),
            .is_non_err_ptr => try airIsErr(f, inst, true, "=="),

            .is_null => try airIsNull(f, inst, .eq, false),
            .is_non_null => try airIsNull(f, inst, .neq, false),
            .is_null_ptr => try airIsNull(f, inst, .eq, true),
            .is_non_null_ptr => try airIsNull(f, inst, .neq, true),

            .alloc => try airAlloc(f, inst),
            .ret_ptr => try airRetPtr(f, inst),
            .assembly => try airAsm(f, inst),
            .bitcast => try airBitcast(f, inst),
            .intcast => try airIntCast(f, inst),
            .trunc => try airTrunc(f, inst),
            .load => try airLoad(f, inst),
            .store => try airStore(f, inst, false),
            .store_safe => try airStore(f, inst, true),
            .struct_field_ptr => try airStructFieldPtr(f, inst),
            .array_to_slice => try airArrayToSlice(f, inst),
            .cmpxchg_weak => try airCmpxchg(f, inst, "weak"),
            .cmpxchg_strong => try airCmpxchg(f, inst, "strong"),
            .atomic_rmw => try airAtomicRmw(f, inst),
            .atomic_load => try airAtomicLoad(f, inst),
            .memset => try airMemset(f, inst, false),
            .memset_safe => try airMemset(f, inst, true),
            .memcpy => try airMemcpy(f, inst, "memcpy("),
            .memmove => try airMemcpy(f, inst, "memmove("),
            .set_union_tag => try airSetUnionTag(f, inst),
            .get_union_tag => try airGetUnionTag(f, inst),
            .clz => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "clz", .bits),
            .ctz => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "ctz", .bits),
            .popcount => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "popcount", .bits),
            .byte_swap => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "byte_swap", .bits),
            .bit_reverse => try airUnBuiltinCall(f, inst, air_datas[@intFromEnum(inst)].ty_op.operand, "bit_reverse", .bits),
            .tag_name => try airTagName(f, inst),
            .error_name => try airErrorName(f, inst),
            .splat => try airSplat(f, inst),
            .select => try airSelect(f, inst),
            .shuffle_one => try airShuffleOne(f, inst),
            .shuffle_two => try airShuffleTwo(f, inst),
            .reduce => try airReduce(f, inst),
            .aggregate_init => try airAggregateInit(f, inst),
            .union_init => try airUnionInit(f, inst),
            .prefetch => try airPrefetch(f, inst),
            .addrspace_cast => return f.fail("TODO: C backend: implement addrspace_cast", .{}),

            .@"try" => try airTry(f, inst),
            .try_cold => try airTry(f, inst),
            .try_ptr => try airTryPtr(f, inst),
            .try_ptr_cold => try airTryPtr(f, inst),

            .dbg_stmt => try airDbgStmt(f, inst),
            .dbg_empty_stmt => try airDbgEmptyStmt(f, inst),
            .dbg_var_ptr, .dbg_var_val, .dbg_arg_inline => try airDbgVar(f, inst),

            .float_from_int,
            .int_from_float,
            .fptrunc,
            .fpext,
            => try airFloatCast(f, inst),

            .atomic_store_unordered => try airAtomicStore(f, inst, toMemoryOrder(.unordered)),
            .atomic_store_monotonic => try airAtomicStore(f, inst, toMemoryOrder(.monotonic)),
            .atomic_store_release => try airAtomicStore(f, inst, toMemoryOrder(.release)),
            .atomic_store_seq_cst => try airAtomicStore(f, inst, toMemoryOrder(.seq_cst)),

            .struct_field_ptr_index_0 => try airStructFieldPtrIndex(f, inst, 0),
            .struct_field_ptr_index_1 => try airStructFieldPtrIndex(f, inst, 1),
            .struct_field_ptr_index_2 => try airStructFieldPtrIndex(f, inst, 2),
            .struct_field_ptr_index_3 => try airStructFieldPtrIndex(f, inst, 3),

            .field_parent_ptr => try airFieldParentPtr(f, inst),

            .struct_field_val => try airStructFieldVal(f, inst),
            .slice_ptr => try airSliceField(f, inst, false, "ptr"),
            .slice_len => try airSliceField(f, inst, false, "len"),

            .ptr_slice_ptr_ptr => try airSliceField(f, inst, true, "ptr"),
            .ptr_slice_len_ptr => try airSliceField(f, inst, true, "len"),

            .ptr_elem_val => try airPtrElemVal(f, inst),
            .ptr_elem_ptr => try airPtrElemPtr(f, inst),
            .slice_elem_val => try airSliceElemVal(f, inst),
            .slice_elem_ptr => try airSliceElemPtr(f, inst),
            .array_elem_val => try airArrayElemVal(f, inst),

            .unwrap_errunion_payload => try airUnwrapErrUnionPay(f, inst, false),
            .unwrap_errunion_payload_ptr => try airUnwrapErrUnionPay(f, inst, true),
            .unwrap_errunion_err => try airUnwrapErrUnionErr(f, inst),
            .unwrap_errunion_err_ptr => try airUnwrapErrUnionErr(f, inst),
            .wrap_errunion_payload => try airWrapErrUnionPay(f, inst),
            .wrap_errunion_err => try airWrapErrUnionErr(f, inst),
            .errunion_payload_ptr_set => try airErrUnionPayloadPtrSet(f, inst),
            .err_return_trace => try airErrReturnTrace(f, inst),
            .set_err_return_trace => try airSetErrReturnTrace(f, inst),
            .save_err_return_trace_index => try airSaveErrReturnTraceIndex(f, inst),

            .wasm_memory_size => try airWasmMemorySize(f, inst),
            .wasm_memory_grow => try airWasmMemoryGrow(f, inst),

            .add_optimized,
            .sub_optimized,
            .mul_optimized,
            .div_float_optimized,
            .div_trunc_optimized,
            .div_floor_optimized,
            .div_exact_optimized,
            .rem_optimized,
            .mod_optimized,
            .neg_optimized,
            .cmp_lt_optimized,
            .cmp_lte_optimized,
            .cmp_eq_optimized,
            .cmp_gte_optimized,
            .cmp_gt_optimized,
            .cmp_neq_optimized,
            .cmp_vector_optimized,
            .reduce_optimized,
            .int_from_float_optimized,
            => return f.fail("TODO implement optimized float mode", .{}),

            .add_safe,
            .sub_safe,
            .mul_safe,
            .intcast_safe,
            .int_from_float_safe,
            .int_from_float_optimized_safe,
            => return f.fail("TODO implement safety_checked_instructions", .{}),

            .is_named_enum_value => return f.fail("TODO: C backend: implement is_named_enum_value", .{}),
            .error_set_has_value => return f.fail("TODO: C backend: implement error_set_has_value", .{}),

            .runtime_nav_ptr => try airRuntimeNavPtr(f, inst),

            .c_va_start => try airCVaStart(f, inst),
            .c_va_arg => try airCVaArg(f, inst),
            .c_va_end => try airCVaEnd(f, inst),
            .c_va_copy => try airCVaCopy(f, inst),

            .work_item_id,
            .work_group_size,
            .work_group_id,
            => unreachable,

            // Instructions that are known to always be `noreturn` based on their tag.
            .br => return airBr(f, inst),
            .repeat => return airRepeat(f, inst),
            .switch_dispatch => return airSwitchDispatch(f, inst),
            .cond_br => return airCondBr(f, inst),
            .switch_br => return airSwitchBr(f, inst, false),
            .loop_switch_br => return airSwitchBr(f, inst, true),
            .loop => return airLoop(f, inst),
            .ret => return airRet(f, inst, false),
            .ret_safe => return airRet(f, inst, false), // TODO
            .ret_load => return airRet(f, inst, true),
            .trap => return airTrap(f, &f.object.code.writer),
            .unreach => return airUnreach(&f.object),

            // Instructions which may be `noreturn`.
            .block => res: {
                const res = try airBlock(f, inst);
                if (f.typeOfIndex(inst).isNoReturn(zcu)) return;
                break :res res;
            },
            .dbg_inline_block => res: {
                const res = try airDbgInlineBlock(f, inst);
                if (f.typeOfIndex(inst).isNoReturn(zcu)) return;
                break :res res;
            },
            // TODO: calls should be in this category! The AIR we emit for them is a bit weird.
            // The instruction has type `noreturn`, but there are instructions (and maybe a safety
            // check) following nonetheless. The `unreachable` or safety check should be emitted by
            // backends instead.
            .call => try airCall(f, inst, .auto),
            .call_always_tail => .none,
            .call_never_tail => try airCall(f, inst, .never_tail),
            .call_never_inline => try airCall(f, inst, .never_inline),

            // zig fmt: on
        };
        if (result_value == .new_local) {
            log.debug("map %{d} to t{d}", .{ inst, result_value.new_local });
        }
        // Record the instruction's result so later instructions can refer to it.
        try f.value_map.putNoClobber(inst.toRef(), switch (result_value) {
            .none => continue,
            .new_local => |local_index| .{ .local = local_index },
            else => result_value,
        });
    }
    // The body is guaranteed to end in a `noreturn` instruction, each of which
    // returns out of the loop above.
    unreachable;
}
3624
/// Lowers a slice field access (`ptr`/`len`). When `is_ptr` is true, the
/// operand is a pointer to a slice and the result is the address of the named
/// field; otherwise the field's value is copied into a fresh local.
fn airSliceField(f: *Function, inst: Air.Inst.Index, is_ptr: bool, field_name: []const u8) !CValue {
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const result_ty = f.typeOfIndex(inst);
    const src = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(result_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    if (is_ptr) {
        // Address of the field behind the pointer-to-slice.
        try writer.writeByte('&');
        try f.writeCValueDerefMember(writer, src, .{ .identifier = field_name });
    } else {
        try f.writeCValueMember(writer, src, .{ .identifier = field_name });
    }
    try assignment.end(f, writer);
    return result;
}
3644
/// Lowers `ptr_elem_val`: loads `lhs[rhs]` into a fresh local. Produces no
/// value when the result type has no runtime bits.
fn airPtrElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
    const zcu = f.object.dg.pt.zcu;
    const operands = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const result_ty = f.typeOfIndex(inst);
    if (!result_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        // Nothing to load; only process operand deaths.
        try reap(f, inst, &.{ operands.lhs, operands.rhs });
        return .none;
    }

    const base_ptr = try f.resolveInst(operands.lhs);
    const elem_index = try f.resolveInst(operands.rhs);
    try reap(f, inst, &.{ operands.lhs, operands.rhs });

    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(result_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    // Right-hand side: `base[index]`.
    try f.writeCValue(writer, base_ptr, .Other);
    try writer.writeByte('[');
    try f.writeCValue(writer, elem_index, .Other);
    try writer.writeByte(']');
    try assignment.end(f, writer);
    return result;
}
3670
/// Lowers `ptr_elem_ptr`: computes `&lhs[rhs]`, cast to the result pointer
/// type. A pointer-to-array operand is dereferenced first; when the element
/// type has no runtime bits, the result degenerates to the casted base pointer.
fn airPtrElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const operands = f.air.extraData(Air.Bin, ty_pl.payload).data;

    const result_ty = f.typeOfIndex(inst);
    const base_ptr_ty = f.typeOf(operands.lhs);
    const elem_has_bits = base_ptr_ty.elemType2(zcu).hasRuntimeBitsIgnoreComptime(zcu);

    const base_ptr = try f.resolveInst(operands.lhs);
    const elem_index = try f.resolveInst(operands.rhs);
    try reap(f, inst, &.{ operands.lhs, operands.rhs });

    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(result_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    // Cast the whole expression to the result pointer type.
    try writer.writeByte('(');
    try f.renderType(writer, result_ty);
    try writer.writeByte(')');
    if (elem_has_bits) try writer.writeByte('&');
    if (elem_has_bits and base_ptr_ty.ptrSize(zcu) == .one) {
        // It's a pointer to an array, so we need to de-reference.
        try f.writeCValueDeref(writer, base_ptr);
    } else {
        try f.writeCValue(writer, base_ptr, .Other);
    }
    if (elem_has_bits) {
        try writer.writeByte('[');
        try f.writeCValue(writer, elem_index, .Other);
        try writer.writeByte(']');
    }
    try assignment.end(f, writer);
    return result;
}
3706
/// Lowers `slice_elem_val`: loads `lhs.ptr[rhs]` into a fresh local. Produces
/// no value when the result type has no runtime bits.
fn airSliceElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
    const zcu = f.object.dg.pt.zcu;
    const operands = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const result_ty = f.typeOfIndex(inst);
    if (!result_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        // Nothing to load; only process operand deaths.
        try reap(f, inst, &.{ operands.lhs, operands.rhs });
        return .none;
    }

    const slice_val = try f.resolveInst(operands.lhs);
    const elem_index = try f.resolveInst(operands.rhs);
    try reap(f, inst, &.{ operands.lhs, operands.rhs });

    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(result_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    // Right-hand side: `slice.ptr[index]`.
    try f.writeCValueMember(writer, slice_val, .{ .identifier = "ptr" });
    try writer.writeByte('[');
    try f.writeCValue(writer, elem_index, .Other);
    try writer.writeByte(']');
    try assignment.end(f, writer);
    return result;
}
3732
fn airSliceElemPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;

    const inst_ty = f.typeOfIndex(inst);
    const elem_has_bits = f.typeOf(bin_op.lhs).elemType2(zcu).hasRuntimeBitsIgnoreComptime(zcu);

    const slice_val = try f.resolveInst(bin_op.lhs);
    const index_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    // Emit: <local> = &<slice>.ptr[<index>];
    // For zero-bit element types there is nothing to index, so the slice's
    // `ptr` member is forwarded as-is.
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    if (elem_has_bits) try writer.writeByte('&');
    try f.writeCValueMember(writer, slice_val, .{ .identifier = "ptr" });
    if (elem_has_bits) {
        try writer.writeByte('[');
        try f.writeCValue(writer, index_val, .Other);
        try writer.writeByte(']');
    }
    try assignment.end(f, writer);
    return result;
}
3763
fn airArrayElemVal(f: *Function, inst: Air.Inst.Index) !CValue {
    const zcu = f.object.dg.pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const inst_ty = f.typeOfIndex(inst);

    // A zero-bit element type produces no C code; just release the operands.
    if (!inst_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
        return .none;
    }

    const array_val = try f.resolveInst(bin_op.lhs);
    const index_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    // Emit: <local> = <array>[<index>];
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try assignment.assign(f, writer);
    try f.writeCValue(writer, array_val, .Other);
    try writer.writeByte('[');
    try f.writeCValue(writer, index_val, .Other);
    try writer.writeByte(']');
    try assignment.end(f, writer);
    return result;
}
3789
/// Lowers `alloc`: reserves a function-local variable of the pointee type and
/// returns a reference to it. Returns `.undef` when the pointee has no runtime
/// bits, since no storage is needed.
fn airAlloc(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const inst_ty = f.typeOfIndex(inst);
    const elem_ty = inst_ty.childType(zcu);
    if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime(zcu)) return .{ .undef = inst_ty };

    // The local must honor the pointer's explicit alignment, falling back to
    // the pointee's ABI alignment.
    const local = try f.allocLocalValue(.{
        .ctype = try f.ctypeFromType(elem_ty, .complete),
        .alignas = CType.AlignAs.fromAlignment(.{
            .@"align" = inst_ty.ptrInfo(zcu).flags.alignment,
            .abi = elem_ty.abiAlignment(zcu),
        }),
    });
    log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
    // Record the local as unfreeable (`true`): its address escapes through the
    // returned reference, so it must never be recycled.
    try f.allocs.put(zcu.gpa, local.new_local, true);

    switch (elem_ty.zigTypeTag(zcu)) {
        .@"struct", .@"union" => switch (elem_ty.containerLayout(zcu)) {
            .@"packed" => {
                // For packed aggregates, we zero-initialize to try and work around a design flaw
                // related to how `packed`, `undefined`, and RLS interact. See comment in `airStore`
                // for details.
                const w = &f.object.code.writer;
                try w.print("memset(&t{d}, 0x00, sizeof(", .{local.new_local});
                try f.renderType(w, elem_ty);
                try w.writeAll("));");
                try f.object.newline();
            },
            .auto, .@"extern" => {},
        },
        else => {},
    }

    return .{ .local_ref = local.new_local };
}
3826
/// Lowers `ret_ptr`: reserves a function-local variable to serve as the result
/// location and returns a reference to it. Mirrors `airAlloc`; returns `.undef`
/// when the pointee has no runtime bits.
fn airRetPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const inst_ty = f.typeOfIndex(inst);
    const elem_ty = inst_ty.childType(zcu);
    if (!elem_ty.isFnOrHasRuntimeBitsIgnoreComptime(zcu)) return .{ .undef = inst_ty };

    // Honor the pointer's explicit alignment, falling back to the pointee's
    // ABI alignment.
    const local = try f.allocLocalValue(.{
        .ctype = try f.ctypeFromType(elem_ty, .complete),
        .alignas = CType.AlignAs.fromAlignment(.{
            .@"align" = inst_ty.ptrInfo(zcu).flags.alignment,
            .abi = elem_ty.abiAlignment(zcu),
        }),
    });
    log.debug("%{d}: allocated unfreeable t{d}", .{ inst, local.new_local });
    // Record the local as unfreeable (`true`): its address escapes through the
    // returned reference, so it must never be recycled.
    try f.allocs.put(zcu.gpa, local.new_local, true);

    switch (elem_ty.zigTypeTag(zcu)) {
        .@"struct", .@"union" => switch (elem_ty.containerLayout(zcu)) {
            .@"packed" => {
                // For packed aggregates, we zero-initialize to try and work around a design flaw
                // related to how `packed`, `undefined`, and RLS interact. See comment in `airStore`
                // for details.
                const w = &f.object.code.writer;
                try w.print("memset(&t{d}, 0x00, sizeof(", .{local.new_local});
                try f.renderType(w, elem_ty);
                try w.writeAll("));");
                try f.object.newline();
            },
            .auto, .@"extern" => {},
        },
        else => {},
    }

    return .{ .local_ref = local.new_local };
}
3863
fn airArg(f: *Function, inst: Air.Inst.Index) !CValue {
    const arg_ty = f.typeOfIndex(inst);
    const param_ctype = try f.ctypeFromType(arg_ty, .parameter);
    const complete_ctype = try f.ctypeFromType(arg_ty, .complete);

    // Claim the next parameter slot. When the parameter form differs from the
    // complete form, the value is wrapped in an array struct (`.arg_array`).
    const arg_index = f.next_arg_index;
    f.next_arg_index += 1;
    const result: CValue = if (param_ctype.eql(complete_ctype))
        .{ .arg = arg_index }
    else
        .{ .arg_array = arg_index };

    if (!f.liveness.isUnused(inst)) return result;

    // Unused parameter: emit a cast to void so the C compiler does not warn.
    const writer = &f.object.code.writer;
    try writer.writeByte('(');
    try f.renderType(writer, .void);
    try writer.writeByte(')');
    try f.writeCValue(writer, result, .Other);
    try writer.writeByte(';');
    try f.object.newline();
    return .none;
}
3888
/// Lowers `load`: reads the pointee into a fresh local and returns it.
fn airLoad(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const ptr_ty = f.typeOf(ty_op.operand);
    const ptr_scalar_ty = ptr_ty.scalarType(zcu);
    const ptr_info = ptr_scalar_ty.ptrInfo(zcu);
    const src_ty: Type = .fromInterned(ptr_info.child);

    // `Air.Legalize.Feature.expand_packed_load` should ensure that the only
    // bit-pointers we see here are vector element pointers.
    assert(ptr_info.packed_offset.host_size == 0 or ptr_info.flags.vector_index != .none);

    // Zero-bit loads produce no C code.
    if (!src_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        try reap(f, inst, &.{ty_op.operand});
        return .none;
    }

    const operand = try f.resolveInst(ty_op.operand);

    try reap(f, inst, &.{ty_op.operand});

    // A pointer with explicit alignment below the type's ABI alignment cannot
    // be dereferenced directly, and C arrays cannot be assigned; both cases
    // fall back to memcpy.
    const is_aligned = if (ptr_info.flags.alignment != .none)
        ptr_info.flags.alignment.order(src_ty.abiAlignment(zcu)).compare(.gte)
    else
        true;
    const is_array = lowersToArray(src_ty, zcu);
    const need_memcpy = !is_aligned or is_array;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, src_ty);
    const v = try Vectorize.start(f, inst, w, ptr_ty);

    if (need_memcpy) {
        // Emit: memcpy(&<local>, (const char *)<ptr>, sizeof(<src_ty>))
        // (the '&' is skipped for array locals, which already decay).
        try w.writeAll("memcpy(");
        if (!is_array) try w.writeByte('&');
        try f.writeCValue(w, local, .Other);
        try v.elem(f, w);
        try w.writeAll(", (const char *)");
        try f.writeCValue(w, operand, .Other);
        try v.elem(f, w);
        try w.writeAll(", sizeof(");
        try f.renderType(w, src_ty);
        try w.writeAll("))");
    } else {
        // Emit: <local> = *<ptr>
        try f.writeCValue(w, local, .Other);
        try v.elem(f, w);
        try w.writeAll(" = ");
        try f.writeCValueDeref(w, operand);
        try v.elem(f, w);
    }
    try w.writeByte(';');
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
3947
/// Lowers `ret` (`is_ptr == false`) and `ret_load` (`is_ptr == true`, where the
/// operand is a pointer to the return value that must be dereferenced).
fn airRet(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !void {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;
    const w = &f.object.code.writer;
    const op_inst = un_op.toIndex();
    const op_ty = f.typeOf(un_op);
    const ret_ty = if (is_ptr) op_ty.childType(zcu) else op_ty;
    const ret_ctype = try f.ctypeFromType(ret_ty, .parameter);

    // Returning the result of a must-tail call: emit the call itself rather
    // than a `return` statement.
    if (op_inst != null and f.air.instructions.items(.tag)[@intFromEnum(op_inst.?)] == .call_always_tail) {
        try reap(f, inst, &.{un_op});
        _ = try airCall(f, op_inst.?, .always_tail);
    } else if (ret_ctype.index != .void) {
        const operand = try f.resolveInst(un_op);
        try reap(f, inst, &.{un_op});
        var deref = is_ptr;
        const is_array = lowersToArray(ret_ty, zcu);
        // C cannot return arrays by value: copy the array into the `array`
        // member of a temporary of the wrapper return ctype and return that.
        const ret_val = if (is_array) ret_val: {
            const array_local = try f.allocAlignedLocal(inst, .{
                .ctype = ret_ctype,
                .alignas = CType.AlignAs.fromAbiAlignment(ret_ty.abiAlignment(zcu)),
            });
            try w.writeAll("memcpy(");
            try f.writeCValueMember(w, array_local, .{ .identifier = "array" });
            try w.writeAll(", ");
            if (deref)
                try f.writeCValueDeref(w, operand)
            else
                try f.writeCValue(w, operand, .FunctionArgument);
            // The memcpy already performed the dereference; the final `return`
            // must not dereference again.
            deref = false;
            try w.writeAll(", sizeof(");
            try f.renderType(w, ret_ty);
            try w.writeAll("));");
            try f.object.newline();
            break :ret_val array_local;
        } else operand;

        try w.writeAll("return ");
        if (deref)
            try f.writeCValueDeref(w, ret_val)
        else
            try f.writeCValue(w, ret_val, .Other);
        try w.writeAll(";\n");
        if (is_array) {
            try freeLocal(f, inst, ret_val.new_local, null);
        }
    } else {
        try reap(f, inst, &.{un_op});
        // Not even allowed to return void in a naked function.
        if (!f.object.dg.is_naked_fn) try w.writeAll("return;\n");
    }
}
4001
fn airIntCast(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const src_val = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const dest_ty = f.typeOfIndex(inst);
    const dest_scalar_ty = dest_ty.scalarType(zcu);
    const src_ty = f.typeOf(ty_op.operand);
    const src_scalar_ty = src_ty.scalarType(zcu);

    // If the cast cannot change the representation, forward the value as-is.
    if (f.object.dg.intCastIsNoop(dest_scalar_ty, src_scalar_ty)) return f.moveCValue(inst, dest_ty, src_val);

    // Emit an element-wise int cast (vectorized when the operand is a vector).
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, dest_ty);
    const vec = try Vectorize.start(f, inst, writer, src_ty);
    const assignment = try Assignment.start(f, writer, try f.ctypeFromType(src_scalar_ty, .complete));
    try f.writeCValue(writer, result, .Other);
    try vec.elem(f, writer);
    try assignment.assign(f, writer);
    try f.renderIntCast(writer, dest_scalar_ty, src_val, vec, src_scalar_ty, .Other);
    try assignment.end(f, writer);
    try vec.end(f, inst, writer);
    return result;
}
4029
/// Lowers `trunc`: narrows an integer to a smaller bit width.
fn airTrunc(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const dest_int_info = inst_scalar_ty.intInfo(zcu);
    const dest_bits = dest_int_info.bits;
    const dest_c_bits = toCIntBits(dest_bits) orelse
        return f.fail("TODO: C backend: implement integer types larger than 128 bits", .{});
    const operand_ty = f.typeOf(ty_op.operand);
    const scalar_ty = operand_ty.scalarType(zcu);
    const scalar_int_info = scalar_ty.intInfo(zcu);

    // need_cast: the destination C type is narrower than 64 bits, so an
    //   explicit cast is required.
    // need_lo: the operand is wider than 64 bits but the destination fits in
    //   64, so take the low part via zig_lo_*.
    // need_mask: the destination bit width is not a full C integer width, so
    //   excess high bits must be cleared (unsigned) or sign-extended (signed).
    const need_cast = dest_c_bits < 64;
    const need_lo = scalar_int_info.bits > 64 and dest_bits <= 64;
    const need_mask = dest_bits < 8 or !std.math.isPowerOfTwo(dest_bits);
    if (!need_cast and !need_lo and !need_mask) return f.moveCValue(inst, inst_ty, operand);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, operand_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_scalar_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try a.assign(f, w);
    if (need_cast) {
        try w.writeByte('(');
        try f.renderType(w, inst_scalar_ty);
        try w.writeByte(')');
    }
    if (need_lo) {
        try w.writeAll("zig_lo_");
        try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
        try w.writeByte('(');
    }
    if (!need_mask) {
        try f.writeCValue(w, operand, .Other);
        try v.elem(f, w);
    } else switch (dest_int_info.signedness) {
        // Unsigned: clear high bits by masking with the destination's max value.
        .unsigned => {
            try w.writeAll("zig_and_");
            try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
            try w.writeByte('(');
            try f.writeCValue(w, operand, .FunctionArgument);
            try v.elem(f, w);
            try w.print(", {f})", .{
                try f.fmtIntLiteralHex(try inst_scalar_ty.maxIntScalar(pt, scalar_ty)),
            });
        },
        // Signed: shift left so the destination's sign bit becomes the top bit,
        // then arithmetic-shift right by the same amount to sign-extend.
        .signed => {
            const c_bits = toCIntBits(scalar_int_info.bits) orelse
                return f.fail("TODO: C backend: implement integer types larger than 128 bits", .{});
            const shift_val = try pt.intValue(.u8, c_bits - dest_bits);

            try w.writeAll("zig_shr_");
            try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
            // 128-bit values go through zig_bitCast helpers; narrower widths
            // use plain C casts between the signed and unsigned forms.
            if (c_bits == 128) {
                try w.print("(zig_bitCast_i{d}(", .{c_bits});
            } else {
                try w.print("((int{d}_t)", .{c_bits});
            }
            try w.print("zig_shl_u{d}(", .{c_bits});
            if (c_bits == 128) {
                try w.print("zig_bitCast_u{d}(", .{c_bits});
            } else {
                try w.print("(uint{d}_t)", .{c_bits});
            }
            try f.writeCValue(w, operand, .FunctionArgument);
            try v.elem(f, w);
            if (c_bits == 128) try w.writeByte(')');
            try w.print(", {f})", .{try f.fmtIntLiteralDec(shift_val)});
            if (c_bits == 128) try w.writeByte(')');
            try w.print(", {f})", .{try f.fmtIntLiteralDec(shift_val)});
        },
    }
    if (need_lo) try w.writeByte(')');
    try a.end(f, w);
    try v.end(f, inst, w);
    return local;
}
4115
/// Lowers `store`/`store_safe` (`safety` selects whether storing `undefined`
/// fills the destination with a debug byte pattern). Always returns `.none`.
fn airStore(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    // *a = b;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;

    const ptr_ty = f.typeOf(bin_op.lhs);
    const ptr_scalar_ty = ptr_ty.scalarType(zcu);
    const ptr_info = ptr_scalar_ty.ptrInfo(zcu);

    // `Air.Legalize.Feature.expand_packed_store` should ensure that the only
    // bit-pointers we see here are vector element pointers.
    assert(ptr_info.packed_offset.host_size == 0 or ptr_info.flags.vector_index != .none);

    const ptr_val = try f.resolveInst(bin_op.lhs);
    const src_ty = f.typeOf(bin_op.rhs);

    const val_is_undef = if (try f.air.value(bin_op.rhs, pt)) |v| v.isUndef(zcu) else false;

    const w = &f.object.code.writer;
    // Storing `undefined` emits no real store; with safety on, the destination
    // is instead filled with a recognizable byte pattern via memset.
    if (val_is_undef) {
        try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
        if (safety and ptr_info.packed_offset.host_size == 0) {
            // If the thing we're initializing is a packed struct/union, we set to 0 instead of
            // 0xAA. This is a hack to work around a problem with partially-undefined packed
            // aggregates. If we used 0xAA here, then a later initialization through RLS would
            // not zero the high padding bits (for a packed type which is not 8/16/32/64/etc bits),
            // so we would get a miscompilation. Using 0x00 here avoids this bug in some cases. It
            // is *not* a correct fix; for instance it misses any case where packed structs are
            // nested in other aggregates. A proper fix for this will involve changing the language,
            // such as to remove RLS. This just prevents miscompilations in *some* common cases.
            const byte_str: []const u8 = switch (src_ty.zigTypeTag(zcu)) {
                else => "0xaa",
                .@"struct", .@"union" => switch (src_ty.containerLayout(zcu)) {
                    .auto, .@"extern" => "0xaa",
                    .@"packed" => "0x00",
                },
            };
            try w.writeAll("memset(");
            try f.writeCValue(w, ptr_val, .FunctionArgument);
            try w.print(", {s}, sizeof(", .{byte_str});
            try f.renderType(w, .fromInterned(ptr_info.child));
            try w.writeAll("));");
            try f.object.newline();
        }
        return .none;
    }

    // A plain C assignment requires a sufficiently-aligned destination, and C
    // arrays cannot be assigned; both cases fall back to memcpy.
    const is_aligned = if (ptr_info.flags.alignment != .none)
        ptr_info.flags.alignment.order(src_ty.abiAlignment(zcu)).compare(.gte)
    else
        true;
    const is_array = lowersToArray(.fromInterned(ptr_info.child), zcu);
    const need_memcpy = !is_aligned or is_array;

    const src_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const src_scalar_ctype = try f.ctypeFromType(src_ty.scalarType(zcu), .complete);
    if (need_memcpy) {
        // For this memcpy to safely work we need the rhs to have the same
        // underlying type as the lhs (i.e. they must both be arrays of the same underlying type).
        assert(src_ty.eql(.fromInterned(ptr_info.child), zcu));

        // If the source is a constant, writeCValue will emit a brace initialization
        // so work around this by initializing into new local.
        // TODO this should be done by manually initializing elements of the dest array
        const array_src = if (src_val == .constant) blk: {
            const new_local = try f.allocLocal(inst, src_ty);
            try f.writeCValue(w, new_local, .Other);
            try w.writeAll(" = ");
            try f.writeCValue(w, src_val, .Other);
            try w.writeByte(';');
            try f.object.newline();

            break :blk new_local;
        } else src_val;

        // Emit: memcpy((char *)<ptr>, [&]<src>, sizeof(<src_ty>));
        const v = try Vectorize.start(f, inst, w, ptr_ty);
        try w.writeAll("memcpy((char *)");
        try f.writeCValue(w, ptr_val, .FunctionArgument);
        try v.elem(f, w);
        try w.writeAll(", ");
        if (!is_array) try w.writeByte('&');
        try f.writeCValue(w, array_src, .FunctionArgument);
        try v.elem(f, w);
        try w.writeAll(", sizeof(");
        try f.renderType(w, src_ty);
        try w.writeAll("))");
        try f.freeCValue(inst, array_src);
        try w.writeByte(';');
        try f.object.newline();
        try v.end(f, inst, w);
    } else {
        // Storing a local through a reference to that same local is a no-op;
        // skip emitting the self-assignment.
        switch (ptr_val) {
            .local_ref => |ptr_local_index| switch (src_val) {
                .new_local, .local => |src_local_index| if (ptr_local_index == src_local_index)
                    return .none,
                else => {},
            },
            else => {},
        }
        // Emit: *<ptr> = <src>;
        const v = try Vectorize.start(f, inst, w, ptr_ty);
        const a = try Assignment.start(f, w, src_scalar_ctype);
        try f.writeCValueDeref(w, ptr_val);
        try v.elem(f, w);
        try a.assign(f, w);
        try f.writeCValue(w, src_val, .Other);
        try v.elem(f, w);
        try a.end(f, w);
        try v.end(f, inst, w);
    }
    return .none;
}
4230
/// Lowers an overflow-arithmetic instruction by calling the corresponding
/// `zig_<operation>o_*` helper. The result is a two-field aggregate: field 0's
/// address is passed as the helper's out-parameter (receiving the wrapped
/// value) and field 1 receives the helper's return value.
fn airOverflow(f: *Function, inst: Air.Inst.Index, operation: []const u8, info: BuiltinInfo) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;

    const lhs = try f.resolveInst(bin_op.lhs);
    const rhs = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const inst_ty = f.typeOfIndex(inst);
    const operand_ty = f.typeOf(bin_op.lhs);
    const scalar_ty = operand_ty.scalarType(zcu);

    // Emit: <local>.f1 = zig_<operation>o_<ty>(&<local>.f0, lhs, rhs, ...);
    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, operand_ty);
    try f.writeCValueMember(w, local, .{ .field = 1 });
    try v.elem(f, w);
    try w.writeAll(" = zig_");
    try w.writeAll(operation);
    try w.writeAll("o_");
    try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
    try w.writeAll("(&");
    try f.writeCValueMember(w, local, .{ .field = 0 });
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, lhs, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, rhs, .FunctionArgument);
    // The rhs may be a scalar (e.g. a shift amount) even for vector operands.
    if (f.typeOf(bin_op.rhs).isVector(zcu)) try v.elem(f, w);
    try f.object.dg.renderBuiltinInfo(w, scalar_ty, info);
    try w.writeAll(");");
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
4270
fn airNot(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
    const operand_ty = f.typeOf(ty_op.operand);
    const scalar_ty = operand_ty.scalarType(zcu);

    // Only logical not on `bool` is lowered inline; bitwise not on integers
    // goes through the zig_not_* builtin helper.
    if (scalar_ty.toIntern() != .bool_type) return try airUnBuiltinCall(f, inst, ty_op.operand, "not", .bits);

    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const inst_ty = f.typeOfIndex(inst);

    // Emit: <local> = !<operand>;
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    const vec = try Vectorize.start(f, inst, writer, operand_ty);
    try f.writeCValue(writer, result, .Other);
    try vec.elem(f, writer);
    try writer.writeAll(" = !");
    try f.writeCValue(writer, operand, .Other);
    try vec.elem(f, writer);
    try writer.writeByte(';');
    try f.object.newline();
    try vec.end(f, inst, writer);

    return result;
}
4299
fn airBinOp(
    f: *Function,
    inst: Air.Inst.Index,
    operator: []const u8,
    operation: []const u8,
    info: BuiltinInfo,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const operand_ty = f.typeOf(bin_op.lhs);
    const scalar_ty = operand_ty.scalarType(zcu);

    // Integers wider than 64 bits and runtime floats cannot use a plain C
    // operator here; defer to the zig_<operation>_* builtin helpers.
    if ((scalar_ty.isInt(zcu) and scalar_ty.bitSize(zcu) > 64) or scalar_ty.isRuntimeFloat())
        return try airBinBuiltinCall(f, inst, operation, info);

    const lhs_val = try f.resolveInst(bin_op.lhs);
    const rhs_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const inst_ty = f.typeOfIndex(inst);

    // Emit: <local> = <lhs> <operator> <rhs>;
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    const vec = try Vectorize.start(f, inst, writer, operand_ty);
    try f.writeCValue(writer, result, .Other);
    try vec.elem(f, writer);
    try writer.writeAll(" = ");
    try f.writeCValue(writer, lhs_val, .Other);
    try vec.elem(f, writer);
    try writer.print(" {s} ", .{operator});
    try f.writeCValue(writer, rhs_val, .Other);
    try vec.elem(f, writer);
    try writer.writeByte(';');
    try f.object.newline();
    try vec.end(f, inst, writer);

    return result;
}
4340
/// Lowers an ordered comparison (`cmp_lt`, `cmp_lte`, etc.). `data` is any AIR
/// payload with `lhs`/`rhs` fields.
fn airCmpOp(
    f: *Function,
    inst: Air.Inst.Index,
    data: anytype,
    operator: std.math.CompareOperator,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const lhs_ty = f.typeOf(data.lhs);
    const scalar_ty = lhs_ty.scalarType(zcu);

    // Integers wider than 64 bits and runtime floats have no direct C
    // comparison here; defer to the zig_cmp_* / float builtin helpers.
    const scalar_bits = scalar_ty.bitSize(zcu);
    if (scalar_ty.isInt(zcu) and scalar_bits > 64)
        return airCmpBuiltinCall(
            f,
            inst,
            data,
            operator,
            .cmp,
            if (scalar_bits > 128) .bits else .none,
        );
    if (scalar_ty.isRuntimeFloat())
        return airCmpBuiltinCall(f, inst, data, operator, .operator, .none);

    const inst_ty = f.typeOfIndex(inst);
    const lhs = try f.resolveInst(data.lhs);
    const rhs = try f.resolveInst(data.rhs);
    try reap(f, inst, &.{ data.lhs, data.rhs });

    // Single pointers are cast through `(void*)` before comparing.
    const rhs_ty = f.typeOf(data.rhs);
    const need_cast = lhs_ty.isSinglePointer(zcu) or rhs_ty.isSinglePointer(zcu);
    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, lhs_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(scalar_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try a.assign(f, w);
    // Comparing a value with itself folds to a constant.
    if (lhs != .undef and lhs.eql(rhs)) try w.writeAll(switch (operator) {
        .lt, .neq, .gt => "false",
        .lte, .eq, .gte => "true",
    }) else {
        if (need_cast) try w.writeAll("(void*)");
        try f.writeCValue(w, lhs, .Other);
        try v.elem(f, w);
        try w.writeAll(compareOperatorC(operator));
        if (need_cast) try w.writeAll("(void*)");
        try f.writeCValue(w, rhs, .Other);
        try v.elem(f, w);
    }
    try a.end(f, w);
    try v.end(f, inst, w);

    return local;
}
4396
/// Lowers `cmp_eq`/`cmp_neq`; `operator` is restricted to `.eq`/`.neq`.
fn airEquality(
    f: *Function,
    inst: Air.Inst.Index,
    operator: std.math.CompareOperator,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ctype_pool = &f.object.dg.ctype_pool;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;

    // Integers wider than 64 bits and runtime floats go through the
    // zig_cmp_* / float builtin helpers.
    const operand_ty = f.typeOf(bin_op.lhs);
    const operand_bits = operand_ty.bitSize(zcu);
    if (operand_ty.isAbiInt(zcu) and operand_bits > 64)
        return airCmpBuiltinCall(
            f,
            inst,
            bin_op,
            operator,
            .cmp,
            if (operand_bits > 128) .bits else .none,
        );
    if (operand_ty.isRuntimeFloat())
        return airCmpBuiltinCall(f, inst, bin_op, operator, .operator, .none);

    const lhs = try f.resolveInst(bin_op.lhs);
    const rhs = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, .bool);
    const a = try Assignment.start(f, w, .bool);
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);

    const operand_ctype = try f.ctypeFromType(operand_ty, .complete);
    // Comparing a value with itself folds to a constant.
    if (lhs != .undef and lhs.eql(rhs)) try w.writeAll(switch (operator) {
        .lt, .lte, .gte, .gt => unreachable,
        .neq => "false",
        .eq => "true",
    }) else switch (operand_ctype.info(ctype_pool)) {
        .basic, .pointer => {
            try f.writeCValue(w, lhs, .Other);
            try w.writeAll(compareOperatorC(operator));
            try f.writeCValue(w, rhs, .Other);
        },
        .aligned, .array, .vector, .fwd_decl, .function => unreachable,
        // A two-field aggregate with an `is_null` member is a lowered optional:
        // if either side is null, compare the null flags; otherwise compare the
        // payloads.
        .aggregate => |aggregate| if (aggregate.fields.len == 2 and
            (aggregate.fields.at(0, ctype_pool).name.index == .is_null or
                aggregate.fields.at(1, ctype_pool).name.index == .is_null))
        {
            try f.writeCValueMember(w, lhs, .{ .identifier = "is_null" });
            try w.writeAll(" || ");
            try f.writeCValueMember(w, rhs, .{ .identifier = "is_null" });
            try w.writeAll(" ? ");
            try f.writeCValueMember(w, lhs, .{ .identifier = "is_null" });
            try w.writeAll(compareOperatorC(operator));
            try f.writeCValueMember(w, rhs, .{ .identifier = "is_null" });
            try w.writeAll(" : ");
            try f.writeCValueMember(w, lhs, .{ .identifier = "payload" });
            try w.writeAll(compareOperatorC(operator));
            try f.writeCValueMember(w, rhs, .{ .identifier = "payload" });
        } else for (0..aggregate.fields.len) |field_index| {
            // Other aggregates: compare field-by-field, joined by `&&` for
            // equality and `||` for inequality.
            if (field_index > 0) try w.writeAll(switch (operator) {
                .lt, .lte, .gte, .gt => unreachable,
                .eq => " && ",
                .neq => " || ",
            });
            const field_name: CValue = .{
                .ctype_pool_string = aggregate.fields.at(field_index, ctype_pool).name,
            };
            try f.writeCValueMember(w, lhs, field_name);
            try w.writeAll(compareOperatorC(operator));
            try f.writeCValueMember(w, rhs, field_name);
        },
    }
    try a.end(f, w);

    return local;
}
4476
fn airCmpLtErrorsLen(f: *Function, inst: Air.Inst.Index) !CValue {
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const operand_val = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});

    // Emit: <local> = <operand> < sizeof(zig_errorName) / sizeof(*zig_errorName);
    // i.e. compare against the number of entries in the error-name table.
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, .bool);
    try f.writeCValue(writer, result, .Other);
    try writer.writeAll(" = ");
    try f.writeCValue(writer, operand_val, .Other);
    try writer.print(" < sizeof({f}) / sizeof(*{0f});", .{fmtIdentSolo("zig_errorName")});
    try f.object.newline();
    return result;
}
4492
/// Lowers `ptr_add`/`ptr_sub` (`operator` is '+' or '-'): pointer arithmetic
/// with the offset scaled by the element size.
fn airPtrAddSub(f: *Function, inst: Air.Inst.Index, operator: u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;

    const lhs = try f.resolveInst(bin_op.lhs);
    const rhs = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const elem_ty = inst_scalar_ty.elemType2(zcu);
    // Zero-bit element type: the offset is meaningless, forward the pointer.
    if (!elem_ty.hasRuntimeBitsIgnoreComptime(zcu)) return f.moveCValue(inst, inst_ty, lhs);
    const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete);

    const local = try f.allocLocal(inst, inst_ty);
    const w = &f.object.code.writer;
    const v = try Vectorize.start(f, inst, w, inst_ty);
    const a = try Assignment.start(f, w, inst_scalar_ctype);
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try a.assign(f, w);
    // We must convert to and from integer types to prevent UB if the operation
    // results in a NULL pointer, or if LHS is NULL. The operation is only UB
    // if the result is NULL and then dereferenced.
    // Emit: (<ptr_ty>)(((uintptr_t)<lhs>) <op> (<rhs>*sizeof(<elem_ty>)))
    try w.writeByte('(');
    try f.renderCType(w, inst_scalar_ctype);
    try w.writeAll(")(((uintptr_t)");
    try f.writeCValue(w, lhs, .Other);
    try v.elem(f, w);
    try w.writeAll(") ");
    try w.writeByte(operator);
    try w.writeAll(" (");
    try f.writeCValue(w, rhs, .Other);
    try v.elem(f, w);
    try w.writeAll("*sizeof(");
    try f.renderType(w, elem_ty);
    try w.writeAll(")))");
    try a.end(f, w);
    try v.end(f, inst, w);
    return local;
}
4536
fn airMinMax(f: *Function, inst: Air.Inst.Index, operator: u8, operation: []const u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);

    // Integers wider than 64 bits and runtime floats go through the
    // zig_<operation>_* builtin helpers.
    if ((inst_scalar_ty.isInt(zcu) and inst_scalar_ty.bitSize(zcu) > 64) or inst_scalar_ty.isRuntimeFloat())
        return try airBinBuiltinCall(f, inst, operation, .none);

    const lhs_val = try f.resolveInst(bin_op.lhs);
    const rhs_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    // Emit: <local> = (<lhs> <op> <rhs>) ? <lhs> : <rhs>;
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    const vec = try Vectorize.start(f, inst, writer, inst_ty);
    try f.writeCValue(writer, result, .Other);
    try vec.elem(f, writer);
    try writer.writeAll(" = (");
    try f.writeCValue(writer, lhs_val, .Other);
    try vec.elem(f, writer);
    try writer.print(" {c} ", .{operator});
    try f.writeCValue(writer, rhs_val, .Other);
    try vec.elem(f, writer);
    try writer.writeAll(") ? ");
    try f.writeCValue(writer, lhs_val, .Other);
    try vec.elem(f, writer);
    try writer.writeAll(" : ");
    try f.writeCValue(writer, rhs_val, .Other);
    try vec.elem(f, writer);
    try writer.writeByte(';');
    try f.object.newline();
    try vec.end(f, inst, writer);

    return result;
}
4578
fn airSlice(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const bin_op = f.air.extraData(Air.Bin, ty_pl.payload).data;

    const ptr_val = try f.resolveInst(bin_op.lhs);
    const len_val = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const inst_ty = f.typeOfIndex(inst);
    const ptr_field_ty = inst_ty.slicePtrFieldType(zcu);

    // A slice lowers to a struct with `ptr` and `len` members; assign each
    // member of the new local separately.
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, inst_ty);
    {
        const assignment = try Assignment.start(f, writer, try f.ctypeFromType(ptr_field_ty, .complete));
        try f.writeCValueMember(writer, result, .{ .identifier = "ptr" });
        try assignment.assign(f, writer);
        try f.writeCValue(writer, ptr_val, .Other);
        try assignment.end(f, writer);
    }
    {
        const assignment = try Assignment.start(f, writer, .usize);
        try f.writeCValueMember(writer, result, .{ .identifier = "len" });
        try assignment.assign(f, writer);
        try f.writeCValue(writer, len_val, .Other);
        try assignment.end(f, writer);
    }
    return result;
}
4610
/// Lowers an AIR `call` instruction to a C call expression. Handles:
/// * arguments whose parameter C type differs from their complete C type
///   (array-lowered values are staged through a `memcpy` into a temp local),
/// * direct calls to known callees (with a cast when a naked-cc function was
///   coerced to a non-naked fn type), lazy-fn wrappers for never_tail /
///   never_inline, and indirect calls through function pointers,
/// * array-lowered return values, which are copied out of the wrapper struct.
fn airCall(
    f: *Function,
    inst: Air.Inst.Index,
    modifier: std.builtin.CallModifier,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    // Not even allowed to call panic in a naked function.
    if (f.object.dg.is_naked_fn) return .none;

    const gpa = f.object.dg.gpa;
    const w = &f.object.code.writer;

    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const extra = f.air.extraData(Air.Call, pl_op.payload);
    const args: []const Air.Inst.Ref = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.args_len]);

    // Resolve every argument up front; void-typed parameters lower to `.none`
    // and are skipped when the call is printed.
    const resolved_args = try gpa.alloc(CValue, args.len);
    defer gpa.free(resolved_args);
    for (resolved_args, args) |*resolved_arg, arg| {
        const arg_ty = f.typeOf(arg);
        const arg_ctype = try f.ctypeFromType(arg_ty, .parameter);
        if (arg_ctype.index == .void) {
            resolved_arg.* = .none;
            continue;
        }
        resolved_arg.* = try f.resolveInst(arg);
        // Parameter ctype differs from the value's complete ctype (e.g. arrays
        // are passed wrapped in a struct): copy the bytes into a staging local.
        if (!arg_ctype.eql(try f.ctypeFromType(arg_ty, .complete))) {
            const array_local = try f.allocAlignedLocal(inst, .{
                .ctype = arg_ctype,
                .alignas = CType.AlignAs.fromAbiAlignment(arg_ty.abiAlignment(zcu)),
            });
            try w.writeAll("memcpy(");
            try f.writeCValueMember(w, array_local, .{ .identifier = "array" });
            try w.writeAll(", ");
            try f.writeCValue(w, resolved_arg.*, .FunctionArgument);
            try w.writeAll(", sizeof(");
            try f.renderCType(w, arg_ctype);
            try w.writeAll("));");
            try f.object.newline();
            resolved_arg.* = array_local;
        }
    }

    const callee = try f.resolveInst(pl_op.operand);

    // Feed callee and all args through the big tomb to process their deaths.
    {
        var bt = iterateBigTomb(f, inst);
        try bt.feed(pl_op.operand);
        for (args) |arg| try bt.feed(arg);
    }

    const callee_ty = f.typeOf(pl_op.operand);
    const callee_is_ptr = switch (callee_ty.zigTypeTag(zcu)) {
        .@"fn" => false,
        .pointer => true,
        else => unreachable,
    };
    const fn_info = zcu.typeToFunc(if (callee_is_ptr) callee_ty.childType(zcu) else callee_ty).?;
    const ret_ty: Type = .fromInterned(fn_info.return_type);
    const ret_ctype: CType = if (ret_ty.isNoReturn(zcu))
        .void
    else
        try f.ctypeFromType(ret_ty, .parameter);

    // Decide where the result goes: tail calls return directly, void/unused
    // results are discarded, otherwise a local receives the value.
    const result_local = result: {
        if (modifier == .always_tail) {
            try w.writeAll("zig_always_tail return ");
            break :result .none;
        } else if (ret_ctype.index == .void) {
            break :result .none;
        } else if (f.liveness.isUnused(inst)) {
            // Cast to void to suppress unused-result warnings.
            try w.writeByte('(');
            try f.renderCType(w, .void);
            try w.writeByte(')');
            break :result .none;
        } else {
            const local = try f.allocAlignedLocal(inst, .{
                .ctype = ret_ctype,
                .alignas = CType.AlignAs.fromAbiAlignment(ret_ty.abiAlignment(zcu)),
            });
            try f.writeCValue(w, local, .Other);
            try w.writeAll(" = ");
            break :result local;
        }
    };

    // Print the callee: prefer a direct reference to a known function.
    callee: {
        known: {
            const callee_val = (try f.air.value(pl_op.operand, pt)) orelse break :known;
            // `need_cast` is set when a naked-cc function was coerced to a
            // non-naked function type; the C declaration's type won't match.
            const fn_nav, const need_cast = switch (ip.indexToKey(callee_val.toIntern())) {
                .@"extern" => |@"extern"| .{ @"extern".owner_nav, false },
                .func => |func| .{ func.owner_nav, Type.fromInterned(func.ty).fnCallingConvention(zcu) != .naked and
                    Type.fromInterned(func.uncoerced_ty).fnCallingConvention(zcu) == .naked },
                .ptr => |ptr| if (ptr.byte_offset == 0) switch (ptr.base_addr) {
                    .nav => |nav| .{ nav, Type.fromInterned(ptr.ty).childType(zcu).fnCallingConvention(zcu) != .naked and
                        zcu.navValue(nav).typeOf(zcu).fnCallingConvention(zcu) == .naked },
                    else => break :known,
                } else break :known,
                else => break :known,
            };
            if (need_cast) {
                try w.writeAll("((");
                try f.renderType(w, if (callee_is_ptr) callee_ty else try pt.singleConstPtrType(callee_ty));
                try w.writeByte(')');
                if (!callee_is_ptr) try w.writeByte('&');
            }
            switch (modifier) {
                .auto, .always_tail => try f.object.dg.renderNavName(w, fn_nav),
                // never_tail/never_inline route through generated wrapper fns.
                inline .never_tail, .never_inline => |m| try w.writeAll(try f.getLazyFnName(@unionInit(LazyFnKey, @tagName(m), fn_nav))),
                else => unreachable,
            }
            if (need_cast) try w.writeByte(')');
            break :callee;
        }
        switch (modifier) {
            .auto, .always_tail => {},
            .never_tail => return f.fail("CBE: runtime callee with never_tail attribute unsupported", .{}),
            .never_inline => return f.fail("CBE: runtime callee with never_inline attribute unsupported", .{}),
            else => unreachable,
        }
        // Fall back to function pointer call.
        try f.writeCValue(w, callee, .Other);
    }

    try w.writeByte('(');
    var need_comma = false;
    for (resolved_args) |resolved_arg| {
        if (resolved_arg == .none) continue;
        if (need_comma) try w.writeAll(", ");
        need_comma = true;
        try f.writeCValue(w, resolved_arg, .FunctionArgument);
        try f.freeCValue(inst, resolved_arg);
    }
    try w.writeAll(");");
    switch (modifier) {
        .always_tail => try w.writeByte('\n'),
        else => try f.object.newline(),
    }

    // Array-lowered returns arrive wrapped in a struct; unwrap via memcpy.
    const result = result: {
        if (result_local == .none or !lowersToArray(ret_ty, zcu))
            break :result result_local;

        const array_local = try f.allocLocal(inst, ret_ty);
        try w.writeAll("memcpy(");
        try f.writeCValue(w, array_local, .FunctionArgument);
        try w.writeAll(", ");
        try f.writeCValueMember(w, result_local, .{ .identifier = "array" });
        try w.writeAll(", sizeof(");
        try f.renderType(w, ret_ty);
        try w.writeAll("));");
        try f.object.newline();
        try freeLocal(f, inst, result_local.new_local, null);
        break :result array_local;
    };

    return result;
}
4771
/// Lowers `dbg_stmt` by emitting a source-location comment into the C output.
fn airDbgStmt(f: *Function, inst: Air.Inst.Index) !CValue {
    const dbg_stmt = f.air.instructions.items(.data)[@intFromEnum(inst)].dbg_stmt;
    const writer = &f.object.code.writer;
    // A real `#line` directive is deliberately NOT emitted: naively doing so
    // would produce bogus line numbers (each newline after the directive
    // advances the line), and we don't print the filename, so it would be
    // strictly unhelpful. Going that route would require suppressing newlines
    // until the next dbg_stmt — perhaps behind a compilation option someday.
    //try writer.print("#line {d}", .{dbg_stmt.line + 1});
    //try f.object.newline();
    try writer.print("/* file:{d}:{d} */", .{ dbg_stmt.line + 1, dbg_stmt.column + 1 });
    try f.object.newline();
    return .none;
}
4788
/// Lowers `dbg_empty_stmt` as a C no-op statement.
fn airDbgEmptyStmt(f: *Function, _: Air.Inst.Index) !CValue {
    const writer = &f.object.code.writer;
    try writer.writeAll("(void)0;");
    try f.object.newline();
    return .none;
}
4794
/// Lowers `dbg_inline_block`: emits a comment naming the inlined function,
/// then lowers the block body like a regular block.
fn airDbgInlineBlock(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.DbgInlineBlock, ty_pl.payload);
    const owner_nav = ip.getNav(zcu.funcInfo(extra.data.func).owner_nav);
    const writer = &f.object.code.writer;
    try writer.print("/* inline:{f} */", .{owner_nav.fqn.fmt(ip)});
    try f.object.newline();
    const body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.body_len]);
    return lowerBlock(f, inst, body);
}
4807
/// Lowers debug-variable instructions as a comment naming the instruction tag
/// and the variable. The operand is resolved only for its side effects.
fn airDbgVar(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const tag = f.air.instructions.items(.tag)[@intFromEnum(inst)];
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const name: Air.NullTerminatedString = @enumFromInt(pl_op.payload);

    // Skip resolving a comptime-known undef operand; nothing useful to emit.
    const operand_is_undef = if (try f.air.value(pl_op.operand, pt)) |val| val.isUndef(zcu) else false;
    if (!operand_is_undef) _ = try f.resolveInst(pl_op.operand);
    try reap(f, inst, &.{pl_op.operand});

    const writer = &f.object.code.writer;
    try writer.print("/* {s}:{s} */", .{ @tagName(tag), name.toSlice(f.air) });
    try f.object.newline();
    return .none;
}
4823
/// Lowers an AIR `block` by delegating to `lowerBlock` with the block body.
fn airBlock(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const block = f.air.extraData(Air.Block, ty_pl.payload);
    const body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[block.end..][0..block.data.body_len]);
    return lowerBlock(f, inst, body);
}
4829
/// Shared lowering for `block` and `dbg_inline_block`. Allocates a result
/// local (when the block yields a runtime value that is used), registers the
/// block so `br` instructions can find its label/result, generates the body,
/// and finally emits the `zig_block_N:` label that `br` jumps to.
fn lowerBlock(f: *Function, inst: Air.Inst.Index, body: []const Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const liveness_block = f.liveness.getBlock(inst);

    const block_id = f.next_block_index;
    f.next_block_index += 1;
    const w = &f.object.code.writer;

    const inst_ty = f.typeOfIndex(inst);
    const result = if (inst_ty.hasRuntimeBitsIgnoreComptime(zcu) and !f.liveness.isUnused(inst))
        try f.allocLocal(inst, inst_ty)
    else
        .none;

    // Register before generating the body so nested `br`s can look us up.
    try f.blocks.putNoClobber(f.object.dg.gpa, inst, .{
        .block_id = block_id,
        .result = result,
    });

    try genBodyResolveState(f, inst, &.{}, body, true);

    assert(f.blocks.remove(inst));

    // The body might result in some values we had beforehand being killed
    for (liveness_block.deaths) |death| {
        try die(f, inst, death.toRef());
    }

    // noreturn blocks have no `br` instructions reaching them, so we don't want a label
    if (f.object.dg.is_naked_fn) {
        // In a naked function the only permitted "control flow" is a br that
        // targets the block we are about to close; anything else is an error.
        if (f.object.dg.expected_block) |expected_block| {
            if (block_id != expected_block)
                return f.fail("runtime code not allowed in naked function", .{});
            f.object.dg.expected_block = null;
        }
    } else if (!f.typeOfIndex(inst).isNoReturn(zcu)) {
        // label must be followed by an expression, include an empty one.
        try w.print("\nzig_block_{d}:;", .{block_id});
        try f.object.newline();
    }

    return result;
}
4874
/// Lowers a `try` instruction over an error-union value via `lowerTry`.
fn airTry(f: *Function, inst: Air.Inst.Index) !CValue {
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const extra = f.air.extraData(Air.Try, pl_op.payload);
    const err_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.body_len]);
    return lowerTry(f, inst, pl_op.operand, err_body, f.typeOf(pl_op.operand), false);
}
4882
/// Lowers `try_ptr`, where the operand is a pointer to an error union,
/// via `lowerTry` in pointer mode.
fn airTryPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.TryPtr, ty_pl.payload);
    const err_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.body_len]);
    const err_union_ty = f.typeOf(extra.data.ptr).childType(zcu);
    return lowerTry(f, inst, extra.data.ptr, err_body, err_union_ty, true);
}
4892
/// Shared lowering for `try` / `try_ptr`: emits an `if (error)` guard whose
/// body is the error branch, then extracts the payload (or its address when
/// `is_ptr`). The operand is reaped at exactly one of two call sites — see
/// comment below.
fn lowerTry(
    f: *Function,
    inst: Air.Inst.Index,
    operand: Air.Inst.Ref,
    body: []const Air.Inst.Index,
    err_union_ty: Type,
    is_ptr: bool,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const err_union = try f.resolveInst(operand);
    const inst_ty = f.typeOfIndex(inst);
    const liveness_condbr = f.liveness.getCondBr(inst);
    const w = &f.object.code.writer;
    const payload_ty = err_union_ty.errorUnionPayload(zcu);
    const payload_has_bits = payload_ty.hasRuntimeBitsIgnoreComptime(zcu);

    // Only emit the error check when the error set can actually hold errors.
    if (!err_union_ty.errorUnionSet(zcu).errorSetIsEmpty(zcu)) {
        try w.writeAll("if (");
        if (!payload_has_bits) {
            // No payload: the error union lowers to just the error value.
            if (is_ptr)
                try f.writeCValueDeref(w, err_union)
            else
                try f.writeCValue(w, err_union, .Other);
        } else {
            // Reap the operand so that it can be reused inside genBody.
            // Remember we must avoid calling reap() twice for the same operand
            // in this function.
            try reap(f, inst, &.{operand});
            if (is_ptr)
                try f.writeCValueDerefMember(w, err_union, .{ .identifier = "error" })
            else
                try f.writeCValueMember(w, err_union, .{ .identifier = "error" });
        }
        try w.writeAll(") ");

        try genBodyResolveState(f, inst, liveness_condbr.else_deaths, body, false);
        try f.object.newline();
        if (f.object.dg.expected_block) |_|
            return f.fail("runtime code not allowed in naked function", .{});
    }

    // Now we have the "then branch" (in terms of the liveness data); process any deaths.
    for (liveness_condbr.then_deaths) |death| {
        try die(f, inst, death.toRef());
    }

    if (!payload_has_bits) {
        if (!is_ptr) {
            return .none;
        } else {
            // Pointer to a zero-bit payload: the operand pointer itself works.
            return err_union;
        }
    }

    try reap(f, inst, &.{operand});

    if (f.liveness.isUnused(inst)) return .none;

    // Extract the payload (or its address) into the result local.
    const local = try f.allocLocal(inst, inst_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);
    if (is_ptr) {
        try w.writeByte('&');
        try f.writeCValueDerefMember(w, err_union, .{ .identifier = "payload" });
    } else try f.writeCValueMember(w, err_union, .{ .identifier = "payload" });
    try a.end(f, w);
    return local;
}
4963
/// Lowers `br`: assigns the operand to the target block's result local (if
/// any) and emits a goto to the block label. In naked functions no code is
/// emitted; instead the expected block id is recorded for validation.
fn airBr(f: *Function, inst: Air.Inst.Index) !void {
    const branch = f.air.instructions.items(.data)[@intFromEnum(inst)].br;
    const target = f.blocks.get(branch.block_inst).?;
    const writer = &f.object.code.writer;

    if (f.object.dg.is_naked_fn) {
        if (target.result != .none) return f.fail("runtime code not allowed in naked function", .{});
        f.object.dg.expected_block = target.block_id;
        return;
    }

    // A `.none` result means the block's value is unused; skip the assignment.
    if (target.result != .none) {
        const operand_ty = f.typeOf(branch.operand);
        const operand = try f.resolveInst(branch.operand);
        try reap(f, inst, &.{branch.operand});

        const assignment = try Assignment.start(f, writer, try f.ctypeFromType(operand_ty, .complete));
        try f.writeCValue(writer, target.result, .Other);
        try assignment.assign(f, writer);
        try f.writeCValue(writer, operand, .Other);
        try assignment.end(f, writer);
    }

    try writer.print("goto zig_block_{d};\n", .{target.block_id});
}
4991
/// Lowers `repeat` as a goto back to the matching loop's label.
fn airRepeat(f: *Function, inst: Air.Inst.Index) !void {
    const repeat = f.air.instructions.items(.data)[@intFromEnum(inst)].repeat;
    const writer = &f.object.code.writer;
    try writer.print("goto zig_loop_{d};\n", .{@intFromEnum(repeat.loop_inst)});
}
4996
/// Lowers `switch_dispatch` (re-dispatch of a loop switch). A comptime-known
/// condition branches straight to the matching case label; a runtime
/// condition updates the switch's condition local and jumps back to the
/// switch loop label emitted by `airSwitchBr`.
fn airSwitchDispatch(f: *Function, inst: Air.Inst.Index) !void {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const br = f.air.instructions.items(.data)[@intFromEnum(inst)].br;
    const w = &f.object.code.writer;

    if (try f.air.value(br.operand, pt)) |cond_val| {
        // Comptime-known dispatch. Iterate the cases to find the correct
        // one, and branch directly to the corresponding case.
        const switch_br = f.air.unwrapSwitch(br.block_inst);
        var it = switch_br.iterateCases();
        // Falls through to `cases_len` (the else/default case) when no
        // item or range matches.
        const target_case_idx: u32 = target: while (it.next()) |case| {
            for (case.items) |item| {
                const val = Value.fromInterned(item.toInterned().?);
                if (cond_val.compareHetero(.eq, val, zcu)) break :target case.idx;
            }
            for (case.ranges) |range| {
                const low = Value.fromInterned(range[0].toInterned().?);
                const high = Value.fromInterned(range[1].toInterned().?);
                if (cond_val.compareHetero(.gte, low, zcu) and
                    cond_val.compareHetero(.lte, high, zcu))
                {
                    break :target case.idx;
                }
            }
        } else switch_br.cases_len;
        try w.print("goto zig_switch_{d}_dispatch_{d};\n", .{ @intFromEnum(br.block_inst), target_case_idx });
        return;
    }

    // Runtime-known dispatch. Set the switch condition, and branch back.
    const cond = try f.resolveInst(br.operand);
    const cond_local = f.loop_switch_conds.get(br.block_inst).?;
    try f.writeCValue(w, .{ .local = cond_local }, .Other);
    try w.writeAll(" = ");
    try f.writeCValue(w, cond, .Other);
    try w.writeByte(';');
    try f.object.newline();
    try w.print("goto zig_switch_{d}_loop;\n", .{@intFromEnum(br.block_inst)});
}
5037
/// Lowers an AIR `bitcast` by delegating to `bitcast` and binding the result
/// to this instruction.
fn airBitcast(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
    const dest_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(ty_op.operand);
    const source_ty = f.typeOf(ty_op.operand);
    const result = try bitcast(f, dest_ty, operand, source_ty);
    try reap(f, inst, &.{ty_op.operand});
    return f.moveCValue(inst, dest_ty, result);
}
5049
/// Emits C code that reinterprets the bytes of `operand` (typed `operand_ty`)
/// as `dest_ty` and returns the `CValue` holding the result. Returns `operand`
/// unchanged when the ABI integer representations already match; pointer casts
/// use a plain C cast; everything else goes through `memcpy`, with offset
/// adjustments on big-endian targets where ABI-integer padding sits at the low
/// bytes. After copying into an ABI integer, the padding bits of the most
/// significant limb are re-normalized with `zig_wrap_*`.
fn bitcast(f: *Function, dest_ty: Type, operand: CValue, operand_ty: Type) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const target = &f.object.dg.mod.resolved_target.result;
    const ctype_pool = &f.object.dg.ctype_pool;
    const w = &f.object.code.writer;

    if (operand_ty.isAbiInt(zcu) and dest_ty.isAbiInt(zcu)) {
        // Fixed: `src_info` was previously bound from `dest_ty` and
        // `dest_info` from `operand_ty` (swapped); that was only harmless
        // because this comparison is symmetric.
        const src_info = operand_ty.intInfo(zcu);
        const dest_info = dest_ty.intInfo(zcu);
        if (src_info.signedness == dest_info.signedness and
            src_info.bits == dest_info.bits) return operand;
    }

    if (dest_ty.isPtrAtRuntime(zcu) or operand_ty.isPtrAtRuntime(zcu)) {
        // Pointer-involved casts can be expressed as a direct C cast.
        const local = try f.allocLocal(null, dest_ty);
        try f.writeCValue(w, local, .Other);
        try w.writeAll(" = (");
        try f.renderType(w, dest_ty);
        try w.writeByte(')');
        try f.writeCValue(w, operand, .Other);
        try w.writeByte(';');
        try f.object.newline();
        return local;
    }

    // memcpy needs an addressable lvalue; spill constants into a temp local.
    const operand_lval = if (operand == .constant) blk: {
        const operand_local = try f.allocLocal(null, operand_ty);
        try f.writeCValue(w, operand_local, .Other);
        try w.writeAll(" = ");
        try f.writeCValue(w, operand, .Other);
        try w.writeByte(';');
        try f.object.newline();
        break :blk operand_local;
    } else operand;

    const local = try f.allocLocal(null, dest_ty);
    // On big-endian targets, copying ABI integers with padding bits is awkward, because the padding bits are at the low bytes of the value.
    // We need to offset the source or destination pointer appropriately and copy the right number of bytes.
    if (target.cpu.arch.endian() == .big and dest_ty.isAbiInt(zcu) and !operand_ty.isAbiInt(zcu)) {
        // e.g. [10]u8 -> u80. We need to offset the destination so that we copy to the least significant bits of the integer.
        const offset = dest_ty.abiSize(zcu) - operand_ty.abiSize(zcu);
        try w.writeAll("memcpy((char *)&");
        try f.writeCValue(w, local, .Other);
        try w.print(" + {d}, &", .{offset});
        try f.writeCValue(w, operand_lval, .Other);
        try w.print(", {d});", .{operand_ty.abiSize(zcu)});
    } else if (target.cpu.arch.endian() == .big and operand_ty.isAbiInt(zcu) and !dest_ty.isAbiInt(zcu)) {
        // e.g. u80 -> [10]u8. We need to offset the source so that we copy from the least significant bits of the integer.
        const offset = operand_ty.abiSize(zcu) - dest_ty.abiSize(zcu);
        try w.writeAll("memcpy(&");
        try f.writeCValue(w, local, .Other);
        try w.writeAll(", (const char *)&");
        try f.writeCValue(w, operand_lval, .Other);
        try w.print(" + {d}, {d});", .{ offset, dest_ty.abiSize(zcu) });
    } else {
        try w.writeAll("memcpy(&");
        try f.writeCValue(w, local, .Other);
        try w.writeAll(", &");
        try f.writeCValue(w, operand_lval, .Other);
        try w.print(", {d});", .{@min(dest_ty.abiSize(zcu), operand_ty.abiSize(zcu))});
    }

    try f.object.newline();

    // Ensure padding bits have the expected value.
    if (dest_ty.isAbiInt(zcu)) {
        const dest_ctype = try f.ctypeFromType(dest_ty, .complete);
        const dest_info = dest_ty.intInfo(zcu);
        var bits: u16 = dest_info.bits;
        var wrap_ctype: ?CType = null;
        var need_bitcasts = false;

        try f.writeCValue(w, local, .Other);
        switch (dest_ctype.info(ctype_pool)) {
            else => {},
            .array => |array_info| {
                // Multi-limb integer: only the most significant limb (last on
                // little-endian, first on big-endian) carries padding bits.
                try w.print("[{d}]", .{switch (target.cpu.arch.endian()) {
                    .little => array_info.len - 1,
                    .big => 0,
                }});
                wrap_ctype = array_info.elem_ctype.toSignedness(dest_info.signedness);
                need_bitcasts = wrap_ctype.?.index == .zig_i128;
                // Reduce `bits` to the bit count within the top limb (1..limb bits).
                bits -= 1;
                bits %= @as(u16, @intCast(f.byteSize(array_info.elem_ctype) * 8));
                bits += 1;
            },
        }
        try w.writeAll(" = ");
        if (need_bitcasts) {
            try w.writeAll("zig_bitCast_");
            try f.object.dg.renderCTypeForBuiltinFnName(w, wrap_ctype.?.toUnsigned());
            try w.writeByte('(');
        }
        try w.writeAll("zig_wrap_");
        const info_ty = try pt.intType(dest_info.signedness, bits);
        if (wrap_ctype) |ctype|
            try f.object.dg.renderCTypeForBuiltinFnName(w, ctype)
        else
            try f.object.dg.renderTypeForBuiltinFnName(w, info_ty);
        try w.writeByte('(');
        if (need_bitcasts) {
            try w.writeAll("zig_bitCast_");
            try f.object.dg.renderCTypeForBuiltinFnName(w, wrap_ctype.?);
            try w.writeByte('(');
        }
        try f.writeCValue(w, local, .Other);
        switch (dest_ctype.info(ctype_pool)) {
            else => {},
            .array => |array_info| try w.print("[{d}]", .{
                switch (target.cpu.arch.endian()) {
                    .little => array_info.len - 1,
                    .big => 0,
                },
            }),
        }
        if (need_bitcasts) try w.writeByte(')');
        try f.object.dg.renderBuiltinInfo(w, info_ty, .bits);
        if (need_bitcasts) try w.writeByte(')');
        try w.writeAll(");");
        try f.object.newline();
    }

    try f.freeCValue(null, operand_lval);
    return local;
}
5176
/// Lowers `trap` as a call to `zig_trap()`. Emits nothing inside naked
/// functions, where even trap calls are disallowed.
fn airTrap(f: *Function, w: *Writer) !void {
    if (f.object.dg.is_naked_fn) return;
    try w.writeAll("zig_trap();\n");
}
5182
/// Lowers `breakpoint` as a call to `zig_breakpoint()`.
fn airBreakpoint(f: *Function) !CValue {
    const writer = &f.object.code.writer;
    try writer.writeAll("zig_breakpoint();");
    try f.object.newline();
    return .none;
}
5189
/// Lowers `ret_addr`: stores `zig_return_address()`, cast to usize, in a local.
fn airRetAddr(f: *Function, inst: Air.Inst.Index) !CValue {
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, .usize);
    try f.writeCValue(writer, result, .Other);
    try writer.writeAll(" = (");
    try f.renderType(writer, .usize);
    try writer.writeAll(")zig_return_address();");
    try f.object.newline();
    return result;
}
5200
/// Lowers `frame_addr`: stores `zig_frame_address()`, cast to usize, in a local.
fn airFrameAddress(f: *Function, inst: Air.Inst.Index) !CValue {
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, .usize);
    try f.writeCValue(writer, result, .Other);
    try writer.writeAll(" = (");
    try f.renderType(writer, .usize);
    try writer.writeAll(")zig_frame_address();");
    try f.object.newline();
    return result;
}
5211
/// Lowers `unreach` as a call to `zig_unreachable()`. Emits nothing inside
/// naked functions, where even unreachable calls are disallowed.
fn airUnreach(o: *Object) !void {
    if (o.dg.is_naked_fn) return;
    try o.code.writer.writeAll("zig_unreachable();\n");
}
5217
/// Lowers `loop`: emits a label that matching `repeat` instructions jump back
/// to, then generates the body inline. Since arbitrary `repeat`s need a label
/// anyway, no C looping construct is required at all.
fn airLoop(f: *Function, inst: Air.Inst.Index) !void {
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const loop = f.air.extraData(Air.Block, ty_pl.payload);
    const body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[loop.end..][0..loop.data.body_len]);

    try f.object.code.writer.print("zig_loop_{d}:", .{@intFromEnum(inst)});
    try f.object.newline();
    // The body is noreturn, so there is no state to restore afterwards.
    try genBodyInner(f, body);
}
5232
/// Lowers `cond_br` as `if (cond) { then } ... else-body-inline`. Both
/// branches are noreturn, so the else body can be emitted straight after the
/// `if` with no `else` keyword.
fn airCondBr(f: *Function, inst: Air.Inst.Index) !void {
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const cond = try f.resolveInst(pl_op.operand);
    try reap(f, inst, &.{pl_op.operand});
    const extra = f.air.extraData(Air.CondBr, pl_op.payload);
    const then_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end..][0..extra.data.then_body_len]);
    const else_body: []const Air.Inst.Index = @ptrCast(f.air.extra.items[extra.end + then_body.len ..][0..extra.data.else_body_len]);
    const liveness_condbr = f.liveness.getCondBr(inst);
    const w = &f.object.code.writer;

    try w.writeAll("if (");
    try f.writeCValue(w, cond, .Other);
    try w.writeAll(") ");

    try genBodyResolveState(f, inst, liveness_condbr.then_deaths, then_body, false);
    try f.object.newline();
    // A pending expected_block after the then-branch means a `br` occurred in
    // a naked function; with a runtime else-branch remaining that is invalid.
    if (else_body.len > 0) if (f.object.dg.expected_block) |_|
        return f.fail("runtime code not allowed in naked function", .{});

    // We don't need to use `genBodyResolveState` for the else block, because this instruction is
    // noreturn so must terminate a body, therefore we don't need to leave `value_map` or
    // `free_locals_map` well defined (our parent is responsible for doing that).

    for (liveness_condbr.else_deaths) |death| {
        try die(f, inst, death.toRef());
    }

    // We never actually need an else block, because our branches are noreturn so must (for
    // instance) `br` to a block (label).

    try genBodyInner(f, else_body);
}
5265
/// Lowers `switch_br` (and loop switches when `is_dispatch_loop`) to a C
/// `switch`. Cases with ranges cannot be expressed as C `case` labels, so they
/// are handled with `if` chains inside `default:`. For dispatch loops the
/// condition is kept in a local that `airSwitchDispatch` writes before jumping
/// back to the `zig_switch_N_loop` label; each case additionally gets a
/// `zig_switch_N_dispatch_M` label for direct comptime-known dispatch.
fn airSwitchBr(f: *Function, inst: Air.Inst.Index, is_dispatch_loop: bool) !void {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const gpa = f.object.dg.gpa;
    const switch_br = f.air.unwrapSwitch(inst);
    const init_condition = try f.resolveInst(switch_br.operand);
    try reap(f, inst, &.{switch_br.operand});
    const condition_ty = f.typeOf(switch_br.operand);
    const w = &f.object.code.writer;

    // For dispatches, we will create a local alloc to contain the condition value.
    // This may not result in optimal codegen for switch loops, but it minimizes the
    // amount of C code we generate, which is probably more desirable here (and is simpler).
    const condition = if (is_dispatch_loop) cond: {
        const new_local = try f.allocLocal(inst, condition_ty);
        try f.copyCValue(try f.ctypeFromType(condition_ty, .complete), new_local, init_condition);
        try w.print("zig_switch_{d}_loop:", .{@intFromEnum(inst)});
        try f.object.newline();
        try f.loop_switch_conds.put(gpa, inst, new_local.new_local);
        break :cond new_local;
    } else init_condition;

    defer if (is_dispatch_loop) {
        assert(f.loop_switch_conds.remove(inst));
    };

    try w.writeAll("switch (");

    // C cannot switch on bool or pointers directly; lower to u1/usize.
    const lowered_condition_ty: Type = if (condition_ty.toIntern() == .bool_type)
        .u1
    else if (condition_ty.isPtrAtRuntime(zcu))
        .usize
    else
        condition_ty;
    if (condition_ty.toIntern() != lowered_condition_ty.toIntern()) {
        try w.writeByte('(');
        try f.renderType(w, lowered_condition_ty);
        try w.writeByte(')');
    }
    try f.writeCValue(w, condition, .Other);
    try w.writeAll(") {");
    f.object.indent();

    const liveness = try f.liveness.getSwitchBr(gpa, inst, switch_br.cases_len + 1);
    defer gpa.free(liveness.deaths);

    // First pass: emit plain `case` labels for all non-range cases.
    var any_range_cases = false;
    var it = switch_br.iterateCases();
    while (it.next()) |case| {
        if (case.ranges.len > 0) {
            any_range_cases = true;
            continue;
        }
        for (case.items) |item| {
            try f.object.newline();
            try w.writeAll("case ");
            const item_value = try f.air.value(item, pt);
            // If `item_value` is a pointer with a known integer address, print the address
            // with no cast to avoid a warning.
            write_val: {
                if (condition_ty.isPtrAtRuntime(zcu)) {
                    if (item_value.?.getUnsignedInt(zcu)) |item_int| {
                        try w.print("{f}", .{try f.fmtIntLiteralDec(try pt.intValue(lowered_condition_ty, item_int))});
                        break :write_val;
                    }
                }
                if (condition_ty.isPtrAtRuntime(zcu)) {
                    try w.writeByte('(');
                    try f.renderType(w, .usize);
                    try w.writeByte(')');
                }
                try f.object.dg.renderValue(w, (try f.air.value(item, pt)).?, .Other);
            }
            try w.writeByte(':');
        }
        try w.writeAll(" {");
        f.object.indent();
        try f.object.newline();
        if (is_dispatch_loop) {
            try w.print("zig_switch_{d}_dispatch_{d}:;", .{ @intFromEnum(inst), case.idx });
            try f.object.newline();
        }
        try genBodyResolveState(f, inst, liveness.deaths[case.idx], case.body, true);
        try f.object.outdent();
        try w.writeByte('}');
        if (f.object.dg.expected_block) |_|
            return f.fail("runtime code not allowed in naked function", .{});

        // The case body must be noreturn so we don't need to insert a break.
    }

    const else_body = it.elseBody();
    try f.object.newline();

    try w.writeAll("default: ");
    if (any_range_cases) {
        // We will iterate the cases again to handle those with ranges, and generate
        // code using conditions rather than switch cases for such cases.
        it = switch_br.iterateCases();
        while (it.next()) |case| {
            if (case.ranges.len == 0) continue; // handled above

            try w.writeAll("if (");
            for (case.items, 0..) |item, item_i| {
                if (item_i != 0) try w.writeAll(" || ");
                try f.writeCValue(w, condition, .Other);
                try w.writeAll(" == ");
                try f.object.dg.renderValue(w, (try f.air.value(item, pt)).?, .Other);
            }
            for (case.ranges, 0..) |range, range_i| {
                if (case.items.len != 0 or range_i != 0) try w.writeAll(" || ");
                // "(x >= lower && x <= upper)"
                try w.writeByte('(');
                try f.writeCValue(w, condition, .Other);
                try w.writeAll(" >= ");
                try f.object.dg.renderValue(w, (try f.air.value(range[0], pt)).?, .Other);
                try w.writeAll(" && ");
                try f.writeCValue(w, condition, .Other);
                try w.writeAll(" <= ");
                try f.object.dg.renderValue(w, (try f.air.value(range[1], pt)).?, .Other);
                try w.writeByte(')');
            }
            try w.writeAll(") {");
            f.object.indent();
            try f.object.newline();
            if (is_dispatch_loop) {
                try w.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), case.idx });
            }
            try genBodyResolveState(f, inst, liveness.deaths[case.idx], case.body, true);
            try f.object.outdent();
            try w.writeByte('}');
            if (f.object.dg.expected_block) |_|
                return f.fail("runtime code not allowed in naked function", .{});
        }
    }
    // The else case gets dispatch index `cases_len`.
    if (is_dispatch_loop) {
        try w.print("zig_switch_{d}_dispatch_{d}: ", .{ @intFromEnum(inst), switch_br.cases_len });
    }
    if (else_body.len > 0) {
        // Note that this must be the last case, so we do not need to use `genBodyResolveState` since
        // the parent block will do it (because the case body is noreturn).
        for (liveness.deaths[liveness.deaths.len - 1]) |death| {
            try die(f, inst, death.toRef());
        }
        try genBody(f, else_body);
        if (f.object.dg.expected_block) |_|
            return f.fail("runtime code not allowed in naked function", .{});
    } else try airUnreach(&f.object);
    try f.object.newline();
    try f.object.outdent();
    try w.writeAll("}\n");
}
5418
/// Whether an inline-asm input with the given constraint and value must be
/// staged through a local variable before being passed to the asm statement.
fn asmInputNeedsLocal(f: *Function, constraint: []const u8, value: CValue) bool {
    const dg = f.object.dg;
    const target = &dg.mod.resolved_target.result;
    switch (constraint[0]) {
        '{' => return true, // explicit register constraint
        'i', 'r' => return false,
        'I' => return !target.cpu.arch.isArm(),
        else => {},
    }
    // Other constraints: only constants that are direct (offset-0) pointers
    // to a nav can be used in place; everything else needs a local.
    const val = switch (value) {
        .constant => |val| val,
        else => return false,
    };
    switch (dg.pt.zcu.intern_pool.indexToKey(val.toIntern())) {
        .ptr => |ptr| {
            if (ptr.byte_offset != 0) return true;
            return switch (ptr.base_addr) {
                .nav => false,
                else => true,
            };
        },
        else => return true,
    }
}
5438
/// Lowers an AIR `assembly` instruction to a GCC-style extended `__asm`
/// expression. The lowering proceeds in several passes over the constraint
/// data stored in `f.air.extra` (hence `extra_i` is rewound between passes):
///   1. declare register-pinned locals for `={reg}` outputs and `{reg}`
///      inputs (plus plain locals for inputs that cannot be passed inline),
///   2. rewrite the asm template from Zig's `%[name:modifier]` syntax to the
///      GCC `%modifier[name]` form,
///   3. emit the `__asm` statement with output/input constraint lists and
///      the clobber list,
///   4. copy register-pinned outputs back into their real destinations.
fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.Asm, ty_pl.payload);
    const is_volatile = extra.data.flags.is_volatile;
    const outputs_len = extra.data.flags.outputs_len;
    const gpa = f.object.dg.gpa;
    // `extra_i` walks the trailing extra data: output refs, input refs, then
    // the interleaved constraint/name strings.
    var extra_i: usize = extra.end;
    const outputs: []const Air.Inst.Ref = @ptrCast(f.air.extra.items[extra_i..][0..outputs_len]);
    extra_i += outputs.len;
    const inputs: []const Air.Inst.Ref = @ptrCast(f.air.extra.items[extra_i..][0..extra.data.inputs_len]);
    extra_i += inputs.len;

    const result = result: {
        const w = &f.object.code.writer;
        const inst_ty = f.typeOfIndex(inst);
        // Result local for the instruction's own value (used by the `=` output
        // with no operand, i.e. `output == .none`), or `.none` if zero-bit.
        const inst_local = if (inst_ty.hasRuntimeBitsIgnoreComptime(zcu)) local: {
            const inst_local = try f.allocLocalValue(.{
                .ctype = try f.ctypeFromType(inst_ty, .complete),
                .alignas = CType.AlignAs.fromAbiAlignment(inst_ty.abiAlignment(zcu)),
            });
            if (f.wantSafety()) {
                // Poison the result with undef so reads before the asm runs
                // are detectable.
                try f.writeCValue(w, inst_local, .Other);
                try w.writeAll(" = ");
                try f.writeCValue(w, .{ .undef = inst_ty }, .Other);
                try w.writeByte(';');
                try f.object.newline();
            }
            break :local inst_local;
        } else .none;

        // Locals allocated from here on are consumed again, in the same
        // order, by the later passes (tracked via `locals_index`).
        const locals_begin: LocalIndex = @intCast(f.locals.items.len);
        const constraints_extra_begin = extra_i;
        // Pass 1a: declare a register-pinned local for each `={reg}` output.
        for (outputs) |output| {
            const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]);
            const constraint = mem.sliceTo(extra_bytes, 0);
            const name = mem.sliceTo(extra_bytes[constraint.len + 1 ..], 0);
            // This equation accounts for the fact that even if we have exactly 4 bytes
            // for the string, we still use the next u32 for the null terminator.
            extra_i += (constraint.len + name.len + (2 + 3)) / 4;

            if (constraint.len < 2 or constraint[0] != '=' or
                (constraint[1] == '{' and constraint[constraint.len - 1] != '}'))
            {
                return f.fail("CBE: constraint not supported: '{s}'", .{constraint});
            }

            const is_reg = constraint[1] == '{';
            if (is_reg) {
                const output_ty = if (output == .none) inst_ty else f.typeOf(output).childType(zcu);
                // `register T name __asm("reg")` pins the local to a register.
                try w.writeAll("register ");
                const output_local = try f.allocLocalValue(.{
                    .ctype = try f.ctypeFromType(output_ty, .complete),
                    .alignas = CType.AlignAs.fromAbiAlignment(output_ty.abiAlignment(zcu)),
                });
                try f.allocs.put(gpa, output_local.new_local, false);
                try f.object.dg.renderTypeAndName(w, output_ty, output_local, .{}, .none, .complete);
                try w.writeAll(" __asm(\"");
                try w.writeAll(constraint["={".len .. constraint.len - "}".len]);
                try w.writeAll("\")");
                if (f.wantSafety()) {
                    try w.writeAll(" = ");
                    try f.writeCValue(w, .{ .undef = output_ty }, .Other);
                }
                try w.writeByte(';');
                try f.object.newline();
            }
        }
        // Pass 1b: declare locals for inputs that cannot be passed inline
        // (register-pinned or otherwise per `asmInputNeedsLocal`) and
        // initialize them from the resolved input values.
        for (inputs) |input| {
            const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]);
            const constraint = mem.sliceTo(extra_bytes, 0);
            const name = mem.sliceTo(extra_bytes[constraint.len + 1 ..], 0);
            // This equation accounts for the fact that even if we have exactly 4 bytes
            // for the string, we still use the next u32 for the null terminator.
            extra_i += (constraint.len + name.len + (2 + 3)) / 4;

            if (constraint.len < 1 or mem.indexOfScalar(u8, "=+&%", constraint[0]) != null or
                (constraint[0] == '{' and constraint[constraint.len - 1] != '}'))
            {
                return f.fail("CBE: constraint not supported: '{s}'", .{constraint});
            }

            const is_reg = constraint[0] == '{';
            const input_val = try f.resolveInst(input);
            if (asmInputNeedsLocal(f, constraint, input_val)) {
                const input_ty = f.typeOf(input);
                if (is_reg) try w.writeAll("register ");
                const input_local = try f.allocLocalValue(.{
                    .ctype = try f.ctypeFromType(input_ty, .complete),
                    .alignas = CType.AlignAs.fromAbiAlignment(input_ty.abiAlignment(zcu)),
                });
                try f.allocs.put(gpa, input_local.new_local, false);
                try f.object.dg.renderTypeAndName(w, input_ty, input_local, Const, .none, .complete);
                if (is_reg) {
                    try w.writeAll(" __asm(\"");
                    try w.writeAll(constraint["{".len .. constraint.len - "}".len]);
                    try w.writeAll("\")");
                }
                try w.writeAll(" = ");
                try f.writeCValue(w, input_val, .Other);
                try w.writeByte(';');
                try f.object.newline();
            }
        }

        // Pass 2: rewrite the template from Zig's `%[name:modifier]` to the
        // GCC `%modifier[name]` form; everything else is copied verbatim.
        {
            const asm_source = mem.sliceAsBytes(f.air.extra.items[extra_i..])[0..extra.data.source_len];

            var stack = std.heap.stackFallback(256, f.object.dg.gpa);
            const allocator = stack.get();
            // The rewrite never grows the string, so the original length is
            // an upper bound for the fixed-up copy.
            const fixed_asm_source = try allocator.alloc(u8, asm_source.len);
            defer allocator.free(fixed_asm_source);

            var src_i: usize = 0;
            var dst_i: usize = 0;
            while (true) {
                // Copy the literal run up to the next '%'.
                const literal = mem.sliceTo(asm_source[src_i..], '%');
                src_i += literal.len;

                @memcpy(fixed_asm_source[dst_i..][0..literal.len], literal);
                dst_i += literal.len;

                if (src_i >= asm_source.len) break;

                src_i += 1;
                if (src_i >= asm_source.len)
                    return f.fail("CBE: invalid inline asm string '{s}'", .{asm_source});

                fixed_asm_source[dst_i] = '%';
                dst_i += 1;

                if (asm_source[src_i] != '[') {
                    // This also handles %%
                    fixed_asm_source[dst_i] = asm_source[src_i];
                    src_i += 1;
                    dst_i += 1;
                    continue;
                }

                // `%[name:modifier]` -> emit modifier before the bracketed
                // name, as GCC expects (`%modifier[name]`).
                const desc = mem.sliceTo(asm_source[src_i..], ']');
                if (mem.indexOfScalar(u8, desc, ':')) |colon| {
                    const name = desc[0..colon];
                    const modifier = desc[colon + 1 ..];

                    @memcpy(fixed_asm_source[dst_i..][0..modifier.len], modifier);
                    dst_i += modifier.len;
                    @memcpy(fixed_asm_source[dst_i..][0..name.len], name);
                    dst_i += name.len;

                    src_i += desc.len;
                    if (src_i >= asm_source.len)
                        return f.fail("CBE: invalid inline asm string '{s}'", .{asm_source});
                }
            }

            try w.writeAll("__asm");
            if (is_volatile) try w.writeAll(" volatile");
            try w.print("({f}", .{fmtStringLiteral(fixed_asm_source[0..dst_i], null)});
        }

        // Pass 3: re-scan the constraints to emit the output and input
        // operand lists, consuming the locals allocated in pass 1 in order.
        extra_i = constraints_extra_begin;
        var locals_index = locals_begin;
        try w.writeByte(':');
        for (outputs, 0..) |output, index| {
            const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]);
            const constraint = mem.sliceTo(extra_bytes, 0);
            const name = mem.sliceTo(extra_bytes[constraint.len + 1 ..], 0);
            // This equation accounts for the fact that even if we have exactly 4 bytes
            // for the string, we still use the next u32 for the null terminator.
            extra_i += (constraint.len + name.len + (2 + 3)) / 4;

            if (index > 0) try w.writeByte(',');
            try w.writeByte(' ');
            if (!mem.eql(u8, name, "_")) try w.print("[{s}]", .{name});
            const is_reg = constraint[1] == '{';
            // Register-pinned outputs were rewritten to plain "=r" against
            // their dedicated local; others keep the original constraint.
            try w.print("{f}(", .{fmtStringLiteral(if (is_reg) "=r" else constraint, null)});
            if (is_reg) {
                try f.writeCValue(w, .{ .local = locals_index }, .Other);
                locals_index += 1;
            } else if (output == .none) {
                try f.writeCValue(w, inst_local, .FunctionArgument);
            } else {
                try f.writeCValueDeref(w, try f.resolveInst(output));
            }
            try w.writeByte(')');
        }
        try w.writeByte(':');
        for (inputs, 0..) |input, index| {
            const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]);
            const constraint = mem.sliceTo(extra_bytes, 0);
            const name = mem.sliceTo(extra_bytes[constraint.len + 1 ..], 0);
            // This equation accounts for the fact that even if we have exactly 4 bytes
            // for the string, we still use the next u32 for the null terminator.
            extra_i += (constraint.len + name.len + (2 + 3)) / 4;

            if (index > 0) try w.writeByte(',');
            try w.writeByte(' ');
            if (!mem.eql(u8, name, "_")) try w.print("[{s}]", .{name});

            const is_reg = constraint[0] == '{';
            const input_val = try f.resolveInst(input);
            try w.print("{f}(", .{fmtStringLiteral(if (is_reg) "r" else constraint, null)});
            try f.writeCValue(w, if (asmInputNeedsLocal(f, constraint, input_val)) local: {
                const input_local_idx = locals_index;
                locals_index += 1;
                break :local .{ .local = input_local_idx };
            } else input_val, .Other);
            try w.writeByte(')');
        }
        try w.writeByte(':');
        // Clobber list: encoded as a comptime struct of bools, one field per
        // register name; some targets need the name rewritten for C.
        const ip = &zcu.intern_pool;
        const aggregate = ip.indexToKey(extra.data.clobbers).aggregate;
        const struct_type: Type = .fromInterned(aggregate.ty);
        switch (aggregate.storage) {
            .elems => |elems| for (elems, 0..) |elem, i| switch (elem) {
                .bool_true => {
                    const field_name = struct_type.structFieldName(i, zcu).toSlice(ip).?;
                    assert(field_name.len != 0);

                    const target = &f.object.dg.mod.resolved_target.result;
                    var c_name_buf: [16]u8 = undefined;
                    const name =
                        if ((target.cpu.arch.isMIPS() or target.cpu.arch == .alpha) and field_name[0] == 'r') name: {
                            // Convert "rN" to "$N"
                            const c_name = (&c_name_buf)[0..field_name.len];
                            @memcpy(c_name, field_name);
                            c_name_buf[0] = '$';
                            break :name c_name;
                        } else if ((target.cpu.arch.isMIPS() and (mem.startsWith(u8, field_name, "fcc") or field_name[0] == 'w')) or
                            ((target.cpu.arch.isMIPS() or target.cpu.arch == .alpha) and field_name[0] == 'f') or
                            (target.cpu.arch == .kvx and !mem.eql(u8, field_name, "memory"))) name: {
                            // "$" prefix for these registers
                            c_name_buf[0] = '$';
                            @memcpy((&c_name_buf)[1..][0..field_name.len], field_name);
                            break :name (&c_name_buf)[0 .. 1 + field_name.len];
                        } else if (target.cpu.arch.isSPARC() and
                            (mem.eql(u8, field_name, "ccr") or mem.eql(u8, field_name, "icc") or mem.eql(u8, field_name, "xcc"))) name: {
                            // C compilers just use `icc` to encompass all of these.
                            break :name "icc";
                        } else field_name;

                    try w.print(" {f}", .{fmtStringLiteral(name, null)});
                    (try w.writableArray(1))[0] = ',';
                },
                .bool_false => continue,
                else => unreachable,
            },
            .repeated_elem => |elem| switch (elem) {
                .bool_true => @panic("TODO"),
                .bool_false => {},
                else => unreachable,
            },
            .bytes => @panic("TODO"),
        }
        // Erase either the trailing comma, or (when no clobbers were written)
        // the ':' introducing the empty clobber list — both yield valid C.
        w.undo(1); // erase the last comma
        try w.writeAll(");");
        try f.object.newline();

        // Pass 4: copy register-pinned outputs from their locals back into
        // the actual result locations.
        extra_i = constraints_extra_begin;
        locals_index = locals_begin;
        for (outputs) |output| {
            const extra_bytes = mem.sliceAsBytes(f.air.extra.items[extra_i..]);
            const constraint = mem.sliceTo(extra_bytes, 0);
            const name = mem.sliceTo(extra_bytes[constraint.len + 1 ..], 0);
            // This equation accounts for the fact that even if we have exactly 4 bytes
            // for the string, we still use the next u32 for the null terminator.
            extra_i += (constraint.len + name.len + (2 + 3)) / 4;

            const is_reg = constraint[1] == '{';
            if (is_reg) {
                try f.writeCValueDeref(w, if (output == .none)
                    .{ .local_ref = inst_local.new_local }
                else
                    try f.resolveInst(output));
                try w.writeAll(" = ");
                try f.writeCValue(w, .{ .local = locals_index }, .Other);
                locals_index += 1;
                try w.writeByte(';');
                try f.object.newline();
            }
        }

        break :result if (f.liveness.isUnused(inst)) .none else inst_local;
    };

    // Operand deaths were not reaped inline; feed them through the big tomb.
    var bt = iterateBigTomb(f, inst);
    for (outputs) |output| {
        if (output == .none) continue;
        try bt.feed(output);
    }
    for (inputs) |input| {
        try bt.feed(input);
    }

    return result;
}
5736
/// Lowers the `is_null`-family instructions: emits a bool local assigned from
/// a comparison of the optional's C representation against its null sentinel.
/// The comparison operator comes from `operator` (rendered via
/// `compareOperatorC`); `is_ptr` means the operand is a pointer to the
/// optional rather than the optional itself.
fn airIsNull(
    f: *Function,
    inst: Air.Inst.Index,
    operator: std.math.CompareOperator,
    is_ptr: bool,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ctype_pool = &f.object.dg.ctype_pool;
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const w = &f.object.code.writer;
    const operand = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});

    const local = try f.allocLocal(inst, .bool);
    const a = try Assignment.start(f, w, .bool);
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);

    const operand_ty = f.typeOf(un_op);
    const optional_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;
    const opt_ctype = try f.ctypeFromType(optional_ty, .complete);
    // Write the left-hand side directly to `w` and select the sentinel
    // literal for the right-hand side, based on the optional's C layout.
    const rhs = switch (opt_ctype.info(ctype_pool)) {
        // Scalar layout: the optional is a single value; compare it against
        // the null spelling for its C type (bool -> "true", int -> "0",
        // pointer -> "NULL").
        .basic, .pointer => rhs: {
            if (is_ptr)
                try f.writeCValueDeref(w, operand)
            else
                try f.writeCValue(w, operand, .Other);
            break :rhs if (opt_ctype.isBool())
                "true"
            else if (opt_ctype.isInteger())
                "0"
            else
                "NULL";
        },
        .aligned, .array, .vector, .fwd_decl, .function => unreachable,
        // Aggregate layout: distinguished by the name of the first field.
        .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
            // `{ payload, is_null }` struct: test the `is_null` flag.
            .is_null, .payload => rhs: {
                if (is_ptr)
                    try f.writeCValueDerefMember(w, operand, .{ .identifier = "is_null" })
                else
                    try f.writeCValueMember(w, operand, .{ .identifier = "is_null" });
                break :rhs "true";
            },
            // Slice-shaped optional: a NULL `ptr` member encodes null.
            .ptr, .len => rhs: {
                if (is_ptr)
                    try f.writeCValueDerefMember(w, operand, .{ .identifier = "ptr" })
                else
                    try f.writeCValueMember(w, operand, .{ .identifier = "ptr" });
                break :rhs "NULL";
            },
            else => unreachable,
        },
    };
    try w.writeAll(compareOperatorC(operator));
    try w.writeAll(rhs);
    try a.end(f, w);
    return local;
}
5797
/// Lowers `optional_payload` (`is_ptr == false`) and its pointer variant
/// (`is_ptr == true`): extracts the payload — or a pointer to it — from an
/// optional value or a pointer to an optional.
fn airOptionalPayload(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ctype_pool = &f.object.dg.ctype_pool;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const operand_ty = f.typeOf(ty_op.operand);
    const opt_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;
    const opt_ctype = try f.ctypeFromType(opt_ty, .complete);
    // Bool-shaped optional: there are no payload bits to extract.
    if (opt_ctype.isBool()) return if (is_ptr) .{ .undef = inst_ty } else .none;

    const operand = try f.resolveInst(ty_op.operand);
    switch (opt_ctype.info(ctype_pool)) {
        // Scalar layout: the optional *is* its payload; just forward it.
        .basic, .pointer => return f.moveCValue(inst, inst_ty, operand),
        .aligned, .array, .vector, .fwd_decl, .function => unreachable,
        .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
            // `{ payload, is_null }` struct: read the `payload` member, or
            // take its address for the pointer variant.
            .is_null, .payload => {
                const w = &f.object.code.writer;
                const local = try f.allocLocal(inst, inst_ty);
                const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete));
                try f.writeCValue(w, local, .Other);
                try a.assign(f, w);
                if (is_ptr) {
                    try w.writeByte('&');
                    try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" });
                } else try f.writeCValueMember(w, operand, .{ .identifier = "payload" });
                try a.end(f, w);
                return local;
            },
            // Slice-shaped optional: the whole value doubles as the payload.
            .ptr, .len => return f.moveCValue(inst, inst_ty, operand),
            else => unreachable,
        },
    }
}
5833
/// Lowers `optional_payload_ptr_set`: marks the optional pointed to by the
/// operand as non-null, and — when the instruction's result is used —
/// returns a pointer to the payload.
fn airOptionalPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
    const w = &f.object.code.writer;
    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});
    const operand_ty = f.typeOf(ty_op.operand);

    const inst_ty = f.typeOfIndex(inst);
    const opt_ctype = try f.ctypeFromType(operand_ty.childType(zcu), .complete);
    switch (opt_ctype.info(&f.object.dg.ctype_pool)) {
        // Bool-shaped optional: the scalar itself is the null flag (true
        // means null — see `airIsNull`), so store `false` to mark non-null.
        // There is no payload, so the result is `.none`.
        .basic => {
            const a = try Assignment.start(f, w, opt_ctype);
            try f.writeCValueDeref(w, operand);
            try a.assign(f, w);
            try f.object.dg.renderValue(w, Value.false, .Other);
            try a.end(f, w);
            return .none;
        },
        // Pointer-shaped optional: nothing to clear here; the payload
        // pointer is the operand itself, and storing through it later
        // overwrites the NULL state.
        .pointer => {
            if (f.liveness.isUnused(inst)) return .none;
            const local = try f.allocLocal(inst, inst_ty);
            const a = try Assignment.start(f, w, opt_ctype);
            try f.writeCValue(w, local, .Other);
            try a.assign(f, w);
            try f.writeCValue(w, operand, .Other);
            try a.end(f, w);
            return local;
        },
        .aligned, .array, .vector, .fwd_decl, .function => unreachable,
        // `{ payload, is_null }` struct: clear `is_null`, then (if used)
        // yield `&operand->payload`.
        .aggregate => {
            {
                const a = try Assignment.start(f, w, opt_ctype);
                try f.writeCValueDerefMember(w, operand, .{ .identifier = "is_null" });
                try a.assign(f, w);
                try f.object.dg.renderValue(w, Value.false, .Other);
                try a.end(f, w);
            }
            if (f.liveness.isUnused(inst)) return .none;
            const local = try f.allocLocal(inst, inst_ty);
            const a = try Assignment.start(f, w, opt_ctype);
            try f.writeCValue(w, local, .Other);
            try a.assign(f, w);
            try w.writeByte('&');
            try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" });
            try a.end(f, w);
            return local;
        },
    }
}
5885
/// Determines how to reach field `field_index` of the container pointed to by
/// `container_ptr_ty` when computing a field pointer:
/// * `.begin` — the field shares the container's address (zero-bit
///   container, packed layout, ...).
/// * `.field` — the field is reachable via C member access with the given
///   `CValue` designator.
/// * `.byte_offset` — the field lives at a fixed byte offset and must be
///   reached with `u8*` pointer arithmetic.
fn fieldLocation(
    container_ptr_ty: Type,
    field_ptr_ty: Type,
    field_index: u32,
    zcu: *Zcu,
) union(enum) {
    begin: void,
    field: CValue,
    byte_offset: u64,
} {
    const ip = &zcu.intern_pool;
    const container_ty: Type = .fromInterned(ip.indexToKey(container_ptr_ty.toIntern()).ptr_type.child);
    switch (ip.indexToKey(container_ty.toIntern())) {
        .struct_type => {
            const loaded_struct = ip.loadStructType(container_ty.toIntern());
            return switch (loaded_struct.layout) {
                // Zero-bit fields have no C member to name, so fall back to a
                // byte offset; otherwise use the field's declared name.
                .auto, .@"extern" => if (!container_ty.hasRuntimeBitsIgnoreComptime(zcu))
                    .begin
                else if (!field_ptr_ty.childType(zcu).hasRuntimeBitsIgnoreComptime(zcu))
                    .{ .byte_offset = loaded_struct.offsets.get(ip)[field_index] }
                else
                    .{ .field = .{ .identifier = loaded_struct.fieldName(ip, field_index).toSlice(ip) } },
                // Packed struct: with no host integer (host_size == 0) the
                // field is byte-addressed; `@divExact` asserts the total bit
                // offset is byte-aligned. Otherwise point at the start.
                .@"packed" => if (field_ptr_ty.ptrInfo(zcu).packed_offset.host_size == 0)
                    .{ .byte_offset = @divExact(zcu.structPackedFieldBitOffset(loaded_struct, field_index) +
                        container_ptr_ty.ptrInfo(zcu).packed_offset.bit_offset, 8) }
                else
                    .begin,
            };
        },
        // Tuples: same scheme as auto/extern structs, but fields are
        // addressed by index rather than by name.
        .tuple_type => return if (!container_ty.hasRuntimeBitsIgnoreComptime(zcu))
            .begin
        else if (!field_ptr_ty.childType(zcu).hasRuntimeBitsIgnoreComptime(zcu))
            .{ .byte_offset = container_ty.structFieldOffset(field_index, zcu) }
        else
            .{ .field = .{ .field = field_index } },
        .union_type => {
            const loaded_union = ip.loadUnionType(container_ty.toIntern());
            switch (loaded_union.flagsUnordered(ip).layout) {
                .auto, .@"extern" => {
                    const field_ty: Type = .fromInterned(loaded_union.field_types.get(ip)[field_index]);
                    // Zero-bit union field: point at the payload (if a tagged
                    // union with any runtime payload) or at the start.
                    if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu))
                        return if (loaded_union.hasTag(ip) and !container_ty.unionHasAllZeroBitFieldTypes(zcu))
                            .{ .field = .{ .identifier = "payload" } }
                        else
                            .begin;
                    // Tagged unions nest fields inside a `payload` member.
                    const field_name = loaded_union.loadTagType(ip).names.get(ip)[field_index];
                    return .{ .field = if (loaded_union.hasTag(ip))
                        .{ .payload_identifier = field_name.toSlice(ip) }
                    else
                        .{ .identifier = field_name.toSlice(ip) } };
                },
                .@"packed" => return .begin,
            }
        },
        // Slices are the only pointer type with addressable "fields":
        // index 0 is `ptr`, index 1 is `len`.
        .ptr_type => |ptr_info| switch (ptr_info.flags.size) {
            .one, .many, .c => unreachable,
            .slice => switch (field_index) {
                0 => return .{ .field = .{ .identifier = "ptr" } },
                1 => return .{ .field = .{ .identifier = "len" } },
                else => unreachable,
            },
        },
        else => unreachable,
    }
}
5951
/// Lowers `struct_field_ptr`: produces a pointer to the field selected by
/// the instruction's extra data, given a pointer to the container.
fn airStructFieldPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const data = f.air.instructions.items(.data)[@intFromEnum(inst)];
    const field_info = f.air.extraData(Air.StructField, data.ty_pl.payload).data;

    const base_ptr = try f.resolveInst(field_info.struct_operand);
    try reap(f, inst, &.{field_info.struct_operand});
    const base_ptr_ty = f.typeOf(field_info.struct_operand);
    return fieldPtr(f, inst, base_ptr_ty, base_ptr, field_info.field_index);
}
5961
/// Lowers the `struct_field_ptr_index_N` instructions: like
/// `airStructFieldPtr`, except the field index is supplied by the caller
/// (encoded in the instruction tag) instead of extra data.
fn airStructFieldPtrIndex(f: *Function, inst: Air.Inst.Index, index: u8) !CValue {
    const data = f.air.instructions.items(.data)[@intFromEnum(inst)];

    const base_ptr = try f.resolveInst(data.ty_op.operand);
    try reap(f, inst, &.{data.ty_op.operand});
    const base_ptr_ty = f.typeOf(data.ty_op.operand);
    return fieldPtr(f, inst, base_ptr_ty, base_ptr, index);
}
5970
/// Lowers `field_parent_ptr` (`@fieldParentPtr`): recovers a pointer to the
/// containing aggregate from a pointer to one of its fields, by subtracting
/// `offsetof(...)` (named field) or a constant byte offset from the field
/// pointer. The inverse of `fieldPtr`.
fn airFieldParentPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.FieldParentPtr, ty_pl.payload).data;

    const container_ptr_ty = f.typeOfIndex(inst);
    const container_ty = container_ptr_ty.childType(zcu);

    const field_ptr_ty = f.typeOf(extra.field_ptr);
    const field_ptr_val = try f.resolveInst(extra.field_ptr);
    try reap(f, inst, &.{extra.field_ptr});

    const w = &f.object.code.writer;
    // Emit `local = (ContainerPtrTy)<address expression>;`.
    const local = try f.allocLocal(inst, container_ptr_ty);
    try f.writeCValue(w, local, .Other);
    try w.writeAll(" = (");
    try f.renderType(w, container_ptr_ty);
    try w.writeByte(')');

    switch (fieldLocation(container_ptr_ty, field_ptr_ty, extra.field_index, zcu)) {
        // Field is at the container's address: the pointer is unchanged.
        .begin => try f.writeCValue(w, field_ptr_val, .Other),
        // Named field: `((u8*)field_ptr - offsetof(Container, field))`.
        .field => |field| {
            const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8);

            try w.writeAll("((");
            try f.renderType(w, u8_ptr_ty);
            try w.writeByte(')');
            try f.writeCValue(w, field_ptr_val, .Other);
            try w.writeAll(" - offsetof(");
            try f.renderType(w, container_ty);
            try w.writeAll(", ");
            try f.writeCValue(w, field, .Other);
            try w.writeAll("))");
        },
        // Fixed offset: `((u8*)field_ptr - byte_offset)`.
        .byte_offset => |byte_offset| {
            const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8);

            try w.writeAll("((");
            try f.renderType(w, u8_ptr_ty);
            try w.writeByte(')');
            try f.writeCValue(w, field_ptr_val, .Other);
            try w.print(" - {f})", .{
                try f.fmtIntLiteralDec(try pt.intValue(.usize, byte_offset)),
            });
        },
    }

    try w.writeByte(';');
    try f.object.newline();
    return local;
}
6023
/// Shared lowering for the field-pointer instructions: emits
/// `local = (FieldPtrTy)<address of field>;` where the address expression is
/// chosen by `fieldLocation` (container start, C member access, or `u8*`
/// arithmetic with a constant byte offset).
fn fieldPtr(
    f: *Function,
    inst: Air.Inst.Index,
    container_ptr_ty: Type,
    container_ptr_val: CValue,
    field_index: u32,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const container_ty = container_ptr_ty.childType(zcu);
    const field_ptr_ty = f.typeOfIndex(inst);

    // Ensure complete type definition is visible before accessing fields.
    _ = try f.ctypeFromType(container_ty, .complete);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, field_ptr_ty);
    try f.writeCValue(w, local, .Other);
    try w.writeAll(" = (");
    try f.renderType(w, field_ptr_ty);
    try w.writeByte(')');

    switch (fieldLocation(container_ptr_ty, field_ptr_ty, field_index, zcu)) {
        // Field shares the container's address: reuse the pointer as-is.
        .begin => try f.writeCValue(w, container_ptr_val, .Other),
        // Addressable member: `&container_ptr->field`.
        .field => |field| {
            try w.writeByte('&');
            try f.writeCValueDerefMember(w, container_ptr_val, field);
        },
        // Fixed offset: `((u8*)container_ptr + byte_offset)`.
        .byte_offset => |byte_offset| {
            const u8_ptr_ty = try pt.adjustPtrTypeChild(field_ptr_ty, .u8);

            try w.writeAll("((");
            try f.renderType(w, u8_ptr_ty);
            try w.writeByte(')');
            try f.writeCValue(w, container_ptr_val, .Other);
            try w.print(" + {f})", .{
                try f.fmtIntLiteralDec(try pt.intValue(.usize, byte_offset)),
            });
        },
    }

    try w.writeByte(';');
    try f.object.newline();
    return local;
}
6069
/// Lowers `struct_field_val`: loads a field of a by-value struct/union/tuple
/// into a fresh local via C member access. Packed structs never reach here
/// (legalized earlier — see the assert below).
fn airStructFieldVal(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.StructField, ty_pl.payload).data;

    const inst_ty = f.typeOfIndex(inst);
    // Zero-bit result: nothing to load; just release the operand.
    if (!inst_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        try reap(f, inst, &.{extra.struct_operand});
        return .none;
    }

    const struct_byval = try f.resolveInst(extra.struct_operand);
    try reap(f, inst, &.{extra.struct_operand});
    const struct_ty = f.typeOf(extra.struct_operand);
    const w = &f.object.code.writer;

    // Ensure complete type definition is visible before accessing fields.
    _ = try f.ctypeFromType(struct_ty, .complete);

    assert(struct_ty.containerLayout(zcu) != .@"packed"); // `Air.Legalize.Feature.expand_packed_struct_field_val` handles this case
    // Pick the C member designator for the field.
    const field_name: CValue = switch (ip.indexToKey(struct_ty.toIntern())) {
        .struct_type => .{ .identifier = struct_ty.structFieldName(extra.field_index, zcu).unwrap().?.toSlice(ip) },
        .union_type => name: {
            const union_type = ip.loadUnionType(struct_ty.toIntern());
            const enum_tag_ty: Type = .fromInterned(union_type.enum_tag_ty);
            const field_name_str = enum_tag_ty.enumFieldName(extra.field_index, zcu).toSlice(ip);
            // Tagged unions nest their fields inside a `payload` member.
            if (union_type.hasTag(ip)) {
                break :name .{ .payload_identifier = field_name_str };
            } else {
                break :name .{ .identifier = field_name_str };
            }
        },
        // Tuples are addressed by field index, not by name.
        .tuple_type => .{ .field = extra.field_index },
        else => unreachable,
    };

    const local = try f.allocLocal(inst, inst_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);
    try f.writeCValueMember(w, struct_byval, field_name);
    try a.end(f, w);
    return local;
}
6116
/// *(E!T) -> E
/// Note that the result is never a pointer: the error code is loaded by
/// value even when the operand is a pointer to the error union.
fn airUnwrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(ty_op.operand);
    const operand_ty = f.typeOf(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const operand_is_ptr = operand_ty.zigTypeTag(zcu) == .pointer;
    const error_union_ty = if (operand_is_ptr) operand_ty.childType(zcu) else operand_ty;
    const error_ty = error_union_ty.errorUnionSet(zcu);
    const payload_ty = error_union_ty.errorUnionPayload(zcu);
    const local = try f.allocLocal(inst, inst_ty);

    if (!payload_ty.hasRuntimeBits(zcu) and operand == .local and operand.local == local.new_local) {
        // The store will be 'x = x'; elide it.
        return local;
    }

    const w = &f.object.code.writer;
    try f.writeCValue(w, local, .Other);
    try w.writeAll(" = ");

    // With a zero-bit payload the error union *is* the error code; with an
    // empty error set the error is statically 0; otherwise read the `error`
    // member (through the pointer if needed).
    if (!payload_ty.hasRuntimeBits(zcu))
        try f.writeCValue(w, operand, .Other)
    else if (error_ty.errorSetIsEmpty(zcu))
        try w.print("{f}", .{
            try f.fmtIntLiteralDec(try pt.intValue(try pt.errorIntType(), 0)),
        })
    else if (operand_is_ptr)
        try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" })
    else
        try f.writeCValueMember(w, operand, .{ .identifier = "error" });
    try w.writeByte(';');
    try f.object.newline();
    return local;
}
6158
/// Lowers `unwrap_errunion_payload` (and its `_ptr` variant, `is_ptr`):
/// extracts the payload — or a pointer to it — from an error union.
fn airUnwrapErrUnionPay(f: *Function, inst: Air.Inst.Index, is_ptr: bool) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});
    const operand_ty = f.typeOf(ty_op.operand);
    const error_union_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;

    const w = &f.object.code.writer;
    // Zero-bit payload: by value there is nothing to produce; by pointer the
    // operand pointer is simply cast to the result pointer type.
    if (!error_union_ty.errorUnionPayload(zcu).hasRuntimeBits(zcu)) {
        if (!is_ptr) return .none;

        const local = try f.allocLocal(inst, inst_ty);
        try f.writeCValue(w, local, .Other);
        try w.writeAll(" = (");
        try f.renderType(w, inst_ty);
        try w.writeByte(')');
        try f.writeCValue(w, operand, .Other);
        try w.writeByte(';');
        try f.object.newline();
        return local;
    }

    // Runtime payload: read the `payload` member, or take its address for
    // the pointer variant.
    const local = try f.allocLocal(inst, inst_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);
    if (is_ptr) {
        try w.writeByte('&');
        try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" });
    } else try f.writeCValueMember(w, operand, .{ .identifier = "payload" });
    try a.end(f, w);
    return local;
}
6196
/// Lowers `wrap_optional`: wraps a payload value into its optional type.
/// For layouts where the payload and the optional share a representation
/// this is a plain move; the struct layout additionally clears `is_null`.
fn airWrapOptional(f: *Function, inst: Air.Inst.Index) !CValue {
    const ctype_pool = &f.object.dg.ctype_pool;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const inst_ctype = try f.ctypeFromType(inst_ty, .complete);
    // Bool-shaped optional: "non-null" is the constant `true` value.
    if (inst_ctype.isBool()) return .{ .constant = Value.true };

    const operand = try f.resolveInst(ty_op.operand);
    switch (inst_ctype.info(ctype_pool)) {
        // Scalar layout: the payload already is the optional's representation.
        .basic, .pointer => return f.moveCValue(inst, inst_ty, operand),
        .aligned, .array, .vector, .fwd_decl, .function => unreachable,
        .aggregate => |aggregate| switch (aggregate.fields.at(0, ctype_pool).name.index) {
            // `{ payload, is_null }` struct: set `is_null = false` and copy
            // the payload in.
            .is_null, .payload => {
                const operand_ctype = try f.ctypeFromType(f.typeOf(ty_op.operand), .complete);
                const w = &f.object.code.writer;
                const local = try f.allocLocal(inst, inst_ty);
                {
                    const a = try Assignment.start(f, w, .bool);
                    try f.writeCValueMember(w, local, .{ .identifier = "is_null" });
                    try a.assign(f, w);
                    try w.writeAll("false");
                    try a.end(f, w);
                }
                {
                    const a = try Assignment.start(f, w, operand_ctype);
                    try f.writeCValueMember(w, local, .{ .identifier = "payload" });
                    try a.assign(f, w);
                    try f.writeCValue(w, operand, .Other);
                    try a.end(f, w);
                }
                return local;
            },
            // Slice-shaped optional: the payload doubles as the optional.
            .ptr, .len => return f.moveCValue(inst, inst_ty, operand),
            else => unreachable,
        },
    }
}
6235
/// Lowers `wrap_errunion_err`: builds an error union holding an error code.
/// When the payload is zero-bit the error union is represented by the error
/// code alone; otherwise the payload member is set to undef and the `error`
/// member carries the code.
fn airWrapErrUnionErr(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const payload_ty = inst_ty.errorUnionPayload(zcu);
    // Zero-bit payload means the whole union is just the error code.
    const repr_is_err = !payload_ty.hasRuntimeBitsIgnoreComptime(zcu);
    const err_ty = inst_ty.errorUnionSet(zcu);
    const err = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);

    if (repr_is_err and err == .local and err.local == local.new_local) {
        // The store will be 'x = x'; elide it.
        return local;
    }

    if (!repr_is_err) {
        // Initialize the unused payload member to undef.
        const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete));
        try f.writeCValueMember(w, local, .{ .identifier = "payload" });
        try a.assign(f, w);
        try f.object.dg.renderUndefValue(w, payload_ty, .Other);
        try a.end(f, w);
    }
    {
        // Store the error code (directly, or into the `error` member).
        const a = try Assignment.start(f, w, try f.ctypeFromType(err_ty, .complete));
        if (repr_is_err)
            try f.writeCValue(w, local, .Other)
        else
            try f.writeCValueMember(w, local, .{ .identifier = "error" });
        try a.assign(f, w);
        try f.writeCValue(w, err, .Other);
        try a.end(f, w);
    }
    return local;
}
6275
/// Lowers `errunion_payload_ptr_set`: stores the "no error" code (0) into
/// the error union pointed to by the operand and, when the result is used,
/// returns a pointer to the payload member.
fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const w = &f.object.code.writer;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;
    const inst_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(ty_op.operand);
    const operand_ty = f.typeOf(ty_op.operand);
    const error_union_ty = operand_ty.childType(zcu);

    const payload_ty = error_union_ty.errorUnionPayload(zcu);
    const err_int_ty = try pt.errorIntType();
    const no_err = try pt.intValue(err_int_ty, 0);
    try reap(f, inst, &.{ty_op.operand});

    // First, set the non-error value.
    // Zero-bit payload: the pointee is just the error code; clear it and
    // there is no payload pointer to return.
    if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        const a = try Assignment.start(f, w, try f.ctypeFromType(operand_ty, .complete));
        try f.writeCValueDeref(w, operand);
        try a.assign(f, w);
        try w.print("{f}", .{try f.fmtIntLiteralDec(no_err)});
        try a.end(f, w);
        return .none;
    }
    {
        const a = try Assignment.start(f, w, try f.ctypeFromType(err_int_ty, .complete));
        try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" });
        try a.assign(f, w);
        try w.print("{f}", .{try f.fmtIntLiteralDec(no_err)});
        try a.end(f, w);
    }

    // Then return the payload pointer (only if it is used)
    if (f.liveness.isUnused(inst)) return .none;

    const local = try f.allocLocal(inst, inst_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);
    try w.writeByte('&');
    try f.writeCValueDerefMember(w, operand, .{ .identifier = "payload" });
    try a.end(f, w);
    return local;
}
6320
/// Error return traces are not yet supported by the C backend; lowering this
/// instruction reports a "not implemented" compile error.
fn airErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue {
    _ = inst;
    return f.fail("TODO: C backend: implement airErrReturnTrace", .{});
}
6325
/// Error return traces are not yet supported by the C backend; lowering this
/// instruction reports a "not implemented" compile error.
fn airSetErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue {
    _ = inst;
    return f.fail("TODO: C backend: implement airSetErrReturnTrace", .{});
}
6330
/// Error return traces are not yet supported by the C backend; lowering this
/// instruction reports a "not implemented" compile error.
fn airSaveErrReturnTraceIndex(f: *Function, inst: Air.Inst.Index) !CValue {
    _ = inst;
    return f.fail("TODO: C backend: implement airSaveErrReturnTraceIndex", .{});
}
6335
/// Lowers `wrap_errunion_payload`: builds an error union value holding the
/// payload operand with the error set to 0 (no error). When the payload type
/// has no runtime bits, the error union is represented by the error value
/// alone, so only the 0 store is emitted.
fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const payload_ty = inst_ty.errorUnionPayload(zcu);
    const payload = try f.resolveInst(ty_op.operand);
    // Zero-bit payload means the representation is just the error value itself.
    const repr_is_err = !payload_ty.hasRuntimeBitsIgnoreComptime(zcu);
    const err_ty = inst_ty.errorUnionSet(zcu);
    try reap(f, inst, &.{ty_op.operand});

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    if (!repr_is_err) {
        const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete));
        try f.writeCValueMember(w, local, .{ .identifier = "payload" });
        try a.assign(f, w);
        try f.writeCValue(w, payload, .Other);
        try a.end(f, w);
    }
    {
        // Mark "no error" by storing 0 into the error field (or into the
        // whole local when the representation is the error alone).
        const a = try Assignment.start(f, w, try f.ctypeFromType(err_ty, .complete));
        if (repr_is_err)
            try f.writeCValue(w, local, .Other)
        else
            try f.writeCValueMember(w, local, .{ .identifier = "error" });
        try a.assign(f, w);
        try f.object.dg.renderValue(w, try pt.intValue(try pt.errorIntType(), 0), .Other);
        try a.end(f, w);
    }
    return local;
}
6369
/// Lowers error-presence checks: emits a bool local assigned the result of
/// `<error expression> <operator> 0`, where `operator` is the C comparison
/// (e.g. "!=" or "==") chosen by the caller and 0 is the "no error" value.
/// `is_ptr` selects dereferencing member access for pointer operands.
fn airIsErr(f: *Function, inst: Air.Inst.Index, is_ptr: bool, operator: []const u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const w = &f.object.code.writer;
    const operand = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});
    const operand_ty = f.typeOf(un_op);
    const local = try f.allocLocal(inst, .bool);
    const err_union_ty = if (is_ptr) operand_ty.childType(zcu) else operand_ty;
    const payload_ty = err_union_ty.errorUnionPayload(zcu);
    const error_ty = err_union_ty.errorUnionSet(zcu);

    const a = try Assignment.start(f, w, .bool);
    try f.writeCValue(w, local, .Other);
    try a.assign(f, w);
    const err_int_ty = try pt.errorIntType();
    // Left-hand side of the comparison:
    // - non-empty error set, payload with runtime bits: the "error" member
    // - non-empty error set, zero-bit payload: the operand is the error value
    // - empty error set: the constant 0, so the comparison folds to a constant
    if (!error_ty.errorSetIsEmpty(zcu))
        if (payload_ty.hasRuntimeBits(zcu))
            if (is_ptr)
                try f.writeCValueDerefMember(w, operand, .{ .identifier = "error" })
            else
                try f.writeCValueMember(w, operand, .{ .identifier = "error" })
        else
            try f.writeCValue(w, operand, .Other)
    else
        try f.object.dg.renderValue(w, try pt.intValue(err_int_ty, 0), .Other);
    try w.writeByte(' ');
    try w.writeAll(operator);
    try w.writeByte(' ');
    try f.object.dg.renderValue(w, try pt.intValue(err_int_ty, 0), .Other);
    try a.end(f, w);
    return local;
}
6405
/// Lowers `array_to_slice`: builds a slice local from a pointer-to-array
/// operand by assigning its "ptr" and "len" members. The pointer is cast
/// when the slice's element C type differs from the array's element C type,
/// and is decayed via `&(*p)[0]` when the operand's C representation is an
/// actual array type.
fn airArrayToSlice(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ctype_pool = &f.object.dg.ctype_pool;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});
    const inst_ty = f.typeOfIndex(inst);
    const ptr_ty = inst_ty.slicePtrFieldType(zcu);
    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const operand_ty = f.typeOf(ty_op.operand);
    const array_ty = operand_ty.childType(zcu);

    {
        const a = try Assignment.start(f, w, try f.ctypeFromType(ptr_ty, .complete));
        try f.writeCValueMember(w, local, .{ .identifier = "ptr" });
        try a.assign(f, w);
        if (operand == .undef) {
            // Undefined operand: render an undefined pointer value.
            try f.writeCValue(w, .{ .undef = inst_ty.slicePtrFieldType(zcu) }, .Other);
        } else {
            const ptr_ctype = try f.ctypeFromType(ptr_ty, .complete);
            const ptr_child_ctype = ptr_ctype.info(ctype_pool).pointer.elem_ctype;
            const elem_ty = array_ty.childType(zcu);
            const elem_ctype = try f.ctypeFromType(elem_ty, .complete);
            if (!ptr_child_ctype.eql(elem_ctype)) {
                // Element C types differ; cast to the slice's pointer type.
                try w.writeByte('(');
                try f.renderCType(w, ptr_ctype);
                try w.writeByte(')');
            }
            const operand_ctype = try f.ctypeFromType(operand_ty, .complete);
            const operand_child_ctype = operand_ctype.info(ctype_pool).pointer.elem_ctype;
            if (operand_child_ctype.info(ctype_pool) == .array) {
                // Pointer to a C array: take the address of element 0 to
                // obtain a pointer to the first element.
                try w.writeByte('&');
                try f.writeCValueDeref(w, operand);
                try w.print("[{f}]", .{try f.fmtIntLiteralDec(.zero_usize)});
            } else try f.writeCValue(w, operand, .Other);
        }
        try a.end(f, w);
    }
    {
        // The length is known statically from the array type.
        const a = try Assignment.start(f, w, .usize);
        try f.writeCValueMember(w, local, .{ .identifier = "len" });
        try a.assign(f, w);
        try w.print("{f}", .{
            try f.fmtIntLiteralDec(try pt.intValue(.usize, array_ty.arrayLen(zcu))),
        });
        try a.end(f, w);
    }

    return local;
}
6459
/// Lowers float/int conversions to compiler-rt style helper calls named
/// `zig_<op><src abbrev><dst abbrev>`, where <op> is:
/// - "trunc"/"extend" for float -> smaller/larger float
/// - "fix"/"fixuns"   for float -> signed/unsigned int
/// - "float"/"floatun" for signed/unsigned int -> float
/// Float-to-int results are additionally wrapped to the destination integer's
/// bit width via `zig_wrap_<type>`. Vector operands are handled element-wise
/// through `Vectorize`.
fn airFloatCast(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});
    const operand_ty = f.typeOf(ty_op.operand);
    const scalar_ty = operand_ty.scalarType(zcu);
    const target = &f.object.dg.mod.resolved_target.result;
    // Select the compiler-rt operation name from the source/destination
    // scalar type combination; any other combination is not a float cast.
    const operation = if (inst_scalar_ty.isRuntimeFloat() and scalar_ty.isRuntimeFloat())
        if (inst_scalar_ty.floatBits(target) < scalar_ty.floatBits(target)) "trunc" else "extend"
    else if (inst_scalar_ty.isInt(zcu) and scalar_ty.isRuntimeFloat())
        if (inst_scalar_ty.isSignedInt(zcu)) "fix" else "fixuns"
    else if (inst_scalar_ty.isRuntimeFloat() and scalar_ty.isInt(zcu))
        if (scalar_ty.isSignedInt(zcu)) "float" else "floatun"
    else
        unreachable;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, operand_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(scalar_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try a.assign(f, w);
    if (inst_scalar_ty.isInt(zcu) and scalar_ty.isRuntimeFloat()) {
        // float -> int: wrap the helper's result to the destination bit width.
        try w.writeAll("zig_wrap_");
        try f.object.dg.renderTypeForBuiltinFnName(w, inst_scalar_ty);
        try w.writeByte('(');
    }
    try w.writeAll("zig_");
    try w.writeAll(operation);
    try w.writeAll(compilerRtAbbrev(scalar_ty, zcu, target));
    try w.writeAll(compilerRtAbbrev(inst_scalar_ty, zcu, target));
    try w.writeByte('(');
    try f.writeCValue(w, operand, .FunctionArgument);
    try v.elem(f, w);
    try w.writeByte(')');
    if (inst_scalar_ty.isInt(zcu) and scalar_ty.isRuntimeFloat()) {
        // Close the zig_wrap_ call, passing the destination's bit info.
        try f.object.dg.renderBuiltinInfo(w, inst_scalar_ty, .bits);
        try w.writeByte(')');
    }
    try a.end(f, w);
    try v.end(f, inst, w);

    return local;
}
6510
/// Lowers a unary operation to a call of a `zig_<operation>_<type>` helper.
/// When the result's scalar C type is an array (which C cannot return by
/// value), the result local is instead passed as the first argument
/// ("ref_ret") and no assignment is emitted. Vector operands are handled
/// element-wise through `Vectorize`.
fn airUnBuiltinCall(
    f: *Function,
    inst: Air.Inst.Index,
    operand_ref: Air.Inst.Ref,
    operation: []const u8,
    info: BuiltinInfo,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;

    const operand = try f.resolveInst(operand_ref);
    try reap(f, inst, &.{operand_ref});
    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const operand_ty = f.typeOf(operand_ref);
    const scalar_ty = operand_ty.scalarType(zcu);

    const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete);
    // Array-typed scalar result -> pass result by reference.
    const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, operand_ty);
    if (!ref_ret) {
        try f.writeCValue(w, local, .Other);
        try v.elem(f, w);
        try w.writeAll(" = ");
    }
    try w.print("zig_{s}_", .{operation});
    try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
    try w.writeByte('(');
    if (ref_ret) {
        try f.writeCValue(w, local, .FunctionArgument);
        try v.elem(f, w);
        try w.writeAll(", ");
    }
    try f.writeCValue(w, operand, .FunctionArgument);
    try v.elem(f, w);
    try f.object.dg.renderBuiltinInfo(w, scalar_ty, info);
    try w.writeAll(");");
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
6556
/// Lowers a binary operation to a call of a `zig_<operation>_<type>` helper.
/// Mirrors `airUnBuiltinCall`, but with two operands. When the result's
/// scalar C type is an array (which C cannot return by value), the result
/// local is passed as the first argument ("ref_ret") and no assignment is
/// emitted. Vector operands are handled element-wise through `Vectorize`.
fn airBinBuiltinCall(
    f: *Function,
    inst: Air.Inst.Index,
    operation: []const u8,
    info: BuiltinInfo,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;

    const operand_ty = f.typeOf(bin_op.lhs);
    const operand_ctype = try f.ctypeFromType(operand_ty, .complete);
    // "Big" operands lower to C arrays; defer reaping them until after the
    // result local is allocated (see below).
    const is_big = operand_ctype.info(&f.object.dg.ctype_pool) == .array;

    const lhs = try f.resolveInst(bin_op.lhs);
    const rhs = try f.resolveInst(bin_op.rhs);
    if (!is_big) try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const scalar_ty = operand_ty.scalarType(zcu);

    const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete);
    // Array-typed scalar result -> pass result by reference.
    const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    if (is_big) try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
    const v = try Vectorize.start(f, inst, w, operand_ty);
    if (!ref_ret) {
        try f.writeCValue(w, local, .Other);
        try v.elem(f, w);
        try w.writeAll(" = ");
    }
    try w.print("zig_{s}_", .{operation});
    try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
    try w.writeByte('(');
    if (ref_ret) {
        try f.writeCValue(w, local, .FunctionArgument);
        try v.elem(f, w);
        try w.writeAll(", ");
    }
    try f.writeCValue(w, lhs, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, rhs, .FunctionArgument);
    if (f.typeOf(bin_op.rhs).isVector(zcu)) try v.elem(f, w);
    try f.object.dg.renderBuiltinInfo(w, scalar_ty, info);
    // Use newline() rather than a raw '\n' so the emitter's line/indentation
    // state stays consistent, matching airUnBuiltinCall/airCmpBuiltinCall.
    try w.writeAll(");");
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
6610
/// Lowers a comparison to a `zig_<cmp or operator abbrev>_<type>` helper
/// call. With `operation == .cmp` the helper name is "cmp"; otherwise it is
/// the abbreviation of `operator`. Unless the result is returned by
/// reference (array-typed scalar C type), the call's value is compared
/// against 0 with the C form of `operator` to produce the boolean result.
fn airCmpBuiltinCall(
    f: *Function,
    inst: Air.Inst.Index,
    data: anytype,
    operator: std.math.CompareOperator,
    operation: enum { cmp, operator },
    info: BuiltinInfo,
) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const lhs = try f.resolveInst(data.lhs);
    const rhs = try f.resolveInst(data.rhs);
    try reap(f, inst, &.{ data.lhs, data.rhs });

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);
    const operand_ty = f.typeOf(data.lhs);
    const scalar_ty = operand_ty.scalarType(zcu);

    const inst_scalar_ctype = try f.ctypeFromType(inst_scalar_ty, .complete);
    // Array-typed scalar result -> pass result by reference.
    const ref_ret = inst_scalar_ctype.info(&f.object.dg.ctype_pool) == .array;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, operand_ty);
    if (!ref_ret) {
        try f.writeCValue(w, local, .Other);
        try v.elem(f, w);
        try w.writeAll(" = ");
    }
    try w.print("zig_{s}_", .{switch (operation) {
        else => @tagName(operation),
        .operator => compareOperatorAbbrev(operator),
    }});
    try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
    try w.writeByte('(');
    if (ref_ret) {
        try f.writeCValue(w, local, .FunctionArgument);
        try v.elem(f, w);
        try w.writeAll(", ");
    }
    try f.writeCValue(w, lhs, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, rhs, .FunctionArgument);
    try v.elem(f, w);
    try f.object.dg.renderBuiltinInfo(w, scalar_ty, info);
    try w.writeByte(')');
    // Compare the helper's result against 0 to obtain the boolean value.
    if (!ref_ret) try w.print("{s}{f}", .{
        compareOperatorC(operator),
        try f.fmtIntLiteralDec(try pt.intValue(.i32, 0)),
    });
    try w.writeByte(';');
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
6669
/// Lowers `cmpxchg_weak`/`cmpxchg_strong` (`flavor` is the suffix of the
/// `zig_cmpxchg_*` helper). Two result shapes are emitted:
/// - pointer-like optional result: the local starts as the expected value
///   and is set to NULL when the exchange succeeds, so a non-null result is
///   the observed old value;
/// - otherwise: a struct local whose "payload" receives the expected (and,
///   NOTE(review): presumably the observed) value and whose "is_null"
///   receives the helper's result — confirm exact semantics against zig.h.
/// Runtime floats are reinterpreted through a same-size unsigned integer
/// type for the atomic representation.
fn airCmpxchg(f: *Function, inst: Air.Inst.Index, flavor: [*:0]const u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.Cmpxchg, ty_pl.payload).data;
    const inst_ty = f.typeOfIndex(inst);
    const ptr = try f.resolveInst(extra.ptr);
    const expected_value = try f.resolveInst(extra.expected_value);
    const new_value = try f.resolveInst(extra.new_value);
    const ptr_ty = f.typeOf(extra.ptr);
    const ty = ptr_ty.childType(zcu);
    const ctype = try f.ctypeFromType(ty, .complete);

    const w = &f.object.code.writer;
    const new_value_mat = try Materialize.start(f, inst, ty, new_value);
    try reap(f, inst, &.{ extra.ptr, extra.expected_value, extra.new_value });

    // Atomic representation type: same-size unsigned int for floats.
    const repr_ty = if (ty.isRuntimeFloat())
        pt.intType(.unsigned, @as(u16, @intCast(ty.abiSize(zcu) * 8))) catch unreachable
    else
        ty;

    const local = try f.allocLocal(inst, inst_ty);
    if (inst_ty.isPtrLikeOptional(zcu)) {
        {
            // Seed the result with the expected value; the helper updates it
            // in place when the comparison fails.
            const a = try Assignment.start(f, w, ctype);
            try f.writeCValue(w, local, .Other);
            try a.assign(f, w);
            try f.writeCValue(w, expected_value, .Other);
            try a.end(f, w);
        }

        try w.writeAll("if (");
        try w.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor});
        try f.renderType(w, ty);
        try w.writeByte(')');
        if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile");
        try w.writeAll(" *)");
        try f.writeCValue(w, ptr, .Other);
        try w.writeAll(", ");
        try f.writeCValue(w, local, .FunctionArgument);
        try w.writeAll(", ");
        try new_value_mat.mat(f, w);
        try w.writeAll(", ");
        try writeMemoryOrder(w, extra.successOrder());
        try w.writeAll(", ");
        try writeMemoryOrder(w, extra.failureOrder());
        try w.writeAll(", ");
        try f.object.dg.renderTypeForBuiltinFnName(w, ty);
        try w.writeAll(", ");
        try f.renderType(w, repr_ty);
        try w.writeByte(')');
        try w.writeAll(") {");
        f.object.indent();
        try f.object.newline();
        {
            // Success: a null optional signals the exchange happened.
            const a = try Assignment.start(f, w, ctype);
            try f.writeCValue(w, local, .Other);
            try a.assign(f, w);
            try w.writeAll("NULL");
            try a.end(f, w);
        }
        try f.object.outdent();
        try w.writeByte('}');
        try f.object.newline();
    } else {
        {
            const a = try Assignment.start(f, w, ctype);
            try f.writeCValueMember(w, local, .{ .identifier = "payload" });
            try a.assign(f, w);
            try f.writeCValue(w, expected_value, .Other);
            try a.end(f, w);
        }
        {
            // The helper's result goes into "is_null"; "payload" is passed
            // as the expected-value argument.
            const a = try Assignment.start(f, w, .bool);
            try f.writeCValueMember(w, local, .{ .identifier = "is_null" });
            try a.assign(f, w);
            try w.print("zig_cmpxchg_{s}((zig_atomic(", .{flavor});
            try f.renderType(w, ty);
            try w.writeByte(')');
            if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile");
            try w.writeAll(" *)");
            try f.writeCValue(w, ptr, .Other);
            try w.writeAll(", ");
            try f.writeCValueMember(w, local, .{ .identifier = "payload" });
            try w.writeAll(", ");
            try new_value_mat.mat(f, w);
            try w.writeAll(", ");
            try writeMemoryOrder(w, extra.successOrder());
            try w.writeAll(", ");
            try writeMemoryOrder(w, extra.failureOrder());
            try w.writeAll(", ");
            try f.object.dg.renderTypeForBuiltinFnName(w, ty);
            try w.writeAll(", ");
            try f.renderType(w, repr_ty);
            try w.writeByte(')');
            try a.end(f, w);
        }
    }
    try new_value_mat.end(f, inst);

    if (f.liveness.isUnused(inst)) {
        try freeLocal(f, inst, local.new_local, null);
        return .none;
    }

    return local;
}
6778
/// Lowers `atomic_rmw` to a `zig_atomicrmw_<op>[_float|_int128]` helper call
/// that stores the previous value into a fresh local. Runtime floats are
/// reinterpreted through a same-size unsigned integer representation type.
fn airAtomicRmw(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const extra = f.air.extraData(Air.AtomicRmw, pl_op.payload).data;
    const inst_ty = f.typeOfIndex(inst);
    const ptr_ty = f.typeOf(pl_op.operand);
    const ty = ptr_ty.childType(zcu);
    const ptr = try f.resolveInst(pl_op.operand);
    const operand = try f.resolveInst(extra.operand);

    const w = &f.object.code.writer;
    const operand_mat = try Materialize.start(f, inst, ty, operand);
    try reap(f, inst, &.{ pl_op.operand, extra.operand });

    const repr_bits: u16 = @intCast(ty.abiSize(zcu) * 8);
    const is_float = ty.isRuntimeFloat();
    const is_128 = repr_bits == 128;
    const repr_ty = if (is_float) pt.intType(.unsigned, repr_bits) catch unreachable else ty;

    const local = try f.allocLocal(inst, inst_ty);
    try w.print("zig_atomicrmw_{s}", .{toAtomicRmwSuffix(extra.op())});
    if (is_float) try w.writeAll("_float") else if (is_128) try w.writeAll("_int128");
    try w.writeByte('(');
    try f.writeCValue(w, local, .Other);
    try w.writeAll(", (");
    const use_atomic = switch (extra.op()) {
        else => true,
        // These are missing from stdatomic.h, so no atomic types unless a fallback is used.
        .Nand, .Min, .Max => is_float or is_128,
    };
    if (use_atomic) try w.writeAll("zig_atomic(");
    try f.renderType(w, ty);
    if (use_atomic) try w.writeByte(')');
    if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile");
    try w.writeAll(" *)");
    try f.writeCValue(w, ptr, .Other);
    try w.writeAll(", ");
    try operand_mat.mat(f, w);
    try w.writeAll(", ");
    try writeMemoryOrder(w, extra.ordering());
    try w.writeAll(", ");
    try f.object.dg.renderTypeForBuiltinFnName(w, ty);
    try w.writeAll(", ");
    try f.renderType(w, repr_ty);
    try w.writeAll(");");
    try f.object.newline();
    try operand_mat.end(f, inst);

    // The helper was still emitted for its side effect; drop the dead local.
    if (f.liveness.isUnused(inst)) {
        try freeLocal(f, inst, local.new_local, null);
        return .none;
    }

    return local;
}
6835
/// Lowers `atomic_load` to a `zig_atomic_load` helper call that stores the
/// loaded value into a fresh local. Runtime floats are reinterpreted through
/// a same-size unsigned integer representation type.
fn airAtomicLoad(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const atomic_load = f.air.instructions.items(.data)[@intFromEnum(inst)].atomic_load;
    const ptr = try f.resolveInst(atomic_load.ptr);
    try reap(f, inst, &.{atomic_load.ptr});
    const ptr_ty = f.typeOf(atomic_load.ptr);
    const ty = ptr_ty.childType(zcu);

    const repr_ty = if (ty.isRuntimeFloat())
        pt.intType(.unsigned, @as(u16, @intCast(ty.abiSize(zcu) * 8))) catch unreachable
    else
        ty;

    const inst_ty = f.typeOfIndex(inst);
    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);

    try w.writeAll("zig_atomic_load(");
    try f.writeCValue(w, local, .Other);
    try w.writeAll(", (zig_atomic(");
    try f.renderType(w, ty);
    try w.writeByte(')');
    if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile");
    try w.writeAll(" *)");
    try f.writeCValue(w, ptr, .Other);
    try w.writeAll(", ");
    try writeMemoryOrder(w, atomic_load.order);
    try w.writeAll(", ");
    try f.object.dg.renderTypeForBuiltinFnName(w, ty);
    try w.writeAll(", ");
    try f.renderType(w, repr_ty);
    try w.writeAll(");");
    try f.object.newline();

    return local;
}
6873
/// Lowers `atomic_store` to a `zig_atomic_store` helper call. `order` is the
/// C memory-order token spliced directly into the call. Runtime floats are
/// reinterpreted through a same-size unsigned integer representation type.
/// Produces no result value.
fn airAtomicStore(f: *Function, inst: Air.Inst.Index, order: [*:0]const u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const ptr_ty = f.typeOf(bin_op.lhs);
    const ty = ptr_ty.childType(zcu);
    const ptr = try f.resolveInst(bin_op.lhs);
    const element = try f.resolveInst(bin_op.rhs);

    const w = &f.object.code.writer;
    const element_mat = try Materialize.start(f, inst, ty, element);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const repr_ty = if (ty.isRuntimeFloat())
        pt.intType(.unsigned, @as(u16, @intCast(ty.abiSize(zcu) * 8))) catch unreachable
    else
        ty;

    try w.writeAll("zig_atomic_store((zig_atomic(");
    try f.renderType(w, ty);
    try w.writeByte(')');
    if (ptr_ty.isVolatilePtr(zcu)) try w.writeAll(" volatile");
    try w.writeAll(" *)");
    try f.writeCValue(w, ptr, .Other);
    try w.writeAll(", ");
    try element_mat.mat(f, w);
    try w.print(", {s}, ", .{order});
    try f.object.dg.renderTypeForBuiltinFnName(w, ty);
    try w.writeAll(", ");
    try f.renderType(w, repr_ty);
    try w.writeAll(");");
    try f.object.newline();
    try element_mat.end(f, inst);

    return .none;
}
6910
/// Emits the C expression for a pointer argument: the `ptr` member when the
/// operand is a slice, otherwise the operand itself.
fn writeSliceOrPtr(f: *Function, w: *Writer, ptr: CValue, ptr_ty: Type) !void {
    const zcu = f.object.dg.pt.zcu;
    if (ptr_ty.isSlice(zcu))
        try f.writeCValueMember(w, ptr, .{ .identifier = "ptr" })
    else
        try f.writeCValue(w, ptr, .FunctionArgument);
}
6920
/// Lowers `memset`/`memset_safe`. Three strategies:
/// - undef value: no-op without safety; with safety, memset the bytes to the
///   0xaa undefined pattern;
/// - element size > 1 byte or volatile destination: an explicit element-wise
///   assignment loop (memset only works per byte);
/// - otherwise: a plain byte memset with the value bitcast to u8.
fn airMemset(f: *Function, inst: Air.Inst.Index, safety: bool) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const dest_ty = f.typeOf(bin_op.lhs);
    const dest_slice = try f.resolveInst(bin_op.lhs);
    const value = try f.resolveInst(bin_op.rhs);
    const elem_ty = f.typeOf(bin_op.rhs);
    const elem_abi_size = elem_ty.abiSize(zcu);
    const val_is_undef = if (try f.air.value(bin_op.rhs, pt)) |val| val.isUndef(zcu) else false;
    const w = &f.object.code.writer;

    if (val_is_undef) {
        if (!safety) {
            // Writing undef without safety checks emits nothing.
            try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
            return .none;
        }

        // Safety-checked undef: fill with the 0xaa undefined byte pattern.
        try w.writeAll("memset(");
        switch (dest_ty.ptrSize(zcu)) {
            .slice => {
                try f.writeCValueMember(w, dest_slice, .{ .identifier = "ptr" });
                try w.writeAll(", 0xaa, ");
                try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" });
                // len counts elements; scale to bytes when needed.
                if (elem_abi_size > 1) {
                    try w.print(" * {d}", .{elem_abi_size});
                }
                try w.writeAll(");");
                try f.object.newline();
            },
            .one => {
                const array_ty = dest_ty.childType(zcu);
                const len = array_ty.arrayLen(zcu) * elem_abi_size;

                try f.writeCValue(w, dest_slice, .FunctionArgument);
                try w.print(", 0xaa, {d});", .{len});
                try f.object.newline();
            },
            .many, .c => unreachable,
        }
        try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
        return .none;
    }

    if (elem_abi_size > 1 or dest_ty.isVolatilePtr(zcu)) {
        // For the assignment in this loop, the array pointer needs to get
        // casted to a regular pointer, otherwise an error like this occurs:
        // error: array type 'uint32_t[20]' (aka 'unsigned int[20]') is not assignable
        const elem_ptr_ty = try pt.ptrType(.{
            .child = elem_ty.toIntern(),
            .flags = .{
                .size = .c,
            },
        });

        const index = try f.allocLocal(inst, .usize);

        // for (i = 0; i != <len>; ++i) <dest>[i] = <value>;
        try w.writeAll("for (");
        try f.writeCValue(w, index, .Other);
        try w.writeAll(" = ");
        try f.object.dg.renderValue(w, .zero_usize, .Other);
        try w.writeAll("; ");
        try f.writeCValue(w, index, .Other);
        try w.writeAll(" != ");
        switch (dest_ty.ptrSize(zcu)) {
            .slice => {
                try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" });
            },
            .one => {
                const array_ty = dest_ty.childType(zcu);
                try w.print("{d}", .{array_ty.arrayLen(zcu)});
            },
            .many, .c => unreachable,
        }
        try w.writeAll("; ++");
        try f.writeCValue(w, index, .Other);
        try w.writeAll(") ");

        const a = try Assignment.start(f, w, try f.ctypeFromType(elem_ty, .complete));
        try w.writeAll("((");
        try f.renderType(w, elem_ptr_ty);
        try w.writeByte(')');
        try writeSliceOrPtr(f, w, dest_slice, dest_ty);
        try w.writeAll(")[");
        try f.writeCValue(w, index, .Other);
        try w.writeByte(']');
        try a.assign(f, w);
        try f.writeCValue(w, value, .Other);
        try a.end(f, w);

        try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
        try freeLocal(f, inst, index.new_local, null);

        return .none;
    }

    // Single-byte, non-volatile elements: a plain memset with the value
    // reinterpreted as u8.
    const bitcasted = try bitcast(f, .u8, value, elem_ty);

    try w.writeAll("memset(");
    switch (dest_ty.ptrSize(zcu)) {
        .slice => {
            try f.writeCValueMember(w, dest_slice, .{ .identifier = "ptr" });
            try w.writeAll(", ");
            try f.writeCValue(w, bitcasted, .FunctionArgument);
            try w.writeAll(", ");
            try f.writeCValueMember(w, dest_slice, .{ .identifier = "len" });
            try w.writeAll(");");
            try f.object.newline();
        },
        .one => {
            const array_ty = dest_ty.childType(zcu);
            const len = array_ty.arrayLen(zcu) * elem_abi_size;

            try f.writeCValue(w, dest_slice, .FunctionArgument);
            try w.writeAll(", ");
            try f.writeCValue(w, bitcasted, .FunctionArgument);
            try w.print(", {d});", .{len});
            try f.object.newline();
        },
        .many, .c => unreachable,
    }
    try f.freeCValue(inst, bitcasted);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
    return .none;
}
7046
/// Lowers a memory-copy instruction. `function_paren` is the call prefix up
/// to and including the opening '(' (the copy function to invoke). The byte
/// count is derived from the destination: element count times element size.
/// Produces no result value.
fn airMemcpy(f: *Function, inst: Air.Inst.Index, function_paren: []const u8) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;
    const dest_ptr = try f.resolveInst(bin_op.lhs);
    const src_ptr = try f.resolveInst(bin_op.rhs);
    const dest_ty = f.typeOf(bin_op.lhs);
    const src_ty = f.typeOf(bin_op.rhs);
    const w = &f.object.code.writer;

    // Runtime-length destination: guard the call so it is skipped when the
    // length is zero (C copy functions have undefined behavior on invalid
    // pointers even with a zero count).
    if (dest_ty.ptrSize(zcu) != .one) {
        try w.writeAll("if (");
        try writeArrayLen(f, dest_ptr, dest_ty);
        try w.writeAll(" != 0) ");
    }
    try w.writeAll(function_paren);
    try writeSliceOrPtr(f, w, dest_ptr, dest_ty);
    try w.writeAll(", ");
    try writeSliceOrPtr(f, w, src_ptr, src_ty);
    try w.writeAll(", ");
    try writeArrayLen(f, dest_ptr, dest_ty);
    try w.writeAll(" * sizeof(");
    try f.renderType(w, dest_ty.elemType2(zcu));
    try w.writeAll("));");
    try f.object.newline();

    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });
    return .none;
}
7076
/// Emits the destination's element count: the statically-known array length
/// for a single-item pointer-to-array, or the slice's `len` member.
fn writeArrayLen(f: *Function, dest_ptr: CValue, dest_ty: Type) !void {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const w = &f.object.code.writer;
    switch (dest_ty.ptrSize(zcu)) {
        .slice => try f.writeCValueMember(w, dest_ptr, .{ .identifier = "len" }),
        .one => {
            const len_val = try pt.intValue(.usize, dest_ty.childType(zcu).arrayLen(zcu));
            try w.print("{f}", .{try f.fmtIntLiteralDec(len_val)});
        },
        .many, .c => unreachable,
    }
}
7089
/// Lowers `set_union_tag`: stores the new tag value into the `tag` member of
/// the union pointed to by the first operand. Produces no result value.
fn airSetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const bin_op = f.air.instructions.items(.data)[@intFromEnum(inst)].bin_op;

    const union_ptr = try f.resolveInst(bin_op.lhs);
    const new_tag = try f.resolveInst(bin_op.rhs);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs });

    const union_ty = f.typeOf(bin_op.lhs).childType(zcu);
    // Nothing to store when the union's layout carries no tag.
    if (union_ty.unionGetLayout(zcu).tag_size == 0) return .none;

    const tag_ty = union_ty.unionTagTypeSafety(zcu).?;
    const tag_ctype = try f.ctypeFromType(tag_ty, .complete);
    const w = &f.object.code.writer;
    const assignment = try Assignment.start(f, w, tag_ctype);
    try f.writeCValueDerefMember(w, union_ptr, .{ .identifier = "tag" });
    try assignment.assign(f, w);
    try f.writeCValue(w, new_tag, .Other);
    try assignment.end(f, w);
    return .none;
}
7111
/// Lowers `get_union_tag`: copies the `tag` member of the union operand into
/// a fresh local. Returns `.none` when the union's layout carries no tag.
fn airGetUnionTag(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const union_ty = f.typeOf(ty_op.operand);
    if (union_ty.unionGetLayout(zcu).tag_size == 0) return .none;

    const result_ty = f.typeOfIndex(inst);
    const w = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    const assignment = try Assignment.start(f, w, try f.ctypeFromType(result_ty, .complete));
    try f.writeCValue(w, result, .Other);
    try assignment.assign(f, w);
    try f.writeCValueMember(w, operand, .{ .identifier = "tag" });
    try assignment.end(f, w);
    return result;
}
7134
/// Lowers `tag_name`: calls the lazily-emitted tag-name helper function for
/// the operand's enum type and stores its result into a fresh local.
fn airTagName(f: *Function, inst: Air.Inst.Index) !CValue {
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const enum_ty = f.typeOf(un_op);
    const result_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});

    const w = &f.object.code.writer;
    const result = try f.allocLocal(inst, result_ty);
    try f.writeCValue(w, result, .Other);
    // Requesting the name also registers the lazy helper for emission.
    const fn_name = try f.getLazyFnName(.{ .tag_name = enum_ty.toIntern() });
    try w.print(" = {s}(", .{fn_name});
    try f.writeCValue(w, operand, .Other);
    try w.writeAll(");");
    try f.object.newline();

    return result;
}
7155
/// Lowers `error_name`: indexes the global `zig_errorName` table with the
/// error value and stores the result into a fresh local.
fn airErrorName(f: *Function, inst: Air.Inst.Index) !CValue {
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const w = &f.object.code.writer;
    const result_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});
    const result = try f.allocLocal(inst, result_ty);
    try f.writeCValue(w, result, .Other);

    // Error values start at 1, so subtract 1 to index the name table.
    try w.writeAll(" = zig_errorName[");
    try f.writeCValue(w, operand, .Other);
    try w.writeAll(" - 1];");
    try f.object.newline();
    return result;
}
7172
/// Lowers `splat`: broadcasts a scalar operand into every element of the
/// result vector with an element-wise assignment loop.
fn airSplat(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const operand = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    // Vectorize emits the surrounding per-element loop; `v.elem` writes the
    // loop-index subscript after each lvalue/rvalue.
    const v = try Vectorize.start(f, inst, w, inst_ty);
    const a = try Assignment.start(f, w, try f.ctypeFromType(inst_scalar_ty, .complete));
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try a.assign(f, w);
    try f.writeCValue(w, operand, .Other);
    try a.end(f, w);
    try v.end(f, inst, w);

    return local;
}
7197
/// Lowers `select`: element-wise `pred[i] ? lhs[i] : rhs[i]` using a C
/// conditional expression inside a Vectorize loop.
fn airSelect(f: *Function, inst: Air.Inst.Index) !CValue {
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const extra = f.air.extraData(Air.Bin, pl_op.payload).data;

    const pred = try f.resolveInst(pl_op.operand);
    const lhs = try f.resolveInst(extra.lhs);
    const rhs = try f.resolveInst(extra.rhs);
    try reap(f, inst, &.{ pl_op.operand, extra.lhs, extra.rhs });

    const inst_ty = f.typeOfIndex(inst);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, inst_ty);
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try w.writeAll(" = ");
    try f.writeCValue(w, pred, .Other);
    try v.elem(f, w);
    try w.writeAll(" ? ");
    try f.writeCValue(w, lhs, .Other);
    try v.elem(f, w);
    try w.writeAll(" : ");
    try f.writeCValue(w, rhs, .Other);
    try v.elem(f, w);
    try w.writeByte(';');
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
7229
/// Lowers `shuffle_one`: builds the result vector element by element,
/// copying from the operand or rendering a comptime value as directed by
/// the comptime-known mask.
fn airShuffleOne(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;

    const unwrapped = f.air.unwrapShuffleOne(zcu, inst);
    const mask = unwrapped.mask;
    const operand = try f.resolveInst(unwrapped.operand);
    const inst_ty = unwrapped.result_ty;

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    try reap(f, inst, &.{unwrapped.operand}); // local cannot alias operand
    for (mask, 0..) |mask_elem, out_idx| {
        try f.writeCValue(w, local, .Other);
        try w.writeByte('[');
        try f.object.dg.renderValue(w, try pt.intValue(.usize, out_idx), .Other);
        try w.writeAll("] = ");
        switch (mask_elem.unwrap()) {
            .elem => |src_idx| {
                try f.writeCValue(w, operand, .Other);
                try w.writeByte('[');
                try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other);
                try w.writeByte(']');
            },
            .value => |val| try f.object.dg.renderValue(w, .fromInterned(val), .Other),
        }
        // Use `f.object.newline()` rather than a raw "\n" so the object's
        // newline/indentation bookkeeping stays consistent, matching
        // `airShuffleTwo` below.
        try w.writeByte(';');
        try f.object.newline();
    }

    return local;
}
7261
/// Lowers `shuffle_two`: builds the result vector element by element,
/// copying from operand a, operand b, or rendering an undef element as
/// directed by the comptime-known mask.
fn airShuffleTwo(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;

    const unwrapped = f.air.unwrapShuffleTwo(zcu, inst);
    const mask = unwrapped.mask;
    const operand_a = try f.resolveInst(unwrapped.operand_a);
    const operand_b = try f.resolveInst(unwrapped.operand_b);
    const inst_ty = unwrapped.result_ty;
    const elem_ty = inst_ty.childType(zcu);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    try reap(f, inst, &.{ unwrapped.operand_a, unwrapped.operand_b }); // local cannot alias operands
    for (mask, 0..) |mask_elem, out_idx| {
        try f.writeCValue(w, local, .Other);
        try w.writeByte('[');
        try f.object.dg.renderValue(w, try pt.intValue(.usize, out_idx), .Other);
        try w.writeAll("] = ");
        switch (mask_elem.unwrap()) {
            .a_elem => |src_idx| {
                try f.writeCValue(w, operand_a, .Other);
                try w.writeByte('[');
                try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other);
                try w.writeByte(']');
            },
            .b_elem => |src_idx| {
                try f.writeCValue(w, operand_b, .Other);
                try w.writeByte('[');
                try f.object.dg.renderValue(w, try pt.intValue(.usize, src_idx), .Other);
                try w.writeByte(']');
            },
            // Mask slots not taken from either operand get an undef value.
            .undef => try f.object.dg.renderUndefValue(w, elem_ty, .Other),
        }
        try w.writeByte(';');
        try f.object.newline();
    }

    return local;
}
7302
/// Lowers `reduce`: folds a vector operand into a single scalar by seeding
/// an accumulator with the operation's identity element and then combining
/// one vector element at a time inside a Vectorize loop.
fn airReduce(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const reduce = f.air.instructions.items(.data)[@intFromEnum(inst)].reduce;

    const scalar_ty = f.typeOfIndex(inst);
    const operand = try f.resolveInst(reduce.operand);
    try reap(f, inst, &.{reduce.operand});
    const operand_ty = f.typeOf(reduce.operand);
    const w = &f.object.code.writer;

    // Scalars up to 64 bits can use plain C operators; wider integers (and
    // floats for min/max/add/mul) go through zig_* helper builtins.
    const use_operator = scalar_ty.bitSize(zcu) <= 64;
    const op: union(enum) {
        const Func = struct { operation: []const u8, info: BuiltinInfo = .none };
        builtin: Func,
        infix: []const u8,
        ternary: []const u8,
    } = switch (reduce.operation) {
        .And => if (use_operator) .{ .infix = " &= " } else .{ .builtin = .{ .operation = "and" } },
        .Or => if (use_operator) .{ .infix = " |= " } else .{ .builtin = .{ .operation = "or" } },
        .Xor => if (use_operator) .{ .infix = " ^= " } else .{ .builtin = .{ .operation = "xor" } },
        .Min => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => if (use_operator) .{ .ternary = " < " } else .{ .builtin = .{ .operation = "min" } },
            .float => .{ .builtin = .{ .operation = "min" } },
            else => unreachable,
        },
        .Max => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => if (use_operator) .{ .ternary = " > " } else .{ .builtin = .{ .operation = "max" } },
            .float => .{ .builtin = .{ .operation = "max" } },
            else => unreachable,
        },
        .Add => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => if (use_operator) .{ .infix = " += " } else .{ .builtin = .{ .operation = "addw", .info = .bits } },
            .float => .{ .builtin = .{ .operation = "add" } },
            else => unreachable,
        },
        .Mul => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => if (use_operator) .{ .infix = " *= " } else .{ .builtin = .{ .operation = "mulw", .info = .bits } },
            .float => .{ .builtin = .{ .operation = "mul" } },
            else => unreachable,
        },
    };

    // Reduce a vector by repeatedly applying a function to produce an
    // accumulated result.
    //
    // Equivalent to:
    //   reduce: {
    //     var accum: T = init;
    //     for (vec) |elem| {
    //       accum = func(accum, elem);
    //     }
    //     break :reduce accum;
    //   }

    const accum = try f.allocLocal(inst, scalar_ty);
    try f.writeCValue(w, accum, .Other);
    try w.writeAll(" = ");

    // Seed the accumulator with the operation's identity element.
    // NOTE(review): float min/max seed with NaN — this relies on the
    // zig_min/zig_max helpers replacing a NaN accumulator with the other
    // operand; confirm against zig.h.
    try f.object.dg.renderValue(w, switch (reduce.operation) {
        .Or, .Xor => switch (scalar_ty.zigTypeTag(zcu)) {
            .bool => Value.false,
            .int => try pt.intValue(scalar_ty, 0),
            else => unreachable,
        },
        .And => switch (scalar_ty.zigTypeTag(zcu)) {
            .bool => Value.true,
            .int => switch (scalar_ty.intInfo(zcu).signedness) {
                .unsigned => try scalar_ty.maxIntScalar(pt, scalar_ty),
                .signed => try pt.intValue(scalar_ty, -1),
            },
            else => unreachable,
        },
        .Add => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => try pt.intValue(scalar_ty, 0),
            .float => try pt.floatValue(scalar_ty, 0.0),
            else => unreachable,
        },
        .Mul => switch (scalar_ty.zigTypeTag(zcu)) {
            .int => try pt.intValue(scalar_ty, 1),
            .float => try pt.floatValue(scalar_ty, 1.0),
            else => unreachable,
        },
        .Min => switch (scalar_ty.zigTypeTag(zcu)) {
            .bool => Value.true,
            .int => try scalar_ty.maxIntScalar(pt, scalar_ty),
            .float => try pt.floatValue(scalar_ty, std.math.nan(f128)),
            else => unreachable,
        },
        .Max => switch (scalar_ty.zigTypeTag(zcu)) {
            .bool => Value.false,
            .int => try scalar_ty.minIntScalar(pt, scalar_ty),
            .float => try pt.floatValue(scalar_ty, std.math.nan(f128)),
            else => unreachable,
        },
    }, .Other);
    try w.writeByte(';');
    try f.object.newline();

    // Per-element combine step, in one of three emission styles.
    const v = try Vectorize.start(f, inst, w, operand_ty);
    try f.writeCValue(w, accum, .Other);
    switch (op) {
        // accum = zig_<op>_<T>(accum, operand[i] ...);
        .builtin => |func| {
            try w.print(" = zig_{s}_", .{func.operation});
            try f.object.dg.renderTypeForBuiltinFnName(w, scalar_ty);
            try w.writeByte('(');
            try f.writeCValue(w, accum, .FunctionArgument);
            try w.writeAll(", ");
            try f.writeCValue(w, operand, .Other);
            try v.elem(f, w);
            try f.object.dg.renderBuiltinInfo(w, scalar_ty, func.info);
            try w.writeByte(')');
        },
        // accum <op>= operand[i];
        .infix => |ass| {
            try w.writeAll(ass);
            try f.writeCValue(w, operand, .Other);
            try v.elem(f, w);
        },
        // accum = accum <cmp> operand[i] ? accum : operand[i];
        .ternary => |cmp| {
            try w.writeAll(" = ");
            try f.writeCValue(w, accum, .Other);
            try w.writeAll(cmp);
            try f.writeCValue(w, operand, .Other);
            try v.elem(f, w);
            try w.writeAll(" ? ");
            try f.writeCValue(w, accum, .Other);
            try w.writeAll(" : ");
            try f.writeCValue(w, operand, .Other);
            try v.elem(f, w);
        },
    }
    try w.writeByte(';');
    try f.object.newline();
    try v.end(f, inst, w);

    return accum;
}
7440
/// Lowers `aggregate_init` for arrays, vectors, structs, and tuples by
/// assigning each provided element/field of a fresh result local in turn.
fn airAggregateInit(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const inst_ty = f.typeOfIndex(inst);
    const len: usize = @intCast(inst_ty.arrayLen(zcu));
    const elements: []const Air.Inst.Ref = @ptrCast(f.air.extra.items[ty_pl.payload..][0..len]);
    const gpa = f.object.dg.gpa;
    const resolved_elements = try gpa.alloc(CValue, elements.len);
    defer gpa.free(resolved_elements);
    for (resolved_elements, elements) |*resolved_element, element| {
        resolved_element.* = try f.resolveInst(element);
    }
    {
        // Operand count can exceed the inline liveness bits, so use the
        // big-tomb iterator instead of `reap`.
        var bt = iterateBigTomb(f, inst);
        for (elements) |element| {
            try bt.feed(element);
        }
    }

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    switch (ip.indexToKey(inst_ty.toIntern())) {
        inline .array_type, .vector_type => |info, tag| {
            const a: Assignment = .{
                .ctype = try f.ctypeFromType(.fromInterned(info.child), .complete),
            };
            for (resolved_elements, 0..) |element, i| {
                try a.restart(f, w);
                try f.writeCValue(w, local, .Other);
                try w.print("[{d}]", .{i});
                try a.assign(f, w);
                try f.writeCValue(w, element, .Other);
                try a.end(f, w);
            }
            // Arrays may carry a sentinel element one past the last index.
            if (tag == .array_type and info.sentinel != .none) {
                try a.restart(f, w);
                try f.writeCValue(w, local, .Other);
                try w.print("[{d}]", .{info.len});
                try a.assign(f, w);
                try f.object.dg.renderValue(w, Value.fromInterned(info.sentinel), .Other);
                try a.end(f, w);
            }
        },
        .struct_type => {
            const loaded_struct = ip.loadStructType(inst_ty.toIntern());
            switch (loaded_struct.layout) {
                .auto, .@"extern" => {
                    var field_it = loaded_struct.iterateRuntimeOrder(ip);
                    while (field_it.next()) |field_index| {
                        const field_ty: Type = .fromInterned(loaded_struct.field_types.get(ip)[field_index]);
                        if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

                        const a = try Assignment.start(f, w, try f.ctypeFromType(field_ty, .complete));
                        try f.writeCValueMember(w, local, .{ .identifier = loaded_struct.fieldName(ip, field_index).toSlice(ip) });
                        try a.assign(f, w);
                        try f.writeCValue(w, resolved_elements[field_index], .Other);
                        try a.end(f, w);
                    }
                },
                .@"packed" => unreachable, // `Air.Legalize.Feature.expand_packed_struct_init` handles this case
            }
        },
        // Tuple fields with a comptime-known value are skipped; only
        // runtime fields get an assignment.
        .tuple_type => |tuple_info| for (0..tuple_info.types.len) |field_index| {
            if (tuple_info.values.get(ip)[field_index] != .none) continue;
            const field_ty: Type = .fromInterned(tuple_info.types.get(ip)[field_index]);
            if (!field_ty.hasRuntimeBitsIgnoreComptime(zcu)) continue;

            const a = try Assignment.start(f, w, try f.ctypeFromType(field_ty, .complete));
            try f.writeCValueMember(w, local, .{ .field = field_index });
            try a.assign(f, w);
            try f.writeCValue(w, resolved_elements[field_index], .Other);
            try a.end(f, w);
        },
        else => unreachable,
    }

    return local;
}
7521
/// Lowers `union_init`: assigns the payload (and, for safety-tagged unions
/// with a runtime tag, the tag value) into a fresh union local.
fn airUnionInit(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty_pl = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_pl;
    const extra = f.air.extraData(Air.UnionInit, ty_pl.payload).data;

    const union_ty = f.typeOfIndex(inst);
    const loaded_union = ip.loadUnionType(union_ty.toIntern());
    const field_name = loaded_union.loadTagType(ip).names.get(ip)[extra.field_index];
    const payload_ty = f.typeOf(extra.init);
    const payload = try f.resolveInst(extra.init);
    try reap(f, inst, &.{extra.init});

    const w = &f.object.code.writer;
    // Packed unions lower to just the payload value; no tag or wrapper
    // struct is emitted.
    if (loaded_union.flagsUnordered(ip).layout == .@"packed") return f.moveCValue(inst, union_ty, payload);

    const local = try f.allocLocal(inst, union_ty);

    // Safety-tagged unions store the payload under a `payload` wrapper and
    // also need their tag written when the tag has runtime size.
    const field: CValue = if (union_ty.unionTagTypeSafety(zcu)) |tag_ty| field: {
        const layout = union_ty.unionGetLayout(zcu);
        if (layout.tag_size != 0) {
            const field_index = tag_ty.enumFieldIndex(field_name, zcu).?;
            const tag_val = try pt.enumValueFieldIndex(tag_ty, field_index);

            const a = try Assignment.start(f, w, try f.ctypeFromType(tag_ty, .complete));
            try f.writeCValueMember(w, local, .{ .identifier = "tag" });
            try a.assign(f, w);
            try w.print("{f}", .{try f.fmtIntLiteralDec(try tag_val.intFromEnum(tag_ty, pt))});
            try a.end(f, w);
        }
        break :field .{ .payload_identifier = field_name.toSlice(ip) };
    } else .{ .identifier = field_name.toSlice(ip) };

    const a = try Assignment.start(f, w, try f.ctypeFromType(payload_ty, .complete));
    try f.writeCValueMember(w, local, field);
    try a.assign(f, w);
    try f.writeCValue(w, payload, .Other);
    try a.end(f, w);
    return local;
}
7563
/// Lowers `prefetch`: emits a `zig_prefetch(...)` call for data prefetches.
/// Instruction-cache prefetches are dropped because the available C
/// intrinsics only accept address, rw, and locality.
fn airPrefetch(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const prefetch = f.air.instructions.items(.data)[@intFromEnum(inst)].prefetch;

    const ptr_ty = f.typeOf(prefetch.ptr);
    const ptr = try f.resolveInst(prefetch.ptr);
    try reap(f, inst, &.{prefetch.ptr});

    const w = &f.object.code.writer;
    switch (prefetch.cache) {
        .data => {
            try w.writeAll("zig_prefetch(");
            // Slices prefetch their pointer member, not the slice struct.
            if (ptr_ty.isSlice(zcu))
                try f.writeCValueMember(w, ptr, .{ .identifier = "ptr" })
            else
                try f.writeCValue(w, ptr, .FunctionArgument);
            try w.print(", {d}, {d});", .{ @intFromEnum(prefetch.rw), prefetch.locality });
            try f.object.newline();
        },
        // The available prefetch intrinsics do not accept a cache argument; only
        // address, rw, and locality.
        .instruction => {},
    }

    return .none;
}
7591
/// Lowers `wasm_memory_size`: calls the `zig_wasm_memory_size` helper with
/// the memory index carried in the instruction payload.
fn airWasmMemorySize(f: *Function, inst: Air.Inst.Index) !CValue {
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;

    const writer = &f.object.code.writer;
    const result_ty = f.typeOfIndex(inst);
    const result = try f.allocLocal(inst, result_ty);
    try f.writeCValue(writer, result, .Other);
    try writer.print(" = zig_wasm_memory_size({d});", .{pl_op.payload});
    try f.object.newline();

    return result;
}
7606
/// Lowers `wasm_memory_grow`: calls the `zig_wasm_memory_grow` helper with
/// the memory index from the payload and the delta operand.
fn airWasmMemoryGrow(f: *Function, inst: Air.Inst.Index) !CValue {
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;

    const writer = &f.object.code.writer;
    const result_ty = f.typeOfIndex(inst);
    const delta = try f.resolveInst(pl_op.operand);
    try reap(f, inst, &.{pl_op.operand});
    const result = try f.allocLocal(inst, result_ty);
    try f.writeCValue(writer, result, .Other);
    try writer.print(" = zig_wasm_memory_grow({d}, ", .{pl_op.payload});
    try f.writeCValue(writer, delta, .FunctionArgument);
    try writer.writeAll(");");
    try f.object.newline();
    return result;
}
7624
/// Lowers `mul_add` (fused multiply-add): emits `zig_fma_<T>(m1, m2, add)`
/// per element inside a Vectorize loop.
fn airMulAdd(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const pl_op = f.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
    const bin_op = f.air.extraData(Air.Bin, pl_op.payload).data;

    const mulend1 = try f.resolveInst(bin_op.lhs);
    const mulend2 = try f.resolveInst(bin_op.rhs);
    const addend = try f.resolveInst(pl_op.operand);
    try reap(f, inst, &.{ bin_op.lhs, bin_op.rhs, pl_op.operand });

    const inst_ty = f.typeOfIndex(inst);
    const inst_scalar_ty = inst_ty.scalarType(zcu);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    const v = try Vectorize.start(f, inst, w, inst_ty);
    try f.writeCValue(w, local, .Other);
    try v.elem(f, w);
    try w.writeAll(" = zig_fma_");
    try f.object.dg.renderTypeForBuiltinFnName(w, inst_scalar_ty);
    try w.writeByte('(');
    try f.writeCValue(w, mulend1, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, mulend2, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(", ");
    try f.writeCValue(w, addend, .FunctionArgument);
    try v.elem(f, w);
    try w.writeAll(");");
    try f.object.newline();
    try v.end(f, inst, w);

    return local;
}
7661
/// Lowers `runtime_nav_ptr`: assigns the rendered nav to a fresh local of
/// the instruction's type.
fn airRuntimeNavPtr(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_nav = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_nav;
    const writer = &f.object.code.writer;
    const result = try f.allocLocal(inst, .fromInterned(ty_nav.ty));
    try f.writeCValue(writer, result, .Other);
    try writer.writeAll(" = ");
    try f.object.dg.renderNav(writer, ty_nav.nav, .Other);
    try writer.writeByte(';');
    try f.object.newline();
    return result;
}
7673
/// Lowers `c_va_start`: allocates a va_list local and initializes it with
/// `va_start`. Only valid inside a varargs function (asserted below).
fn airCVaStart(f: *Function, inst: Air.Inst.Index) !CValue {
    const pt = f.object.dg.pt;
    const zcu = pt.zcu;
    const inst_ty = f.typeOfIndex(inst);
    const function_ty = zcu.navValue(f.object.dg.pass.nav).typeOf(zcu);
    const function_info = (try f.ctypeFromType(function_ty, .complete)).info(&f.object.dg.ctype_pool).function;
    assert(function_info.varargs);

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    try w.writeAll("va_start(*(va_list *)&");
    try f.writeCValue(w, local, .Other);
    // C's va_start takes the last named parameter when the function
    // declares any.
    if (function_info.param_ctypes.len > 0) {
        try w.writeAll(", ");
        try f.writeCValue(w, .{ .arg = function_info.param_ctypes.len - 1 }, .FunctionArgument);
    }
    try w.writeAll(");");
    try f.object.newline();
    return local;
}
7694
/// Lowers `c_va_arg`: emits `va_arg` on the pointed-to va_list with the
/// requested C type and stores the result in a local.
fn airCVaArg(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const va_list = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    try f.writeCValue(w, local, .Other);
    try w.writeAll(" = va_arg(*(va_list *)");
    try f.writeCValue(w, va_list, .Other);
    try w.writeAll(", ");
    try f.renderType(w, ty_op.ty.toType());
    try w.writeAll(");");
    try f.object.newline();
    return local;
}
7713
/// Lowers `c_va_end`: emits `va_end` on the pointed-to va_list. Produces no
/// result value.
fn airCVaEnd(f: *Function, inst: Air.Inst.Index) !CValue {
    const un_op = f.air.instructions.items(.data)[@intFromEnum(inst)].un_op;

    const va_list_ptr = try f.resolveInst(un_op);
    try reap(f, inst, &.{un_op});

    const writer = &f.object.code.writer;
    try writer.writeAll("va_end(*(va_list *)");
    try f.writeCValue(writer, va_list_ptr, .Other);
    try writer.writeAll(");");
    try f.object.newline();
    return .none;
}
7727
/// Lowers `c_va_copy`: allocates a new va_list local and copies the
/// pointed-to source va_list into it with `va_copy`.
fn airCVaCopy(f: *Function, inst: Air.Inst.Index) !CValue {
    const ty_op = f.air.instructions.items(.data)[@intFromEnum(inst)].ty_op;

    const inst_ty = f.typeOfIndex(inst);
    const va_list = try f.resolveInst(ty_op.operand);
    try reap(f, inst, &.{ty_op.operand});

    const w = &f.object.code.writer;
    const local = try f.allocLocal(inst, inst_ty);
    try w.writeAll("va_copy(*(va_list *)&");
    try f.writeCValue(w, local, .Other);
    try w.writeAll(", *(va_list *)");
    try f.writeCValue(w, va_list, .Other);
    try w.writeAll(");");
    try f.object.newline();
    return local;
}
7745
/// Translates an atomic ordering to the matching `zig_memory_order_*`
/// macro name from zig.h.
fn toMemoryOrder(order: std.builtin.AtomicOrder) [:0]const u8 {
    switch (order) {
        // Note: unordered is actually even less atomic than relaxed, but
        // relaxed is the closest (strictly stronger) C mapping available.
        .unordered, .monotonic => return "zig_memory_order_relaxed",
        .acquire => return "zig_memory_order_acquire",
        .release => return "zig_memory_order_release",
        .acq_rel => return "zig_memory_order_acq_rel",
        .seq_cst => return "zig_memory_order_seq_cst",
    }
}
7756
/// Writes the `zig_memory_order_*` macro name for `order` to `w`.
fn writeMemoryOrder(w: *Writer, order: std.builtin.AtomicOrder) !void {
    try w.writeAll(toMemoryOrder(order));
}
7760
/// Maps a Zig calling convention to the C attribute spelling used to
/// annotate the generated function, or null when no attribute is needed
/// (auto/naked, or the convention already matches the target's default C
/// calling convention).
fn toCallingConvention(cc: std.builtin.CallingConvention, zcu: *Zcu) ?[]const u8 {
    // The target's default C convention needs no explicit attribute.
    if (zcu.getTarget().cCallingConvention()) |ccc| {
        if (cc.eql(ccc)) {
            return null;
        }
    }
    return switch (cc) {
        .auto, .naked => null,

        .x86_16_cdecl => "cdecl",
        .x86_16_regparmcall => "regparmcall",
        .x86_64_sysv, .x86_sysv => "sysv_abi",
        .x86_64_win, .x86_win => "ms_abi",
        .x86_16_stdcall, .x86_stdcall => "stdcall",
        .x86_fastcall => "fastcall",
        .x86_thiscall => "thiscall",

        .x86_vectorcall,
        .x86_64_vectorcall,
        => "vectorcall",

        .x86_64_regcall_v3_sysv,
        .x86_64_regcall_v4_win,
        .x86_regcall_v3,
        .x86_regcall_v4_win,
        => "regcall",

        .aarch64_vfabi => "aarch64_vector_pcs",
        .aarch64_vfabi_sve => "aarch64_sve_pcs",

        .arm_aapcs => "pcs(\"aapcs\")",
        .arm_aapcs_vfp => "pcs(\"aapcs-vfp\")",

        // Interrupt conventions carry options; `inline else` builds the
        // attribute string at comptime from the option's tag name.
        .arc_interrupt => |opts| switch (opts.type) {
            inline else => |t| "interrupt(\"" ++ @tagName(t) ++ "\")",
        },

        .arm_interrupt => |opts| switch (opts.type) {
            .generic => "interrupt",
            .irq => "interrupt(\"IRQ\")",
            .fiq => "interrupt(\"FIQ\")",
            .swi => "interrupt(\"SWI\")",
            .abort => "interrupt(\"ABORT\")",
            .undef => "interrupt(\"UNDEF\")",
        },

        .avr_signal => "signal",

        .microblaze_interrupt => |opts| switch (opts.type) {
            .user => "save_volatiles",
            .regular => "interrupt_handler",
            .fast => "fast_interrupt",
            .breakpoint => "break_handler",
        },

        .mips_interrupt,
        .mips64_interrupt,
        => |opts| switch (opts.mode) {
            inline else => |m| "interrupt(\"" ++ @tagName(m) ++ "\")",
        },

        .riscv64_lp64_v, .riscv32_ilp32_v => "riscv_vector_cc",

        .riscv32_interrupt,
        .riscv64_interrupt,
        => |opts| switch (opts.mode) {
            inline else => |m| "interrupt(\"" ++ @tagName(m) ++ "\")",
        },

        .sh_renesas => "renesas",
        .sh_interrupt => |opts| switch (opts.save) {
            .fpscr => "trapa_handler", // Implies `interrupt_handler`.
            .high => "interrupt_handler, nosave_low_regs",
            .full => "interrupt_handler",
            .bank => "interrupt_handler, resbank",
        },

        .m68k_rtd => "m68k_rtd",

        .avr_interrupt,
        .csky_interrupt,
        .m68k_interrupt,
        .msp430_interrupt,
        .x86_16_interrupt,
        .x86_interrupt,
        .x86_64_interrupt,
        => "interrupt",

        else => unreachable, // `Zcu.callconvSupported`
    };
}
7852
/// Returns the `zig_atomic_*` builtin name suffix for an atomic RMW op.
fn toAtomicRmwSuffix(order: std.builtin.AtomicRmwOp) []const u8 {
    switch (order) {
        .Xchg => return "xchg",
        .Add => return "add",
        .Sub => return "sub",
        .And => return "and",
        .Nand => return "nand",
        .Or => return "or",
        .Xor => return "xor",
        .Max => return "max",
        .Min => return "min",
    }
}
7866
/// Rounds a Zig integer bit width up to the smallest standard C integer
/// width (8/16/32/64/128) that can hold it, or null if it exceeds 128 bits.
fn toCIntBits(zig_bits: u32) ?u32 {
    const candidates = [_]u32{ 8, 16, 32, 64, 128 };
    return for (candidates) |c_bits| {
        if (zig_bits <= c_bits) break c_bits;
    } else null;
}
7875
/// Single-character signedness abbreviation used in builtin names.
fn signAbbrev(signedness: std.builtin.Signedness) u8 {
    switch (signedness) {
        .signed => return 'i',
        .unsigned => return 'u',
    }
}
7882
/// Returns the compiler-rt type suffix used when building libcall names:
/// "si"/"di"/"ti" for 32/64/128-bit integers, "hf"/"sf"/"df"/"xf"/"tf" for
/// the supported float widths.
fn compilerRtAbbrev(ty: Type, zcu: *Zcu, target: *const std.Target) []const u8 {
    if (ty.isInt(zcu)) return switch (ty.intInfo(zcu).bits) {
        1...32 => "si",
        33...64 => "di",
        65...128 => "ti",
        else => unreachable,
    };
    if (ty.isRuntimeFloat()) return switch (ty.floatBits(target)) {
        16 => "hf",
        32 => "sf",
        64 => "df",
        80 => "xf",
        128 => "tf",
        else => unreachable,
    };
    unreachable;
}
7898
/// Two-letter abbreviation for a comparison, as used in builtin fn names.
fn compareOperatorAbbrev(operator: std.math.CompareOperator) []const u8 {
    switch (operator) {
        .lt => return "lt",
        .lte => return "le",
        .eq => return "eq",
        .gte => return "ge",
        .gt => return "gt",
        .neq => return "ne",
    }
}
7909
/// C source spelling of a comparison operator, padded with spaces so it can
/// be written directly between two operands.
fn compareOperatorC(operator: std.math.CompareOperator) []const u8 {
    switch (operator) {
        .lt => return " < ",
        .lte => return " <= ",
        .eq => return " == ",
        .gte => return " >= ",
        .gt => return " > ",
        .neq => return " != ",
    }
}
7920
/// Renders a byte sequence as C source: either a (possibly chunked) string
/// literal for short strings, or a brace-enclosed character-array
/// initializer for long ones, working around the MSVC/C99 limits below.
const StringLiteral = struct {
    /// Total number of characters that will be written; chosen up front so
    /// `start`/`writeChar`/`end` agree on the rendering strategy.
    len: usize,
    /// Characters emitted into the current literal chunk so far.
    cur_len: usize,
    w: *Writer,
    /// True until the first chunk/element has been completed.
    first: bool,

    // MSVC throws C2078 if an array of size 65536 or greater is initialized with a string literal,
    // regardless of the length of the string literal initializing it. Array initializer syntax is
    // used instead.
    // C99 only requires 4095.
    const max_string_initializer_len = @min(65535, 4095);

    // MSVC has a length limit of 16380 per string literal (before concatenation)
    // C99 only requires 4095.
    const max_char_len = 4;
    const max_literal_len = @min(16380 - max_char_len, 4095);

    fn init(w: *Writer, len: usize) StringLiteral {
        return .{
            .cur_len = 0,
            .len = len,
            .w = w,
            .first = true,
        };
    }

    /// Opens either the string literal (`"`) or the array initializer (`{`)
    /// depending on the total length.
    pub fn start(sl: *StringLiteral) Writer.Error!void {
        if (sl.len <= max_string_initializer_len) {
            try sl.w.writeByte('\"');
        } else {
            try sl.w.writeByte('{');
        }
    }

    /// Closes whichever form `start` opened.
    pub fn end(sl: *StringLiteral) Writer.Error!void {
        if (sl.len <= max_string_initializer_len) {
            try sl.w.writeByte('\"');
        } else {
            try sl.w.writeByte('}');
        }
    }

    /// Writes one byte as a string-literal character, escaping where
    /// required. Returns the number of source characters produced (at most
    /// `max_char_len`).
    fn writeStringLiteralChar(sl: *StringLiteral, c: u8) Writer.Error!usize {
        const w = sl.w;
        switch (c) {
            7 => {
                try w.writeAll("\\a");
                return 2;
            },
            8 => {
                try w.writeAll("\\b");
                return 2;
            },
            '\t' => {
                try w.writeAll("\\t");
                return 2;
            },
            '\n' => {
                try w.writeAll("\\n");
                return 2;
            },
            11 => {
                try w.writeAll("\\v");
                return 2;
            },
            12 => {
                try w.writeAll("\\f");
                return 2;
            },
            '\r' => {
                try w.writeAll("\\r");
                return 2;
            },
            '"', '\'', '?', '\\' => {
                try w.print("\\{c}", .{c});
                return 2;
            },
            ' '...'!', '#'...'&', '('...'>', '@'...'[', ']'...'~' => {
                try w.writeByte(c);
                return 1;
            },
            else => {
                // Everything else as a 3-digit octal escape; fixed width so
                // a following digit cannot extend the escape.
                var buf: [4]u8 = undefined;
                const printed = std.fmt.bufPrint(&buf, "\\{o:0>3}", .{c}) catch unreachable;
                try w.writeAll(printed);
                return printed.len;
            },
        }
    }

    /// Appends one byte in whichever form was selected. String form splits
    /// into adjacent literals (`"…""…"`) every `max_literal_len` source
    /// characters; array form emits comma-separated `'\xNN'` constants.
    pub fn writeChar(sl: *StringLiteral, c: u8) Writer.Error!void {
        if (sl.len <= max_string_initializer_len) {
            // Re-open with `""` after a chunk was closed out below.
            if (sl.cur_len == 0 and !sl.first) try sl.w.writeAll("\"\"");

            const char_len = try sl.writeStringLiteralChar(c);
            assert(char_len <= max_char_len);
            sl.cur_len += char_len;

            if (sl.cur_len >= max_literal_len) {
                sl.cur_len = 0;
                sl.first = false;
            }
        } else {
            if (!sl.first) try sl.w.writeByte(',');
            var buf: [6]u8 = undefined;
            const printed = std.fmt.bufPrint(&buf, "'\\x{x}'", .{c}) catch unreachable;
            try sl.w.writeAll(printed);
            sl.cur_len += printed.len;
            sl.first = false;
        }
    }
};
8033
/// Payload for `formatStringLiteral` / `fmtStringLiteral`.
const FormatStringContext = struct {
    // The bytes to render (sentinel excluded).
    str: []const u8,
    // Optional sentinel byte appended after `str`. A zero sentinel is not
    // written explicitly (see `formatStringLiteral`).
    sentinel: ?u8,
};
8038
/// Renders `data.str` (plus a non-zero sentinel, when present) as a C
/// string literal or character-array initializer via `StringLiteral`.
fn formatStringLiteral(data: FormatStringContext, w: *Writer) Writer.Error!void {
    const total_len = data.str.len + @intFromBool(data.sentinel != null);
    var sl: StringLiteral = .init(w, total_len);
    try sl.start();
    for (data.str) |byte| try sl.writeChar(byte);
    if (data.sentinel) |s| {
        // A zero sentinel is already implied by the C string terminator.
        if (s != 0) try sl.writeChar(s);
    }
    try sl.end();
}
8046
/// Convenience wrapper producing a `std.fmt.Alt` formatter that renders
/// `str` (and optional sentinel) through `formatStringLiteral`.
fn fmtStringLiteral(str: []const u8, sentinel: ?u8) std.fmt.Alt(FormatStringContext, formatStringLiteral) {
    const ctx: FormatStringContext = .{ .str = str, .sentinel = sentinel };
    return .{ .data = ctx };
}
8050
/// Produces the alternating 0b…1010 bit pattern (0xAA bytes) used to fill
/// undefined integer values.
fn undefPattern(comptime IntType: type) IntType {
    const info = @typeInfo(IntType).int;
    const Unsigned = std.meta.Int(.unsigned, info.bits);
    // (2^(bits|1)) / 3 yields 0b…1010 for any bit count; `| 1` forces an
    // odd exponent so the division is exact in the pattern's favor.
    const pattern: Unsigned = (1 << (info.bits | 1)) / 3;
    return @bitCast(pattern);
}
8056
/// Payload for `formatIntLiteral`: everything needed to render an integer
/// `Value` as a C literal of the given C type.
const FormatIntLiteralContext = struct {
    dg: *DeclGen,
    int_info: InternPool.Key.IntType,
    kind: CType.Kind,
    ctype: CType,
    val: Value,
    // Numeric base to render in: 2, 8, 10, or 16.
    base: u8,
    case: std.fmt.Case,
};
/// Renders `data.val` as a valid C literal of type `data.ctype` to `w`:
/// either a single C integer literal, or — for types that C emulates with
/// multiple smaller integers (zig_u128/zig_i128, big-int arrays) — an
/// aggregate initializer with one recursive call per C limb.
fn formatIntLiteral(data: FormatIntLiteralContext, w: *Writer) Writer.Error!void {
    const pt = data.dg.pt;
    const zcu = pt.zcu;
    const target = &data.dg.mod.resolved_target.result;
    const ctype_pool = &data.dg.ctype_pool;

    // Sizing hint for the stack-fallback allocator: large enough for the
    // common case (integers up to 128 bits, base-10 rendering) so typical
    // literals never touch the heap.
    const ExpectedContents = struct {
        const base = 10;
        const bits = 128;
        const limbs_count = BigInt.calcTwosCompLimbCount(bits);

        undef_limbs: [limbs_count]BigIntLimb,
        wrap_limbs: [limbs_count]BigIntLimb,
        to_string_buf: [bits]u8,
        to_string_limbs: [BigInt.calcToStringLimbsBufferLen(limbs_count, base)]BigIntLimb,
    };
    var stack align(@alignOf(ExpectedContents)) =
        std.heap.stackFallback(@sizeOf(ExpectedContents), data.dg.gpa);
    const allocator = stack.get();

    var undef_limbs: []BigIntLimb = &.{};
    defer allocator.free(undef_limbs);

    var int_buf: Value.BigIntSpace = undefined;
    // For undef values, synthesize a big int filled with the undef byte
    // pattern, truncated to the type's bit width; otherwise read the value
    // directly. Allocation failure is surfaced as WriteFailed because this
    // formatter can only return Writer.Error.
    const int = if (data.val.isUndef(zcu)) blk: {
        undef_limbs = allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(data.int_info.bits)) catch return error.WriteFailed;
        @memset(undef_limbs, undefPattern(BigIntLimb));

        var undef_int = BigInt.Mutable{
            .limbs = undef_limbs,
            .len = undef_limbs.len,
            .positive = true,
        };
        undef_int.truncate(undef_int.toConst(), data.int_info.signedness, data.int_info.bits);
        break :blk undef_int.toConst();
    } else data.val.toBigInt(&int_buf, zcu);
    assert(int.fitsInTwosComp(data.int_info.signedness, data.int_info.bits));

    const c_bits: usize = @intCast(data.ctype.byteSize(ctype_pool, data.dg.mod) * 8);
    var one_limbs: [BigInt.calcLimbLen(1)]BigIntLimb = undefined;
    const one = BigInt.Mutable.init(&one_limbs, 1).toConst();

    // Scratch big int: used below both for min/max detection (via
    // addWrap/subWrap) and for splitting the value into C limbs.
    var wrap = BigInt.Mutable{
        .limbs = allocator.alloc(BigIntLimb, BigInt.calcTwosCompLimbCount(c_bits)) catch return error.WriteFailed,
        .len = undefined,
        .positive = undefined,
    };
    defer allocator.free(wrap.limbs);

    // Describe how the C type stores the value: one native C integer
    // (count == 1), or an array of smaller limb integers. zig_u128/zig_i128
    // are emulated as two u64 halves in big-endian order, with a signed most
    // significant half for the signed variant (homogeneous == false).
    const c_limb_info: struct {
        ctype: CType,
        count: usize,
        endian: std.builtin.Endian,
        homogeneous: bool,
    } = switch (data.ctype.info(ctype_pool)) {
        .basic => |basic_info| switch (basic_info) {
            else => .{
                .ctype = .void,
                .count = 1,
                .endian = .little,
                .homogeneous = true,
            },
            .zig_u128, .zig_i128 => .{
                .ctype = .u64,
                .count = 2,
                .endian = .big,
                .homogeneous = false,
            },
        },
        .array => |array_info| .{
            .ctype = array_info.elem_ctype,
            .count = @intCast(array_info.len),
            .endian = target.cpu.arch.endian(),
            .homogeneous = true,
        },
        else => unreachable,
    };
    if (c_limb_info.count == 1) {
        // If value+1 wraps (or, for signed, value-1 wraps) the value is the
        // type's max (or min); emit the standard <stdint.h> define or the
        // zig_* fallback macro instead of a raw literal (e.g. -2147483648 is
        // not a valid C int literal).
        if (wrap.addWrap(int, one, data.int_info.signedness, c_bits) or
            data.int_info.signedness == .signed and wrap.subWrap(int, one, data.int_info.signedness, c_bits))
            return w.print("{s}_{s}", .{
                data.ctype.getStandardDefineAbbrev() orelse return w.print("zig_{s}Int_{c}{d}", .{
                    if (int.positive) "max" else "min", signAbbrev(data.int_info.signedness), c_bits,
                }),
                if (int.positive) "MAX" else "MIN",
            });

        if (!int.positive) try w.writeByte('-');
        try data.ctype.renderLiteralPrefix(w, data.kind, ctype_pool);

        // Base prefix, then the digits of the absolute value.
        switch (data.base) {
            2 => try w.writeAll("0b"),
            8 => try w.writeByte('0'),
            10 => {},
            16 => try w.writeAll("0x"),
            else => unreachable,
        }
        const string = int.abs().toStringAlloc(allocator, data.base, data.case) catch
            return error.WriteFailed;
        defer allocator.free(string);
        try w.writeAll(string);
    } else {
        // Multi-limb case: reinterpret the value as unsigned two's complement
        // of the full C width, then emit each C limb via a recursive call.
        try data.ctype.renderLiteralPrefix(w, data.kind, ctype_pool);
        wrap.truncate(int, .unsigned, c_bits);
        @memset(wrap.limbs[wrap.len..], 0);
        wrap.len = wrap.limbs.len;
        const limbs_per_c_limb = @divExact(wrap.len, c_limb_info.count);

        var c_limb_int_info: std.builtin.Type.Int = .{
            .signedness = undefined,
            .bits = @intCast(@divExact(c_bits, c_limb_info.count)),
        };
        var c_limb_ctype: CType = undefined;

        // Walk the C limbs in emission order; map each to its position in
        // `wrap.limbs` according to the limb array's endianness.
        var limb_offset: usize = 0;
        const most_significant_limb_i = wrap.len - limbs_per_c_limb;
        while (limb_offset < wrap.len) : (limb_offset += limbs_per_c_limb) {
            const limb_i = switch (c_limb_info.endian) {
                .little => limb_offset,
                .big => most_significant_limb_i - limb_offset,
            };
            var c_limb_mut = BigInt.Mutable{
                .limbs = wrap.limbs[limb_i..][0..limbs_per_c_limb],
                .len = undefined,
                .positive = true,
            };
            c_limb_mut.normalize(limbs_per_c_limb);

            if (limb_i == most_significant_limb_i and
                !c_limb_info.homogeneous and data.int_info.signedness == .signed)
            {
                // most significant limb is actually signed
                c_limb_int_info.signedness = .signed;
                c_limb_ctype = c_limb_info.ctype.toSigned();

                c_limb_mut.truncate(
                    c_limb_mut.toConst(),
                    .signed,
                    data.int_info.bits - limb_i * @bitSizeOf(BigIntLimb),
                );
            } else {
                c_limb_int_info.signedness = .unsigned;
                c_limb_ctype = c_limb_info.ctype;
            }

            if (limb_offset > 0) try w.writeAll(", ");
            // Recurse: each C limb is itself a single-limb integer literal.
            try formatIntLiteral(.{
                .dg = data.dg,
                .int_info = c_limb_int_info,
                .kind = data.kind,
                .ctype = c_limb_ctype,
                .val = pt.intValue_big(.comptime_int, c_limb_mut.toConst()) catch
                    return error.WriteFailed,
                .base = data.base,
                .case = data.case,
            }, w);
        }
    }
    try data.ctype.renderLiteralSuffix(w, ctype_pool);
}
8226
/// Pins an operand value into a readable local for the duration of an
/// instruction lowering, without taking ownership semantics away from the
/// caller's liveness bookkeeping.
const Materialize = struct {
    local: CValue,

    /// Values that cannot be read directly multiple times (references,
    /// constants, undef) are moved into a local first; an owned `.new_local`
    /// is demoted to a plain `.local` view; anything else passes through.
    pub fn start(f: *Function, inst: Air.Inst.Index, ty: Type, value: CValue) !Materialize {
        switch (value) {
            .local_ref, .constant, .nav_ref, .undef => return .{ .local = try f.moveCValue(inst, ty, value) },
            .new_local => |local| return .{ .local = .{ .local = local } },
            else => return .{ .local = value },
        }
    }

    /// Emits the materialized value.
    pub fn mat(self: Materialize, f: *Function, w: *Writer) !void {
        try f.writeCValue(w, self.local, .Other);
    }

    /// Releases the backing local, if one was created.
    pub fn end(self: Materialize, f: *Function, inst: Air.Inst.Index) !void {
        try f.freeCValue(inst, self.local);
    }
};
8246
/// Emits an assignment of a value of type `ctype`, choosing between a plain
/// C `=` and a `memcpy` call (required for array/vector types, which are not
/// assignable in C). Usage: `start`, write dst, `assign`, write src, `end`.
const Assignment = struct {
    ctype: CType,

    /// Begins the assignment and emits any leading tokens.
    pub fn start(f: *Function, w: *Writer, ctype: CType) !Assignment {
        const result: Assignment = .{ .ctype = ctype };
        try result.restart(f, w);
        return result;
    }

    /// Emits the leading tokens only (`memcpy(` for the memcpy strategy).
    pub fn restart(self: Assignment, f: *Function, w: *Writer) !void {
        if (self.strategy(f) == .memcpy) try w.writeAll("memcpy(");
    }

    /// Emits the separator between destination and source.
    pub fn assign(self: Assignment, f: *Function, w: *Writer) !void {
        try w.writeAll(switch (self.strategy(f)) {
            .assign => " = ",
            .memcpy => ", ",
        });
    }

    /// Emits the trailing tokens (size argument and closing paren for
    /// memcpy), the statement terminator, and a newline.
    pub fn end(self: Assignment, f: *Function, w: *Writer) !void {
        if (self.strategy(f) == .memcpy) {
            try w.writeAll(", sizeof(");
            try f.renderCType(w, self.ctype);
            try w.writeAll("))");
        }
        try w.writeByte(';');
        try f.object.newline();
    }

    /// Arrays and vectors must be copied with memcpy; everything else uses
    /// ordinary C assignment.
    fn strategy(self: Assignment, f: *Function) enum { assign, memcpy } {
        return switch (self.ctype.info(&f.object.dg.ctype_pool)) {
            .array, .vector => .memcpy,
            else => .assign,
        };
    }
};
8290
/// Lowers an elementwise operation on a vector type as a C `for` loop over
/// its elements; for non-vector types all methods are no-ops.
const Vectorize = struct {
    /// Loop induction local, or `.none` when the type is not a vector.
    index: CValue = .none,

    /// For vector types, allocates an index local, opens the `for` loop, and
    /// increases the indent; otherwise returns an inert instance.
    pub fn start(f: *Function, inst: Air.Inst.Index, w: *Writer, ty: Type) !Vectorize {
        const pt = f.object.dg.pt;
        const zcu = pt.zcu;
        if (ty.zigTypeTag(zcu) != .vector) return .{};

        const local = try f.allocLocal(inst, .usize);

        try w.writeAll("for (");
        try f.writeCValue(w, local, .Other);
        try w.print(" = {f}; ", .{try f.fmtIntLiteralDec(.zero_usize)});
        try f.writeCValue(w, local, .Other);
        try w.print(" < {f}; ", .{try f.fmtIntLiteralDec(try pt.intValue(.usize, ty.vectorLen(zcu)))});
        try f.writeCValue(w, local, .Other);
        try w.print(" += {f}) {{\n", .{try f.fmtIntLiteralDec(.one_usize)});
        f.object.indent();
        try f.object.newline();

        return .{ .index = local };
    }

    /// Emits a `[index]` subscript when iterating a vector.
    pub fn elem(self: Vectorize, f: *Function, w: *Writer) !void {
        if (self.index == .none) return;
        try w.writeByte('[');
        try f.writeCValue(w, self.index, .Other);
        try w.writeByte(']');
    }

    /// Closes the loop opened by `start` and frees the induction local.
    pub fn end(self: Vectorize, f: *Function, inst: Air.Inst.Index, w: *Writer) !void {
        if (self.index == .none) return;
        try f.object.outdent();
        try w.writeByte('}');
        try f.object.newline();
        try freeLocal(f, inst, self.index.new_local, null);
    }
};
8331
/// Returns whether `ty` is represented as a C array: arrays and vectors
/// always are, and ABI integers are when their bit width has no matching
/// native C integer type (so they must be emulated as limb arrays).
fn lowersToArray(ty: Type, zcu: *Zcu) bool {
    // Fix: the switch arms previously used redundant `return` statements
    // inside an expression that was already returned; let the switch yield
    // its value directly.
    return switch (ty.zigTypeTag(zcu)) {
        .array, .vector => true,
        else => ty.isAbiInt(zcu) and toCIntBits(@as(u32, @intCast(ty.bitSize(zcu)))) == null,
    };
}
8338
/// Frees the locals of every operand of `inst` that dies at this
/// instruction, according to the liveness tomb bits (one bit per operand,
/// least significant bit first).
fn reap(f: *Function, inst: Air.Inst.Index, operands: []const Air.Inst.Ref) !void {
    // At most bpi-1 operands fit in the small tomb; more would need BigTomb.
    assert(operands.len <= Air.Liveness.bpi - 1);
    var remaining_bits = f.liveness.getTombBits(inst);
    for (operands) |operand| {
        const operand_dies = remaining_bits & 1 != 0;
        remaining_bits >>= 1;
        if (operand_dies) try die(f, inst, operand);
    }
}
8349
/// Removes `ref` from the value map and, if it was backed by a local, frees
/// that local for reuse. Interned (non-instruction) refs are ignored.
fn die(f: *Function, inst: Air.Inst.Index, ref: Air.Inst.Ref) !void {
    const ref_inst = ref.toIndex() orelse return;
    const removed = f.value_map.fetchRemove(ref) orelse return;
    switch (removed.value) {
        .new_local, .local => |local_index| try freeLocal(f, inst, local_index, ref_inst),
        else => {},
    }
}
8359
/// Returns local `local_index` to the free-locals pool, keyed by its type,
/// so later allocations of the same type can reuse it. `inst` and `ref_inst`
/// are for debug logging only.
fn freeLocal(f: *Function, inst: ?Air.Inst.Index, local_index: LocalIndex, ref_inst: ?Air.Inst.Index) !void {
    const gpa = f.object.dg.gpa;
    const local = &f.locals.items[local_index];
    if (inst) |i| {
        if (ref_inst) |operand|
            log.debug("%{d}: freeing t{d} (operand %{d})", .{ @intFromEnum(i), local_index, operand })
        else
            log.debug("%{d}: freeing t{d}", .{ @intFromEnum(i), local_index });
    } else if (ref_inst) |operand| {
        log.debug("freeing t{d} (operand %{d})", .{ local_index, operand });
    } else {
        log.debug("freeing t{d}", .{local_index});
    }
    const slot = try f.free_locals_map.getOrPut(gpa, local.getType());
    if (!slot.found_existing) slot.value_ptr.* = .{};
    if (std.debug.runtime_safety) {
        // If this trips, an unfreeable allocation was attempted to be freed.
        assert(!f.allocs.contains(local_index));
    }
    // If this trips, it means a local is being inserted into the
    // free_locals map while it already exists in the map, which is not
    // allowed.
    try slot.value_ptr.putNoClobber(gpa, local_index, {});
}
8387
/// Iterator over the extended liveness tomb bits of an instruction with more
/// operands than fit in the small per-instruction tomb.
const BigTomb = struct {
    f: *Function,
    inst: Air.Inst.Index,
    lbt: Air.Liveness.BigTomb,

    /// Consumes the next tomb bit; frees the operand's local if it dies here.
    fn feed(bt: *BigTomb, op_ref: Air.Inst.Ref) !void {
        if (bt.lbt.feed()) try die(bt.f, bt.inst, op_ref);
    }
};
8399
/// Creates a `BigTomb` iterator for `inst`, for instructions whose operand
/// count exceeds the small tomb capacity.
fn iterateBigTomb(f: *Function, inst: Air.Inst.Index) BigTomb {
    return .{ .f = f, .inst = inst, .lbt = f.liveness.iterateBigTomb(inst) };
}
8407
/// A naive clone of this map would create copies of the ArrayList which is
/// stored in the values. This function additionally clones the values.
/// Caller owns the returned map; on error, nothing is leaked.
fn cloneFreeLocalsMap(gpa: Allocator, map: *LocalsMap) !LocalsMap {
    var cloned = try map.clone(gpa);
    const values = cloned.values();
    var i: usize = 0;
    errdefer {
        // Fix: free the deep-cloned values *before* deinitializing the map.
        // `values` points into `cloned`'s backing storage, so the previous
        // order (map deinit first, then `values[i].deinit`) read the value
        // structs out of freed memory on the error path.
        // Only values[0..i] have been deep-cloned; the rest still alias the
        // original map's buffers and must not be freed here.
        while (i > 0) {
            i -= 1;
            values[i].deinit(gpa);
        }
        cloned.deinit(gpa);
    }
    while (i < values.len) : (i += 1) {
        values[i] = try values[i].clone(gpa);
    }
    return cloned;
}
8426
/// Releases every value list in the map, then the map itself.
fn deinitFreeLocalsMap(gpa: Allocator, map: *LocalsMap) void {
    for (map.values()) |*free_list| free_list.deinit(gpa);
    map.deinit(gpa);
}