const std = @import("std");
const build_options = @import("build_options");
const builtin = @import("builtin");
const assert = std.debug.assert;
const link = @import("link.zig");
const log = std.log.scoped(.codegen);
const mem = std.mem;
const math = std.math;
const target_util = @import("target.zig");
const trace = @import("tracy.zig").trace;

const Air = @import("Air.zig");
const Allocator = mem.Allocator;
const Compilation = @import("Compilation.zig");
const ErrorMsg = Zcu.ErrorMsg;
const InternPool = @import("InternPool.zig");
const Zcu = @import("Zcu.zig");

const Type = @import("Type.zig");
const Value = @import("Value.zig");
const Zir = std.zig.Zir;
const Alignment = InternPool.Alignment;
const dev = @import("dev.zig");

pub const aarch64 = @import("codegen/aarch64.zig");

pub const CodeGenError = GenerateSymbolError || error{
    /// Indicates the error is already stored in Zcu `failed_codegen`.
    CodegenFail,
};

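/// Maps a self-hosted backend to the `dev.Feature` that gates it, so callers can
/// `dev.check` that this compiler build actually includes the backend before dispatching.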
fn devFeatureForBackend(backend: std.builtin.CompilerBackend) dev.Feature {
    return switch (backend) {
        .other, .stage1 => unreachable,
        .stage2_aarch64 => .aarch64_backend,
        .stage2_arm => .arm_backend,
        .stage2_c => .c_backend,
        .stage2_llvm => .llvm_backend,
        .stage2_powerpc => unreachable,
        .stage2_riscv64 => .riscv64_backend,
        .stage2_sparc64 => .sparc64_backend,
        .stage2_spirv => .spirv_backend,
        .stage2_wasm => .wasm_backend,
        .stage2_x86 => .x86_backend,
        .stage2_x86_64 => .x86_64_backend,
        _ => unreachable,
    };
}

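/// Comptime-resolves a backend tag to the namespace implementing its code generation.
/// Backends with no self-hosted implementation here are unreachable.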
fn importBackend(comptime backend: std.builtin.CompilerBackend) type {
    return switch (backend) {
        .other, .stage1 => unreachable,
        .stage2_aarch64 => aarch64,
        .stage2_arm => unreachable,
        .stage2_c => @import("codegen/c.zig"),
        .stage2_llvm => @import("codegen/llvm.zig"),
        .stage2_powerpc => unreachable,
        .stage2_riscv64 => @import("codegen/riscv64/CodeGen.zig"),
        .stage2_sparc64 => @import("codegen/sparc64/CodeGen.zig"),
        .stage2_spirv => @import("codegen/spirv/CodeGen.zig"),
        .stage2_wasm => @import("codegen/wasm/CodeGen.zig"),
        .stage2_x86, .stage2_x86_64 => @import("codegen/x86_64/CodeGen.zig"),
        _ => unreachable,
    };
}

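/// Returns the AIR legalization features requested by the backend that will compile
/// `nav_index`, or `null` if that backend requests no legalization.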
pub fn legalizeFeatures(pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) ?*const Air.Legalize.Features {
    const zcu = pt.zcu;
    const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result;
    switch (target_util.zigBackend(target, zcu.comp.config.use_llvm)) {
        else => unreachable,
        inline .stage2_llvm,
        .stage2_c,
        .stage2_wasm,
        .stage2_x86_64,
        .stage2_aarch64,
        .stage2_x86,
        .stage2_riscv64,
        .stage2_sparc64,
        .stage2_spirv,
        => |backend| {
            dev.check(devFeatureForBackend(backend));
            return importBackend(backend).legalizeFeatures(target);
        },
    }
}

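/// Whether the backend that will compile `nav_index` wants `Air.Liveness` to be computed
/// and passed to `generateFunction`; the self-hosted aarch64 backend opts out.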
pub fn wantsLiveness(pt: Zcu.PerThread, nav_index: InternPool.Nav.Index) bool {
    const zcu = pt.zcu;
    const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result;
    return switch (target_util.zigBackend(target, zcu.comp.config.use_llvm)) {
        else => true,
        .stage2_aarch64 => false,
    };
}

/// Every code generation backend has a different MIR representation. However, we want to pass
/// MIR from codegen to the linker *regardless* of which backend is in use. So, we use this: a
/// union of all MIR types. The active tag is known from the backend in use; see `AnyMir.tag`.
pub const AnyMir = union {
    aarch64: if (dev.env.supports(.aarch64_backend)) @import("codegen/aarch64/Mir.zig") else noreturn,
    riscv64: if (dev.env.supports(.riscv64_backend)) @import("codegen/riscv64/Mir.zig") else noreturn,
    sparc64: if (dev.env.supports(.sparc64_backend)) @import("codegen/sparc64/Mir.zig") else noreturn,
    x86_64: if (dev.env.supports(.x86_64_backend)) @import("codegen/x86_64/Mir.zig") else noreturn,
    wasm: if (dev.env.supports(.wasm_backend)) @import("codegen/wasm/Mir.zig") else noreturn,
    c: if (dev.env.supports(.c_backend)) @import("codegen/c.zig").Mir else noreturn,
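    // Backends excluded from this build (per `dev.env.supports`) get a `noreturn` payload,
    // so their MIR implementations are never analyzed or linked into the compiler.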

    pub inline fn tag(comptime backend: std.builtin.CompilerBackend) []const u8 {
        return switch (backend) {
            .stage2_aarch64 => "aarch64",
            .stage2_riscv64 => "riscv64",
            .stage2_sparc64 => "sparc64",
            .stage2_x86_64 => "x86_64",
            .stage2_wasm => "wasm",
            .stage2_c => "c",
            else => unreachable,
        };
    }

    pub fn deinit(mir: *AnyMir, zcu: *const Zcu) void {
        const gpa = zcu.gpa;
        const backend = target_util.zigBackend(&zcu.root_mod.resolved_target.result, zcu.comp.config.use_llvm);
        switch (backend) {
            else => unreachable,
            inline .stage2_aarch64,
            .stage2_riscv64,
            .stage2_sparc64,
            .stage2_x86_64,
            .stage2_wasm,
            .stage2_c,
            => |backend_ct| @field(mir, tag(backend_ct)).deinit(gpa),
        }
    }
};

/// Runs code generation for a function. This process converts the `Air` emitted by `Sema`,
/// alongside annotated `Liveness` data, to machine code in the form of MIR (see `AnyMir`).
///
/// This is supposed to be a "pure" process, but some backends are currently buggy; see
/// `Zcu.Feature.separate_thread` for details.
pub fn generateFunction(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    func_index: InternPool.Index,
    air: *const Air,
    liveness: *const ?Air.Liveness,
) CodeGenError!AnyMir {
    const zcu = pt.zcu;
    const func = zcu.funcInfo(func_index);
    const target = &zcu.navFileScope(func.owner_nav).mod.?.resolved_target.result;
    switch (target_util.zigBackend(target, false)) {
        else => unreachable,
        inline .stage2_aarch64,
        .stage2_riscv64,
        .stage2_sparc64,
        .stage2_x86_64,
        .stage2_wasm,
        .stage2_c,
        => |backend| {
            dev.check(devFeatureForBackend(backend));
            const CodeGen = importBackend(backend);
            const mir = try CodeGen.generate(lf, pt, src_loc, func_index, air, liveness);
            return @unionInit(AnyMir, AnyMir.tag(backend), mir);
        },
    }
}

/// Converts the MIR returned by `generateFunction` to finalized machine code to be placed in
/// the output binary. This is called from linker implementations, and may query linker state.
///
/// This function is not called for the C backend, as `link.C` directly understands its MIR.
pub fn emitFunction(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    func_index: InternPool.Index,
    atom_index: u32,
    any_mir: *const AnyMir,
    w: *std.Io.Writer,
    debug_output: link.File.DebugInfoOutput,
) (CodeGenError || std.Io.Writer.Error)!void {
    const zcu = pt.zcu;
    const func = zcu.funcInfo(func_index);
    const target = &zcu.navFileScope(func.owner_nav).mod.?.resolved_target.result;
    switch (target_util.zigBackend(target, zcu.comp.config.use_llvm)) {
        else => unreachable,
        inline .stage2_aarch64,
        .stage2_riscv64,
        .stage2_sparc64,
        .stage2_x86_64,
        => |backend| {
            dev.check(devFeatureForBackend(backend));
            const mir = &@field(any_mir, AnyMir.tag(backend));
            return mir.emit(lf, pt, src_loc, func_index, atom_index, w, debug_output);
        },
    }
}

pub fn generateLazyFunction(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    lazy_sym: link.File.LazySymbol,
    atom_index: u32,
    w: *std.Io.Writer,
    debug_output: link.File.DebugInfoOutput,
) (CodeGenError || std.Io.Writer.Error)!void {
    const zcu = pt.zcu;
    const target = if (Type.fromInterned(lazy_sym.ty).typeDeclInstAllowGeneratedTag(zcu)) |inst_index|
        &zcu.fileByIndex(inst_index.resolveFile(&zcu.intern_pool)).mod.?.resolved_target.result
    else
        zcu.getTarget();
    switch (target_util.zigBackend(target, zcu.comp.config.use_llvm)) {
        else => unreachable,
        inline .stage2_riscv64, .stage2_x86_64 => |backend| {
            dev.check(devFeatureForBackend(backend));
            return importBackend(backend).generateLazy(lf, pt, src_loc, lazy_sym, atom_index, w, debug_output);
        },
    }
}

pub fn generateLazySymbol(
    bin_file: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    lazy_sym: link.File.LazySymbol,
    // TODO don't use an "out" parameter like this; put it in the result instead
    alignment: *Alignment,
    w: *std.Io.Writer,
    debug_output: link.File.DebugInfoOutput,
    reloc_parent: link.File.RelocInfo.Parent,
) (CodeGenError || std.Io.Writer.Error)!void {
    const tracy = trace(@src());
    defer tracy.end();

    const comp = bin_file.comp;
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const target = &comp.root_mod.resolved_target.result;
    const endian = target.cpu.arch.endian();

    log.debug("generateLazySymbol: kind = {s}, ty = {f}", .{
        @tagName(lazy_sym.kind),
        Type.fromInterned(lazy_sym.ty).fmt(pt),
    });

    if (lazy_sym.kind == .code) {
        alignment.* = target_util.defaultFunctionAlignment(target);
        return generateLazyFunction(bin_file, pt, src_loc, lazy_sym, reloc_parent.atom_index, w, debug_output);
    }

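    // The `anyerror` name table layout: a u32 count, one u32 byte offset per error name,
    // plus one trailing u32 end offset (omitted when there are no names), followed by the
    // NUL-terminated name strings themselves.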
    if (lazy_sym.ty == .anyerror_type) {
        alignment.* = .@"4";
        const err_names = ip.global_error_set.getNamesFromMainThread();
        const strings_start: u32 = @intCast(4 * (1 + err_names.len + @intFromBool(err_names.len > 0)));
        var string_index = strings_start;
        try w.rebase(w.end, string_index);
        w.writeInt(u32, @intCast(err_names.len), endian) catch unreachable;
        if (err_names.len == 0) return;
        for (err_names) |err_name_nts| {
            w.writeInt(u32, string_index, endian) catch unreachable;
            string_index += @intCast(err_name_nts.toSlice(ip).len + 1);
        }
        w.writeInt(u32, string_index, endian) catch unreachable;
        try w.rebase(w.end, string_index - strings_start);
        for (err_names) |err_name_nts| {
            w.writeAll(err_name_nts.toSlice(ip)) catch unreachable;
            w.writeByte(0) catch unreachable;
        }
    } else if (Type.fromInterned(lazy_sym.ty).zigTypeTag(zcu) == .@"enum") {
        alignment.* = .@"1";
        const enum_ty = Type.fromInterned(lazy_sym.ty);
        const tag_names = enum_ty.enumFields(zcu);
        for (0..tag_names.len) |tag_index| {
            const tag_name = tag_names.get(ip)[tag_index].toSlice(ip);
            try w.rebase(w.end, tag_name.len + 1);
            w.writeAll(tag_name) catch unreachable;
            w.writeByte(0) catch unreachable;
        }
    } else {
        return zcu.codegenFailType(lazy_sym.ty, "TODO implement generateLazySymbol for {s} {f}", .{
            @tagName(lazy_sym.kind), Type.fromInterned(lazy_sym.ty).fmt(pt),
        });
    }
}

pub const GenerateSymbolError = error{
    OutOfMemory,
    /// Compiler was asked to operate on a number larger than supported.
    Overflow,
    /// Compiler was asked to produce a non-byte-aligned relocation.
    RelocationNotByteAligned,
};

pub fn generateSymbol(
    bin_file: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    val: Value,
    w: *std.Io.Writer,
    reloc_parent: link.File.RelocInfo.Parent,
) (GenerateSymbolError || std.Io.Writer.Error)!void {
    const tracy = trace(@src());
    defer tracy.end();

    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty = val.typeOf(zcu);

    const target = zcu.getTarget();
    const endian = target.cpu.arch.endian();

    log.debug("generateSymbol: val = {f}", .{val.fmtValue(pt)});

    if (val.isUndef(zcu)) {
        const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;
        try w.splatByteAll(0xaa, abi_size);
        return;
    }

    switch (ip.indexToKey(val.toIntern())) {
        .int_type,
        .ptr_type,
        .array_type,
        .vector_type,
        .opt_type,
        .anyframe_type,
        .error_union_type,
        .simple_type,
        .struct_type,
        .tuple_type,
        .union_type,
        .opaque_type,
        .enum_type,
        .func_type,
        .error_set_type,
        .inferred_error_set_type,
        => unreachable, // types, not values

        .undef => unreachable, // handled above
        .simple_value => |simple_value| switch (simple_value) {
            .undefined => unreachable, // non-runtime value
            .void => unreachable, // non-runtime value
            .null => unreachable, // non-runtime value
            .@"unreachable" => unreachable, // non-runtime value
            .empty_tuple => return,
            .false, .true => try w.writeByte(switch (simple_value) {
                .false => 0,
                .true => 1,
                else => unreachable,
            }),
        },
        .variable,
        .@"extern",
        .func,
        .enum_literal,
        .empty_enum_value,
        => unreachable, // non-runtime values
        .int => {
            const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;
            var space: Value.BigIntSpace = undefined;
            const int_val = val.toBigInt(&space, zcu);
            int_val.writeTwosComplement(try w.writableSlice(abi_size), endian);
        },
        .err => |err| {
            const int = try pt.getErrorValue(err.name);
            try w.writeInt(u16, @intCast(int), endian);
        },
        .error_union => |error_union| {
            const payload_ty = ty.errorUnionPayload(zcu);
            const err_val: u16 = switch (error_union.val) {
                .err_name => |err_name| @intCast(try pt.getErrorValue(err_name)),
                .payload => 0,
            };

            if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
                try w.writeInt(u16, err_val, endian);
                return;
            }

            const payload_align = payload_ty.abiAlignment(zcu);
            const error_align = Type.anyerror.abiAlignment(zcu);
            const abi_align = ty.abiAlignment(zcu);

            // The error value comes first when its alignment is greater than the payload's.
            if (error_align.order(payload_align) == .gt) {
                try w.writeInt(u16, err_val, endian);
            }

            // emit payload part of the error union
            {
                const begin = w.end;
                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (error_union.val) {
                    .err_name => try pt.intern(.{ .undef = payload_ty.toIntern() }),
                    .payload => |payload| payload,
                }), w, reloc_parent);
                const unpadded_end = w.end - begin;
                const padded_end = abi_align.forward(unpadded_end);
                const padding = math.cast(usize, padded_end - unpadded_end) orelse return error.Overflow;

                if (padding > 0) {
                    try w.splatByteAll(0, padding);
                }
            }

            // The error value comes last when its alignment does not exceed the payload's.
            if (error_align.compare(.lte, payload_align)) {
                const begin = w.end;
                try w.writeInt(u16, err_val, endian);
                const unpadded_end = w.end - begin;
                const padded_end = abi_align.forward(unpadded_end);
                const padding = math.cast(usize, padded_end - unpadded_end) orelse return error.Overflow;

                if (padding > 0) {
                    try w.splatByteAll(0, padding);
                }
            }
        },
        .enum_tag => |enum_tag| {
            const int_tag_ty = ty.intTagType(zcu);
            try generateSymbol(bin_file, pt, src_loc, try pt.getCoerced(Value.fromInterned(enum_tag.int), int_tag_ty), w, reloc_parent);
        },
        .float => |float| storage: switch (float.storage) {
            .f16 => |f16_val| try w.writeInt(u16, @bitCast(f16_val), endian),
            .f32 => |f32_val| try w.writeInt(u32, @bitCast(f32_val), endian),
            .f64 => |f64_val| try w.writeInt(u64, @bitCast(f64_val), endian),
            .f80 => |f80_val| {
                try w.writeInt(u80, @bitCast(f80_val), endian);
                const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;
                try w.splatByteAll(0, abi_size - 10);
            },
            .f128 => |f128_val| switch (Type.fromInterned(float.ty).floatBits(target)) {
                else => unreachable,
                16 => continue :storage .{ .f16 = @floatCast(f128_val) },
                32 => continue :storage .{ .f32 = @floatCast(f128_val) },
                64 => continue :storage .{ .f64 = @floatCast(f128_val) },
                128 => try w.writeInt(u128, @bitCast(f128_val), endian),
            },
        },
        .ptr => try lowerPtr(bin_file, pt, src_loc, val.toIntern(), w, reloc_parent, 0),
        .slice => |slice| {
            try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(slice.ptr), w, reloc_parent);
            try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(slice.len), w, reloc_parent);
        },
        .opt => {
            const payload_type = ty.optionalChild(zcu);
            const payload_val = val.optionalValue(zcu);
            const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;

            if (ty.optionalReprIsPayload(zcu)) {
                if (payload_val) |value| {
                    try generateSymbol(bin_file, pt, src_loc, value, w, reloc_parent);
                } else {
                    try w.splatByteAll(0, abi_size);
                }
            } else {
                const padding = abi_size - (math.cast(usize, payload_type.abiSize(zcu)) orelse return error.Overflow) - 1;
                if (payload_type.hasRuntimeBits(zcu)) {
                    const value = payload_val orelse Value.fromInterned(try pt.intern(.{
                        .undef = payload_type.toIntern(),
                    }));
                    try generateSymbol(bin_file, pt, src_loc, value, w, reloc_parent);
                }
                try w.writeByte(@intFromBool(payload_val != null));
                try w.splatByteAll(0, padding);
            }
        },
        .aggregate => |aggregate| switch (ip.indexToKey(ty.toIntern())) {
            .array_type => |array_type| switch (aggregate.storage) {
                .bytes => |bytes| try w.writeAll(bytes.toSlice(array_type.lenIncludingSentinel(), ip)),
                .elems, .repeated_elem => {
                    var index: u64 = 0;
                    while (index < array_type.lenIncludingSentinel()) : (index += 1) {
                        try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (aggregate.storage) {
                            .bytes => unreachable,
                            .elems => |elems| elems[@intCast(index)],
                            .repeated_elem => |elem| if (index < array_type.len)
                                elem
                            else
                                array_type.sentinel,
                        }), w, reloc_parent);
                    }
                },
            },
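            // Bool vectors are packed one element per bit: element `i` lands in bit `i`
            // counted from the least significant bit on little-endian targets, and from
            // the opposite end on big-endian targets; unused padding bits keep the 0xaa
            // undef pattern.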
            .vector_type => |vector_type| {
                const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;
                if (vector_type.child == .bool_type) {
                    const bytes = try w.writableSlice(abi_size);
                    @memset(bytes, 0xaa);
                    var index: usize = 0;
                    const len = math.cast(usize, vector_type.len) orelse return error.Overflow;
                    while (index < len) : (index += 1) {
                        const bit_index = switch (endian) {
                            .big => len - 1 - index,
                            .little => index,
                        };
                        const byte = &bytes[bit_index / 8];
                        const mask = @as(u8, 1) << @truncate(bit_index);
                        if (switch (switch (aggregate.storage) {
                            .bytes => unreachable,
                            .elems => |elems| elems[index],
                            .repeated_elem => |elem| elem,
                        }) {
                            .bool_true => true,
                            .bool_false => false,
                            else => |elem| switch (ip.indexToKey(elem)) {
                                .undef => continue,
                                .int => |int| switch (int.storage) {
                                    .u64 => |x| switch (x) {
                                        0 => false,
                                        1 => true,
                                        else => unreachable,
                                    },
                                    .i64 => |x| switch (x) {
                                        -1 => true,
                                        0 => false,
                                        else => unreachable,
                                    },
                                    else => unreachable,
                                },
                                else => unreachable,
                            },
                        }) byte.* |= mask else byte.* &= ~mask;
                    }
                } else {
                    switch (aggregate.storage) {
                        .bytes => |bytes| try w.writeAll(bytes.toSlice(vector_type.len, ip)),
                        .elems, .repeated_elem => {
                            var index: u64 = 0;
                            while (index < vector_type.len) : (index += 1) {
                                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(switch (aggregate.storage) {
                                    .bytes => unreachable,
                                    .elems => |elems| elems[math.cast(usize, index) orelse return error.Overflow],
                                    .repeated_elem => |elem| elem,
                                }), w, reloc_parent);
                            }
                        },
                    }

                    const padding = abi_size -
                        (math.cast(usize, Type.fromInterned(vector_type.child).abiSize(zcu) * vector_type.len) orelse
                            return error.Overflow);
                    if (padding > 0) try w.splatByteAll(0, padding);
                }
            },
            .tuple_type => |tuple| {
                const struct_begin = w.end;
                for (tuple.types.get(ip), tuple.values.get(ip), 0..) |field_ty, field_val, field_index| {
                    if (field_val != .none) continue;
                    if (!Type.fromInterned(field_ty).hasRuntimeBits(zcu)) continue;

                    try w.splatByteAll(0, math.cast(usize, struct_begin +
                        Type.fromInterned(field_ty).abiAlignment(zcu).forward(w.end - struct_begin) - w.end) orelse
                        return error.Overflow);
                    try generateSymbol(bin_file, pt, src_loc, .fromInterned(switch (aggregate.storage) {
                        .bytes => |bytes| try pt.intern(.{ .int = .{
                            .ty = field_ty,
                            .storage = .{ .u64 = bytes.at(field_index, ip) },
                        } }),
                        .elems => |elems| elems[field_index],
                        .repeated_elem => |elem| elem,
                    }), w, reloc_parent);
                }
                try w.splatByteAll(0, math.cast(usize, struct_begin + ty.abiSize(zcu) - w.end) orelse
                    return error.Overflow);
            },
            .struct_type => {
                const struct_type = ip.loadStructType(ty.toIntern());
                switch (struct_type.layout) {
                    .@"packed" => {
                        const abi_size = math.cast(usize, ty.abiSize(zcu)) orelse return error.Overflow;
                        const start = w.end;
                        const buffer = try w.writableSlice(abi_size);
                        @memset(buffer, 0);
                        var bits: u16 = 0;

                        for (struct_type.field_types.get(ip), 0..) |field_ty, index| {
                            const field_val = switch (aggregate.storage) {
                                .bytes => |bytes| try pt.intern(.{ .int = .{
                                    .ty = field_ty,
                                    .storage = .{ .u64 = bytes.at(index, ip) },
                                } }),
                                .elems => |elems| elems[index],
                                .repeated_elem => |elem| elem,
                            };

                            // A pointer field may reference a decl that must be marked used,
                            // and it may also require a relocation, so pointers are handled separately.
                            if (Type.fromInterned(field_ty).zigTypeTag(zcu) == .pointer) {
                                const field_offset = std.math.divExact(u16, bits, 8) catch |err| switch (err) {
                                    error.DivisionByZero => unreachable,
                                    error.UnexpectedRemainder => return error.RelocationNotByteAligned,
                                };
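                                // Seek the writer back to this field's byte offset so the
                                // pointer is emitted in place along with its relocation; the
                                // defer below restores the writer to the end of the struct.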
                                w.end = start + field_offset;
                                defer {
                                    assert(w.end == start + field_offset + @divExact(target.ptrBitWidth(), 8));
                                    w.end = start + abi_size;
                                }
                                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(field_val), w, reloc_parent);
                            } else {
                                Value.fromInterned(field_val).writeToPackedMemory(.fromInterned(field_ty), pt, buffer, bits) catch unreachable;
                            }
                            bits += @intCast(Type.fromInterned(field_ty).bitSize(zcu));
                        }
                    },
                    .auto, .@"extern" => {
                        const struct_begin = w.end;
                        const field_types = struct_type.field_types.get(ip);
                        const offsets = struct_type.offsets.get(ip);

                        var it = struct_type.iterateRuntimeOrder(ip);
                        while (it.next()) |field_index| {
                            const field_ty = field_types[field_index];
                            if (!Type.fromInterned(field_ty).hasRuntimeBits(zcu)) continue;

                            const field_val = switch (ip.indexToKey(val.toIntern()).aggregate.storage) {
                                .bytes => |bytes| try pt.intern(.{ .int = .{
                                    .ty = field_ty,
                                    .storage = .{ .u64 = bytes.at(field_index, ip) },
                                } }),
                                .elems => |elems| elems[field_index],
                                .repeated_elem => |elem| elem,
                            };

                            const padding = math.cast(
                                usize,
                                offsets[field_index] - (w.end - struct_begin),
                            ) orelse return error.Overflow;
                            if (padding > 0) try w.splatByteAll(0, padding);

                            try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(field_val), w, reloc_parent);
                        }

                        const size = struct_type.sizeUnordered(ip);
                        const alignment = struct_type.flagsUnordered(ip).alignment.toByteUnits().?;

                        const padding = math.cast(
                            usize,
                            std.mem.alignForward(u64, size, @max(alignment, 1)) - (w.end - struct_begin),
                        ) orelse return error.Overflow;
                        if (padding > 0) try w.splatByteAll(0, padding);
                    },
                }
            },
            else => unreachable,
        },
        .un => |un| {
            const layout = ty.unionGetLayout(zcu);

            if (layout.payload_size == 0) {
                return generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), w, reloc_parent);
            }

            // Check if we should store the tag first.
            if (layout.tag_size > 0 and layout.tag_align.compare(.gte, layout.payload_align)) {
                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), w, reloc_parent);
            }

            const union_obj = zcu.typeToUnion(ty).?;
            if (un.tag != .none) {
                const field_index = ty.unionTagFieldIndex(Value.fromInterned(un.tag), zcu).?;
                const field_ty = Type.fromInterned(union_obj.field_types.get(ip)[field_index]);
                if (!field_ty.hasRuntimeBits(zcu)) {
                    try w.splatByteAll(0xaa, math.cast(usize, layout.payload_size) orelse return error.Overflow);
                } else {
                    try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.val), w, reloc_parent);

                    const padding = math.cast(usize, layout.payload_size - field_ty.abiSize(zcu)) orelse return error.Overflow;
                    if (padding > 0) {
                        try w.splatByteAll(0, padding);
                    }
                }
            } else {
                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.val), w, reloc_parent);
            }

            if (layout.tag_size > 0 and layout.tag_align.compare(.lt, layout.payload_align)) {
                try generateSymbol(bin_file, pt, src_loc, Value.fromInterned(un.tag), w, reloc_parent);

                if (layout.padding > 0) {
                    try w.splatByteAll(0, layout.padding);
                }
            }
        },
        .memoized_call => unreachable,
    }
}

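/// Lowers a constant pointer by recursively peeling `base_addr` layers (error union
/// payload, optional payload, field pointers), accumulating the byte offset, until it
/// bottoms out at a nav, a uav, or an integer address.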
fn lowerPtr(
    bin_file: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    ptr_val: InternPool.Index,
    w: *std.Io.Writer,
    reloc_parent: link.File.RelocInfo.Parent,
    prev_offset: u64,
) (GenerateSymbolError || std.Io.Writer.Error)!void {
    const zcu = pt.zcu;
    const ptr = zcu.intern_pool.indexToKey(ptr_val).ptr;
    const offset: u64 = prev_offset + ptr.byte_offset;
    return switch (ptr.base_addr) {
        .nav => |nav| try lowerNavRef(bin_file, pt, nav, w, reloc_parent, offset),
        .uav => |uav| try lowerUavRef(bin_file, pt, src_loc, uav, w, reloc_parent, offset),
        .int => try generateSymbol(bin_file, pt, src_loc, try pt.intValue(Type.usize, offset), w, reloc_parent),
        .eu_payload => |eu_ptr| try lowerPtr(
            bin_file,
            pt,
            src_loc,
            eu_ptr,
            w,
            reloc_parent,
            offset + errUnionPayloadOffset(
                Value.fromInterned(eu_ptr).typeOf(zcu).childType(zcu).errorUnionPayload(zcu),
                zcu,
            ),
        ),
        .opt_payload => |opt_ptr| try lowerPtr(bin_file, pt, src_loc, opt_ptr, w, reloc_parent, offset),
        .field => |field| {
            const base_ptr = Value.fromInterned(field.base);
            const base_ty = base_ptr.typeOf(zcu).childType(zcu);
            const field_off: u64 = switch (base_ty.zigTypeTag(zcu)) {
                .pointer => off: {
                    assert(base_ty.isSlice(zcu));
                    break :off switch (field.index) {
                        Value.slice_ptr_index => 0,
                        Value.slice_len_index => @divExact(zcu.getTarget().ptrBitWidth(), 8),
                        else => unreachable,
                    };
                },
                .@"struct", .@"union" => switch (base_ty.containerLayout(zcu)) {
                    .auto => base_ty.structFieldOffset(@intCast(field.index), zcu),
                    .@"extern", .@"packed" => unreachable,
                },
                else => unreachable,
            };
            return lowerPtr(bin_file, pt, src_loc, field.base, w, reloc_parent, offset + field_off);
        },
        .arr_elem, .comptime_field, .comptime_alloc => unreachable,
    };
}

fn lowerUavRef(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    uav: InternPool.Key.Ptr.BaseAddr.Uav,
    w: *std.Io.Writer,
    reloc_parent: link.File.RelocInfo.Parent,
    offset: u64,
) (GenerateSymbolError || std.Io.Writer.Error)!void {
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const comp = lf.comp;
    const target = &comp.root_mod.resolved_target.result;
    const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
    const uav_val = uav.val;
    const uav_ty = Type.fromInterned(ip.typeOf(uav_val));
    const is_fn_body = uav_ty.zigTypeTag(zcu) == .@"fn";

    log.debug("lowerUavRef: ty = {f}", .{uav_ty.fmt(pt)});

    if (!is_fn_body and !uav_ty.hasRuntimeBits(zcu)) {
        try w.splatByteAll(0xaa, ptr_width_bytes);
        return;
    }

    switch (lf.tag) {
        .c => unreachable,
        .spirv => unreachable,
        .wasm => {
            dev.check(link.File.Tag.wasm.devFeature());
            const wasm = lf.cast(.wasm).?;
            assert(reloc_parent == .none);
            try wasm.addUavReloc(w.end, uav.val, uav.orig_ty, @intCast(offset));
            try w.splatByteAll(0, ptr_width_bytes);
            return;
        },
        else => {},
    }

    const uav_align = Type.fromInterned(uav.orig_ty).ptrAlignment(zcu);
    switch (try lf.lowerUav(pt, uav_val, uav_align, src_loc)) {
        .sym_index => {},
        .fail => |em| std.debug.panic("TODO rework lowerUav. internal error: {s}", .{em.msg}),
    }

    const vaddr = lf.getUavVAddr(uav_val, .{
        .parent = reloc_parent,
        .offset = w.end,
        .addend = @intCast(offset),
    }) catch |err| switch (err) {
        error.OutOfMemory => return error.OutOfMemory,
        else => |e| std.debug.panic("TODO rework lowerUav. internal error: {t}", .{e}),
    };
    const endian = target.cpu.arch.endian();
    switch (ptr_width_bytes) {
        2 => try w.writeInt(u16, @intCast(vaddr), endian),
        4 => try w.writeInt(u32, @intCast(vaddr), endian),
        8 => try w.writeInt(u64, vaddr, endian),
        else => unreachable,
    }
}

fn lowerNavRef(
    lf: *link.File,
    pt: Zcu.PerThread,
    nav_index: InternPool.Nav.Index,
    w: *std.Io.Writer,
    reloc_parent: link.File.RelocInfo.Parent,
    offset: u64,
) (GenerateSymbolError || std.Io.Writer.Error)!void {
    const zcu = pt.zcu;
    const gpa = zcu.gpa;
    const ip = &zcu.intern_pool;
    const target = &zcu.navFileScope(nav_index).mod.?.resolved_target.result;
    const ptr_width_bytes = @divExact(target.ptrBitWidth(), 8);
    const is_obj = lf.comp.config.output_mode == .Obj;
    const nav_ty = Type.fromInterned(ip.getNav(nav_index).typeOf(ip));
    const is_fn_body = nav_ty.zigTypeTag(zcu) == .@"fn";

    if (!is_fn_body and !nav_ty.hasRuntimeBits(zcu)) {
        try w.splatByteAll(0xaa, ptr_width_bytes);
        return;
    }

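    // wasm has no flat virtual address space to patch at this stage: function pointers
    // become indirect function table indices, and data pointers are resolved later via
    // fixups or object relocations rather than through `getNavVAddr`.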
    switch (lf.tag) {
        .c => unreachable,
        .spirv => unreachable,
        .wasm => {
            dev.check(link.File.Tag.wasm.devFeature());
            const wasm = lf.cast(.wasm).?;
            assert(reloc_parent == .none);
            if (is_fn_body) {
                const gop = try wasm.zcu_indirect_function_set.getOrPut(gpa, nav_index);
                if (!gop.found_existing) gop.value_ptr.* = {};
                if (is_obj) {
                    @panic("TODO add out_reloc for this");
                } else {
                    try wasm.func_table_fixups.append(gpa, .{
                        .table_index = @enumFromInt(gop.index),
                        .offset = @intCast(w.end),
                    });
                }
            } else {
                if (is_obj) {
                    try wasm.out_relocs.append(gpa, .{
                        .offset = @intCast(w.end),
                        .pointee = .{ .symbol_index = try wasm.navSymbolIndex(nav_index) },
                        .tag = if (ptr_width_bytes == 4) .memory_addr_i32 else .memory_addr_i64,
                        .addend = @intCast(offset),
                    });
                } else {
                    try wasm.nav_fixups.ensureUnusedCapacity(gpa, 1);
                    wasm.nav_fixups.appendAssumeCapacity(.{
                        .navs_exe_index = try wasm.refNavExe(nav_index),
                        .offset = @intCast(w.end),
                        .addend = @intCast(offset),
                    });
                }
            }
            try w.splatByteAll(0, ptr_width_bytes);
            return;
        },
        else => {},
    }

    const vaddr = lf.getNavVAddr(pt, nav_index, .{
        .parent = reloc_parent,
        .offset = w.end,
        .addend = @intCast(offset),
    }) catch @panic("TODO rework getNavVAddr");
    const endian = target.cpu.arch.endian();
    switch (ptr_width_bytes) {
        2 => try w.writeInt(u16, @intCast(vaddr), endian),
        4 => try w.writeInt(u32, @intCast(vaddr), endian),
        8 => try w.writeInt(u64, vaddr, endian),
        else => unreachable,
    }
}

/// Helper struct to denote that the value is in memory but requires a linker relocation fixup:
/// * got - the value is referenced indirectly via GOT entry index (the linker emits a got-type reloc)
/// * direct - the value is referenced directly via symbol index (the linker emits a displacement reloc)
/// * import - the value is referenced indirectly via import entry index (the linker emits an import-type reloc)
pub const LinkerLoad = struct {
    type: enum {
        got,
        direct,
        import,
    },
    sym_index: u32,
};

pub const SymbolResult = union(enum) { sym_index: u32, fail: *ErrorMsg };

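/// Resolves the linker symbol index through which `nav_index` can be referenced.
/// Internal navs get (or create) per-object metadata; extern navs resolve to global
/// symbols, with weak linkage and thread-locality reflected in the symbol flags.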
pub fn genNavRef(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    nav_index: InternPool.Nav.Index,
    target: *const std.Target,
) CodeGenError!SymbolResult {
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const nav = ip.getNav(nav_index);
    log.debug("genNavRef({f})", .{nav.fqn.fmt(ip)});

    const lib_name, const linkage, const is_threadlocal = if (nav.getExtern(ip)) |e|
        .{ e.lib_name, e.linkage, e.is_threadlocal and zcu.comp.config.any_non_single_threaded }
    else
        .{ .none, .internal, false };
    if (lf.cast(.elf)) |elf_file| {
        const zo = elf_file.zigObjectPtr().?;
        switch (linkage) {
            .internal => {
                const sym_index = try zo.getOrCreateMetadataForNav(zcu, nav_index);
                if (is_threadlocal) zo.symbol(sym_index).flags.is_tls = true;
                return .{ .sym_index = sym_index };
            },
            .strong, .weak => {
                const sym_index = try elf_file.getGlobalSymbol(nav.name.toSlice(ip), lib_name.toSlice(ip));
                switch (linkage) {
                    .internal => unreachable,
                    .strong => {},
                    .weak => zo.symbol(sym_index).flags.weak = true,
                    .link_once => unreachable,
                }
                if (is_threadlocal) zo.symbol(sym_index).flags.is_tls = true;
                return .{ .sym_index = sym_index };
            },
            .link_once => unreachable,
        }
    } else if (lf.cast(.elf2)) |elf| {
        return .{ .sym_index = @intFromEnum(elf.navSymbol(zcu, nav_index) catch |err| switch (err) {
            error.OutOfMemory => return error.OutOfMemory,
            else => |e| return .{ .fail = try ErrorMsg.create(
                zcu.gpa,
                src_loc,
                "linker failed to create a nav: {t}",
                .{e},
            ) },
        }) };
    } else if (lf.cast(.macho)) |macho_file| {
        const zo = macho_file.getZigObject().?;
        switch (linkage) {
            .internal => {
                const sym_index = try zo.getOrCreateMetadataForNav(macho_file, nav_index);
                if (is_threadlocal) zo.symbols.items[sym_index].flags.tlv = true;
                return .{ .sym_index = sym_index };
            },
            .strong, .weak => {
                const sym_index = try macho_file.getGlobalSymbol(nav.name.toSlice(ip), lib_name.toSlice(ip));
                switch (linkage) {
                    .internal => unreachable,
                    .strong => {},
                    .weak => zo.symbols.items[sym_index].flags.weak = true,
                    .link_once => unreachable,
                }
                if (is_threadlocal) zo.symbols.items[sym_index].flags.tlv = true;
                return .{ .sym_index = sym_index };
            },
            .link_once => unreachable,
        }
    } else if (lf.cast(.coff2)) |coff| {
        return .{ .sym_index = @intFromEnum(try coff.navSymbol(zcu, nav_index)) };
    } else {
        const msg = try ErrorMsg.create(zcu.gpa, src_loc, "TODO genNavRef for target {}", .{target});
        return .{ .fail = msg };
    }
}

/// deprecated legacy type
pub const GenResult = union(enum) {
    mcv: MCValue,
    fail: *ErrorMsg,

    const MCValue = union(enum) {
        none,
        undef,
        /// The bit-width of the immediate may be smaller than `u64`. For example, on 32-bit targets
        /// such as ARM, the immediate will never exceed 32 bits.
        immediate: u64,
        /// Decl with address deferred until the linker allocates everything in virtual memory.
        /// Payload is a symbol index.
        load_direct: u32,
        /// Decl with address deferred until the linker allocates everything in virtual memory.
        /// Payload is a symbol index.
        lea_direct: u32,
        /// Decl referenced via GOT with address deferred until the linker allocates
        /// everything in virtual memory.
        /// Payload is a symbol index.
        load_got: u32,
        /// Direct by-address reference to memory location.
        memory: u64,
        /// Reference to memory location, deferred until the linker has allocated the Decl in memory.
        /// Traditionally, this corresponds to emitting a relocation in a relocatable object file.
        load_symbol: u32,
        /// Reference to memory location, deferred until the linker has allocated the Decl in memory.
        /// Traditionally, this corresponds to emitting a relocation in a relocatable object file.
        lea_symbol: u32,
    };
};

/// deprecated legacy code path
pub fn genTypedValue(
    lf: *link.File,
    pt: Zcu.PerThread,
    src_loc: Zcu.LazySrcLoc,
    val: Value,
    target: *const std.Target,
) CodeGenError!GenResult {
    const res = try lowerValue(pt, val, target);
    return switch (res) {
        .none => .{ .mcv = .none },
        .undef => .{ .mcv = .undef },
        .immediate => |imm| .{ .mcv = .{ .immediate = imm } },
        .lea_nav => |nav| switch (try genNavRef(lf, pt, src_loc, nav, target)) {
            .sym_index => |sym_index| .{ .mcv = .{ .lea_symbol = sym_index } },
            .fail => |em| .{ .fail = em },
        },
        .load_uav, .lea_uav => |uav| switch (try lf.lowerUav(
            pt,
            uav.val,
            Type.fromInterned(uav.orig_ty).ptrAlignment(pt.zcu),
            src_loc,
        )) {
            .sym_index => |sym_index| .{ .mcv = switch (res) {
                else => unreachable,
                .load_uav => .{ .load_symbol = sym_index },
                .lea_uav => .{ .lea_symbol = sym_index },
            } },
            .fail => |em| .{ .fail = em },
        },
    };
}

const LowerResult = union(enum) {
    none,
    undef,
    /// The bit-width of the immediate may be smaller than `u64`. For example, on 32-bit targets
    /// such as ARM, the immediate will never exceed 32 bits.
    immediate: u64,
    lea_nav: InternPool.Nav.Index,
    load_uav: InternPool.Key.Ptr.BaseAddr.Uav,
    lea_uav: InternPool.Key.Ptr.BaseAddr.Uav,
};

pub fn lowerValue(pt: Zcu.PerThread, val: Value, target: *const std.Target) Allocator.Error!LowerResult {
    const zcu = pt.zcu;
    const ip = &zcu.intern_pool;
    const ty = val.typeOf(zcu);

    log.debug("lowerValue(@as({f}, {f}))", .{ ty.fmt(pt), val.fmtValue(pt) });

    if (val.isUndef(zcu)) return .undef;

    switch (ty.zigTypeTag(zcu)) {
        .void => return .none,
        .pointer => switch (ty.ptrSize(zcu)) {
            .slice => {},
            else => switch (val.toIntern()) {
                .null_value => {
                    return .{ .immediate = 0 };
                },
                else => switch (ip.indexToKey(val.toIntern())) {
                    .int => {
                        return .{ .immediate = val.toUnsignedInt(zcu) };
                    },
                    .ptr => |ptr| if (ptr.byte_offset == 0) switch (ptr.base_addr) {
                        .nav => |nav| {
                            if (!ty.isFnOrHasRuntimeBitsIgnoreComptime(zcu)) {
                                const imm: u64 = switch (@divExact(target.ptrBitWidth(), 8)) {
                                    1 => 0xaa,
                                    2 => 0xaaaa,
                                    4 => 0xaaaaaaaa,
                                    8 => 0xaaaaaaaaaaaaaaaa,
                                    else => unreachable,
                                };
                                return .{ .immediate = imm };
                            }

                            if (ty.castPtrToFn(zcu)) |fn_ty| {
                                if (zcu.typeToFunc(fn_ty).?.is_generic) {
                                    return .{ .immediate = fn_ty.abiAlignment(zcu).toByteUnits().? };
                                }
                            } else if (ty.zigTypeTag(zcu) == .pointer) {
                                const elem_ty = ty.elemType2(zcu);
                                if (!elem_ty.hasRuntimeBits(zcu)) {
                                    return .{ .immediate = elem_ty.abiAlignment(zcu).toByteUnits().? };
                                }
                            }

                            return .{ .lea_nav = nav };
                        },
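                        // A pointer to a zero-bit uav still needs a plausible address:
                        // `(1 << (ptrBitWidth | 1)) / 3` evaluates to the 0xAA... pattern
                        // at the pointer's width (the undefined-memory byte), aligned
                        // forward to the pointee's alignment.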
                        .uav => |uav| if (Value.fromInterned(uav.val).typeOf(zcu).hasRuntimeBits(zcu))
                            return .{ .lea_uav = uav }
                        else
                            return .{ .immediate = Type.fromInterned(uav.orig_ty).ptrAlignment(zcu)
                                .forward(@intCast((@as(u66, 1) << @intCast(target.ptrBitWidth() | 1)) / 3)) },
                        else => {},
                    },
                    else => {},
                },
            },
        },
        .int => {
            const info = ty.intInfo(zcu);
            if (info.bits <= target.ptrBitWidth()) {
                const unsigned: u64 = switch (info.signedness) {
                    .signed => @bitCast(val.toSignedInt(zcu)),
                    .unsigned => val.toUnsignedInt(zcu),
                };
                return .{ .immediate = unsigned };
            }
        },
        .bool => {
            return .{ .immediate = @intFromBool(val.toBool()) };
        },
        .optional => {
            if (ty.isPtrLikeOptional(zcu)) {
                return lowerValue(
                    pt,
                    val.optionalValue(zcu) orelse return .{ .immediate = 0 },
                    target,
                );
            } else if (ty.abiSize(zcu) == 1) {
                return .{ .immediate = @intFromBool(!val.isNull(zcu)) };
            }
        },
        .@"enum" => {
            const enum_tag = ip.indexToKey(val.toIntern()).enum_tag;
            return lowerValue(
                pt,
                Value.fromInterned(enum_tag.int),
                target,
            );
        },
        .error_set => {
            const err_name = ip.indexToKey(val.toIntern()).err.name;
            const error_index = ip.getErrorValueIfExists(err_name).?;
            return .{ .immediate = error_index };
        },
        .error_union => {
            const err_type = ty.errorUnionSet(zcu);
            const payload_type = ty.errorUnionPayload(zcu);
            if (!payload_type.hasRuntimeBitsIgnoreComptime(zcu)) {
                // We use the error type directly as the type.
                const err_int_ty = try pt.errorIntType();
                switch (ip.indexToKey(val.toIntern()).error_union.val) {
                    .err_name => |err_name| return lowerValue(
                        pt,
                        Value.fromInterned(try pt.intern(.{ .err = .{
                            .ty = err_type.toIntern(),
                            .name = err_name,
                        } })),
                        target,
                    ),
                    .payload => return lowerValue(
                        pt,
                        try pt.intValue(err_int_ty, 0),
                        target,
                    ),
                }
            }
        },

        .comptime_int => unreachable,
        .comptime_float => unreachable,
        .type => unreachable,
        .enum_literal => unreachable,
        .noreturn => unreachable,
        .undefined => unreachable,
        .null => unreachable,
        .@"opaque" => unreachable,

        else => {},
    }

    return .{ .load_uav = .{
        .val = val.toIntern(),
        .orig_ty = (try pt.singleConstPtrType(ty)).toIntern(),
    } };
}

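/// Byte offset of the payload within an error union's in-memory layout: zero when the
/// payload alignment is at least that of `anyerror` (payload first), otherwise the
/// payload follows the error value.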
pub fn errUnionPayloadOffset(payload_ty: Type, zcu: *Zcu) u64 {
    if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) return 0;
    const payload_align = payload_ty.abiAlignment(zcu);
    const error_align = Type.anyerror.abiAlignment(zcu);
    if (payload_align.compare(.gte, error_align) or !payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        return 0;
    } else {
        return payload_align.forward(Type.anyerror.abiSize(zcu));
    }
}

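/// Byte offset of the error value within an error union's in-memory layout: it follows
/// the payload when the payload alignment dominates, and is zero otherwise.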
pub fn errUnionErrorOffset(payload_ty: Type, zcu: *Zcu) u64 {
    if (!payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) return 0;
    const payload_align = payload_ty.abiAlignment(zcu);
    const error_align = Type.anyerror.abiAlignment(zcu);
    if (payload_align.compare(.gte, error_align) and payload_ty.hasRuntimeBitsIgnoreComptime(zcu)) {
        return error_align.forward(payload_ty.abiSize(zcu));
    } else {
        return 0;
    }
}

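/// Byte offset between a pointer to an aggregate and a pointer to one of its fields.
/// For packed layouts the offset comes from the difference in `packed_offset` bit
/// offsets, which must land on a byte boundary.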
pub fn fieldOffset(ptr_agg_ty: Type, ptr_field_ty: Type, field_index: u32, zcu: *Zcu) u64 {
    const agg_ty = ptr_agg_ty.childType(zcu);
    return switch (agg_ty.containerLayout(zcu)) {
        .auto, .@"extern" => agg_ty.structFieldOffset(field_index, zcu),
        .@"packed" => @divExact(@as(u64, ptr_agg_ty.ptrInfo(zcu).packed_offset.bit_offset) +
            (if (zcu.typeToPackedStruct(agg_ty)) |loaded_struct| zcu.structPackedFieldBitOffset(loaded_struct, field_index) else 0) -
            ptr_field_ty.ptrInfo(zcu).packed_offset.bit_offset, 8),
    };
}

test {
    _ = aarch64;
}