Commit eee8fffec7
Changed files (15)
lib
std
src
arch
aarch64
arm
riscv64
sparc64
wasm
x86_64
lib/std/builtin.zig
@@ -846,5 +846,16 @@ pub fn default_panic(msg: []const u8, error_return_trace: ?*StackTrace) noreturn
}
}
+pub noinline fn returnError(maybe_st: ?*StackTrace) void {
+ @setCold(true);
+ const st = maybe_st orelse return;
+ addErrRetTraceAddr(st, @returnAddress());
+}
+
+pub inline fn addErrRetTraceAddr(st: *StackTrace, addr: usize) void {
+ st.instruction_addresses[st.index & (st.instruction_addresses.len - 1)] = addr;
+ st.index +%= 1;
+}
+
const std = @import("std.zig");
const root = @import("root");
src/arch/aarch64/CodeGen.zig
@@ -718,6 +718,8 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.unwrap_errunion_err_ptr => try self.airUnwrapErrErrPtr(inst),
.unwrap_errunion_payload_ptr=> try self.airUnwrapErrPayloadPtr(inst),
.errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst),
+ .err_return_trace => try self.airErrReturnTrace(inst),
+ .set_err_return_trace => try self.airSetErrReturnTrace(inst),
.wrap_optional => try self.airWrapOptional(inst),
.wrap_errunion_payload => try self.airWrapErrUnionPayload(inst),
@@ -2330,6 +2332,24 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
+fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
+fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
src/arch/arm/CodeGen.zig
@@ -725,6 +725,8 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.unwrap_errunion_err_ptr => try self.airUnwrapErrErrPtr(inst),
.unwrap_errunion_payload_ptr=> try self.airUnwrapErrPayloadPtr(inst),
.errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst),
+ .err_return_trace => try self.airErrReturnTrace(inst),
+ .set_err_return_trace => try self.airSetErrReturnTrace(inst),
.wrap_optional => try self.airWrapOptional(inst),
.wrap_errunion_payload => try self.airWrapErrUnionPayload(inst),
@@ -1843,6 +1845,24 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
+fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
+fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
/// T to E!T
fn airWrapErrUnionPayload(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
src/arch/riscv64/CodeGen.zig
@@ -654,6 +654,8 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.unwrap_errunion_err_ptr => try self.airUnwrapErrErrPtr(inst),
.unwrap_errunion_payload_ptr=> try self.airUnwrapErrPayloadPtr(inst),
.errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst),
+ .err_return_trace => try self.airErrReturnTrace(inst),
+ .set_err_return_trace => try self.airSetErrReturnTrace(inst),
.wrap_optional => try self.airWrapOptional(inst),
.wrap_errunion_payload => try self.airWrapErrUnionPayload(inst),
@@ -1267,6 +1269,24 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
+fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
+fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
src/arch/sparc64/CodeGen.zig
@@ -630,6 +630,8 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.unwrap_errunion_err_ptr => @panic("TODO try self.airUnwrapErrErrPtr(inst)"),
.unwrap_errunion_payload_ptr=> @panic("TODO try self.airUnwrapErrPayloadPtr(inst)"),
.errunion_payload_ptr_set => @panic("TODO try self.airErrUnionPayloadPtrSet(inst)"),
+ .err_return_trace => @panic("TODO try self.airErrReturnTrace(inst)"),
+ .set_err_return_trace => @panic("TODO try self.airSetErrReturnTrace(inst)"),
.wrap_optional => @panic("TODO try self.airWrapOptional(inst)"),
.wrap_errunion_payload => @panic("TODO try self.airWrapErrUnionPayload(inst)"),
src/arch/wasm/CodeGen.zig
@@ -1612,6 +1612,8 @@ fn genInst(self: *Self, inst: Air.Inst.Index) !WValue {
.atomic_store_seq_cst,
.atomic_rmw,
.tag_name,
+ .err_return_trace,
+ .set_err_return_trace,
=> |tag| return self.fail("TODO: Implement wasm inst: {s}", .{@tagName(tag)}),
};
}
src/arch/x86_64/CodeGen.zig
@@ -749,6 +749,8 @@ fn genBody(self: *Self, body: []const Air.Inst.Index) InnerError!void {
.unwrap_errunion_err_ptr => try self.airUnwrapErrErrPtr(inst),
.unwrap_errunion_payload_ptr=> try self.airUnwrapErrPayloadPtr(inst),
.errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst),
+ .err_return_trace => try self.airErrReturnTrace(inst),
+ .set_err_return_trace => try self.airSetErrReturnTrace(inst),
.wrap_optional => try self.airWrapOptional(inst),
.wrap_errunion_payload => try self.airWrapErrUnionPayload(inst),
@@ -1855,6 +1857,24 @@ fn airErrUnionPayloadPtrSet(self: *Self, inst: Air.Inst.Index) !void {
return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
}
+fn airErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
+fn airSetErrReturnTrace(self: *Self, inst: Air.Inst.Index) !void {
+ const ty_op = self.air.instructions.items(.data)[inst].ty_op;
+ const result: MCValue = if (self.liveness.isUnused(inst))
+ .dead
+ else
+ return self.fail("TODO implement airSetErrReturnTrace for {}", .{self.target.cpu.arch});
+ return self.finishAir(inst, result, .{ ty_op.operand, .none, .none });
+}
+
fn airWrapOptional(self: *Self, inst: Air.Inst.Index) !void {
const ty_op = self.air.instructions.items(.data)[inst].ty_op;
if (self.liveness.isUnused(inst)) {
src/codegen/c.zig
@@ -1911,6 +1911,8 @@ fn genBody(f: *Function, body: []const Air.Inst.Index) error{ AnalysisFail, OutO
.wrap_errunion_payload => try airWrapErrUnionPay(f, inst),
.wrap_errunion_err => try airWrapErrUnionErr(f, inst),
.errunion_payload_ptr_set => try airErrUnionPayloadPtrSet(f, inst),
+ .err_return_trace => try airErrReturnTrace(f, inst),
+ .set_err_return_trace => try airSetErrReturnTrace(f, inst),
.wasm_memory_size => try airWasmMemorySize(f, inst),
.wasm_memory_grow => try airWasmMemoryGrow(f, inst),
@@ -3447,6 +3449,38 @@ fn airErrUnionPayloadPtrSet(f: *Function, inst: Air.Inst.Index) !CValue {
return local;
}
+fn airErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue {
+ if (f.liveness.isUnused(inst)) return CValue.none;
+
+ const un_op = f.air.instructions.items(.data)[inst].un_op;
+ const writer = f.object.writer();
+ const inst_ty = f.air.typeOfIndex(inst);
+ const operand = try f.resolveInst(un_op);
+ const local = try f.allocLocal(inst_ty, .Const);
+
+ try writer.writeAll(" = ");
+
+ _ = operand;
+ _ = local;
+ return f.fail("TODO: C backend: implement airErrReturnTrace", .{});
+}
+
+fn airSetErrReturnTrace(f: *Function, inst: Air.Inst.Index) !CValue {
+ if (f.liveness.isUnused(inst)) return CValue.none;
+
+ const un_op = f.air.instructions.items(.data)[inst].un_op;
+ const writer = f.object.writer();
+ const inst_ty = f.air.typeOfIndex(inst);
+ const operand = try f.resolveInst(un_op);
+ const local = try f.allocLocal(inst_ty, .Const);
+
+ try writer.writeAll(" = ");
+
+ _ = operand;
+ _ = local;
+ return f.fail("TODO: C backend: implement airSetErrReturnTrace", .{});
+}
+
fn airWrapErrUnionPay(f: *Function, inst: Air.Inst.Index) !CValue {
if (f.liveness.isUnused(inst))
return CValue.none;
src/codegen/llvm.zig
@@ -636,10 +636,18 @@ pub const Object = struct {
const ret_ptr = if (sret) llvm_func.getParam(0) else null;
const gpa = dg.gpa;
+ const err_return_tracing = fn_info.return_type.isError() and
+ dg.module.comp.bin_file.options.error_return_tracing;
+
+ const err_ret_trace = if (err_return_tracing)
+ llvm_func.getParam(@boolToInt(ret_ptr != null))
+ else
+ null;
+
var args = std.ArrayList(*const llvm.Value).init(gpa);
defer args.deinit();
- const param_offset: c_uint = @boolToInt(ret_ptr != null);
+ const param_offset = @as(c_uint, @boolToInt(ret_ptr != null)) + @boolToInt(err_return_tracing);
for (fn_info.param_types) |param_ty| {
if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -711,6 +719,7 @@ pub const Object = struct {
.base_line = dg.decl.src_line,
.prev_dbg_line = 0,
.prev_dbg_column = 0,
+ .err_ret_trace = err_ret_trace,
};
defer fg.deinit();
@@ -1755,6 +1764,17 @@ pub const Object = struct {
try param_di_types.append(try o.lowerDebugType(Type.void, .full));
}
+ if (fn_info.return_type.isError() and
+ o.module.comp.bin_file.options.error_return_tracing)
+ {
+ var ptr_ty_payload: Type.Payload.ElemType = .{
+ .base = .{ .tag = .single_mut_pointer },
+ .data = o.getStackTraceType(),
+ };
+ const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
+ try param_di_types.append(try o.lowerDebugType(ptr_ty, .full));
+ }
+
for (fn_info.param_types) |param_ty| {
if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -1824,6 +1844,27 @@ pub const Object = struct {
"", // unique id
);
}
+
+ fn getStackTraceType(o: *Object) Type {
+ const mod = o.module;
+
+ const std_pkg = mod.main_pkg.table.get("std").?;
+ const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
+
+ const builtin_str: []const u8 = "builtin";
+ const std_namespace = mod.declPtr(std_file.root_decl.unwrap().?).src_namespace;
+ const builtin_decl = std_namespace.decls
+ .getKeyAdapted(builtin_str, Module.DeclAdapter{ .mod = mod }).?;
+
+ const stack_trace_str: []const u8 = "StackTrace";
+ // buffer is only used for int_type, `builtin` is a struct.
+ const builtin_ty = mod.declPtr(builtin_decl).val.toType(undefined);
+ const builtin_namespace = builtin_ty.getNamespace().?;
+ const stack_trace_decl = builtin_namespace.decls
+ .getKeyAdapted(stack_trace_str, Module.DeclAdapter{ .mod = mod }).?;
+
+ return mod.declPtr(stack_trace_decl).val.toType(undefined);
+ }
};
pub const DeclGen = struct {
@@ -1976,8 +2017,15 @@ pub const DeclGen = struct {
llvm_fn.addSretAttr(0, raw_llvm_ret_ty);
}
+ const err_return_tracing = fn_info.return_type.isError() and
+ dg.module.comp.bin_file.options.error_return_tracing;
+
+ if (err_return_tracing) {
+ dg.addArgAttr(llvm_fn, @boolToInt(sret), "nonnull");
+ }
+
// Set parameter attributes.
- var llvm_param_i: c_uint = @boolToInt(sret);
+ var llvm_param_i: c_uint = @as(c_uint, @boolToInt(sret)) + @boolToInt(err_return_tracing);
for (fn_info.param_types) |param_ty| {
if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -2435,6 +2483,17 @@ pub const DeclGen = struct {
try llvm_params.append(llvm_sret_ty.pointerType(0));
}
+ if (fn_info.return_type.isError() and
+ dg.module.comp.bin_file.options.error_return_tracing)
+ {
+ var ptr_ty_payload: Type.Payload.ElemType = .{
+ .base = .{ .tag = .single_mut_pointer },
+ .data = dg.object.getStackTraceType(),
+ };
+ const ptr_ty = Type.initPayload(&ptr_ty_payload.base);
+ try llvm_params.append(try lowerFnParamTy(dg, fn_info.cc, ptr_ty));
+ }
+
for (fn_info.param_types) |param_ty| {
if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -3449,6 +3508,8 @@ pub const FuncGen = struct {
llvm_func: *const llvm.Value,
+ err_ret_trace: ?*const llvm.Value = null,
+
/// This data structure is used to implement breaking to blocks.
blocks: std.AutoHashMapUnmanaged(Air.Inst.Index, struct {
parent_bb: *const llvm.BasicBlock,
@@ -3678,6 +3739,8 @@ pub const FuncGen = struct {
.unwrap_errunion_err => try self.airErrUnionErr(inst, false),
.unwrap_errunion_err_ptr => try self.airErrUnionErr(inst, true),
.errunion_payload_ptr_set => try self.airErrUnionPayloadPtrSet(inst),
+ .err_return_trace => try self.airErrReturnTrace(inst),
+ .set_err_return_trace => try self.airSetErrReturnTrace(inst),
.wrap_optional => try self.airWrapOptional(inst),
.wrap_errunion_payload => try self.airWrapErrUnionPayload(inst),
@@ -3732,6 +3795,12 @@ pub const FuncGen = struct {
break :blk ret_ptr;
};
+ if (fn_info.return_type.isError() and
+ self.dg.module.comp.bin_file.options.error_return_tracing)
+ {
+ try llvm_args.append(self.err_ret_trace.?);
+ }
+
for (args) |arg| {
const param_ty = self.air.typeOf(arg);
if (!param_ty.hasRuntimeBitsIgnoreComptime()) continue;
@@ -5149,6 +5218,17 @@ pub const FuncGen = struct {
return self.builder.buildInBoundsGEP(operand, &indices, indices.len, "");
}
+ fn airErrReturnTrace(self: *FuncGen, _: Air.Inst.Index) !?*const llvm.Value {
+ return self.err_ret_trace.?;
+ }
+
+ fn airSetErrReturnTrace(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
+ const un_op = self.air.instructions.items(.data)[inst].un_op;
+ const operand = try self.resolveInst(un_op);
+ self.err_ret_trace = operand;
+ return null;
+ }
+
fn airWrapOptional(self: *FuncGen, inst: Air.Inst.Index) !?*const llvm.Value {
if (self.liveness.isUnused(inst)) return null;
src/Air.zig
@@ -649,6 +649,12 @@ pub const Inst = struct {
/// flush().
cmp_lt_errors_len,
+ /// Returns pointer to current error return trace.
+ err_return_trace,
+
+ /// Sets the operand as the current error return trace,
+ set_err_return_trace,
+
pub fn fromCmpOp(op: std.math.CompareOperator) Tag {
return switch (op) {
.lt => .cmp_lt,
@@ -961,6 +967,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.alloc,
.ret_ptr,
.arg,
+ .err_return_trace,
=> return datas[inst].ty,
.assembly,
@@ -1048,6 +1055,7 @@ pub fn typeOfIndex(air: Air, inst: Air.Inst.Index) Type {
.memcpy,
.set_union_tag,
.prefetch,
+ .set_err_return_trace,
=> return Type.void,
.ptrtoint,
src/Liveness.zig
@@ -362,6 +362,7 @@ fn analyzeInst(
.ret_addr,
.frame_addr,
.wasm_memory_size,
+ .err_return_trace,
=> return trackOperands(a, new_set, inst, main_tomb, .{ .none, .none, .none }),
.not,
@@ -434,6 +435,7 @@ fn analyzeInst(
.round,
.trunc_float,
.cmp_lt_errors_len,
+ .set_err_return_trace,
=> {
const operand = inst_datas[inst].un_op;
return trackOperands(a, new_set, inst, main_tomb, .{ operand, .none, .none });
src/Module.zig
@@ -1427,6 +1427,7 @@ pub const Fn = struct {
state: Analysis,
is_cold: bool = false,
is_noinline: bool = false,
+ calls_or_awaits_errorable_fn: bool = false,
/// Any inferred error sets that this function owns, both its own inferred error set and
/// inferred error sets of any inline/comptime functions called. Not to be confused
@@ -4838,6 +4839,9 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
};
defer sema.deinit();
+    // reset in case calls to errorable functions are removed.
+ func.calls_or_awaits_errorable_fn = false;
+
// First few indexes of extra are reserved and set at the end.
const reserved_count = @typeInfo(Air.ExtraIndex).Enum.fields.len;
try sema.air_extra.ensureTotalCapacity(gpa, reserved_count);
@@ -4936,6 +4940,8 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
func.state = .in_progress;
log.debug("set {s} to in_progress", .{decl.name});
+ const last_arg_index = inner_block.instructions.items.len;
+
sema.analyzeBody(&inner_block, fn_info.body) catch |err| switch (err) {
// TODO make these unreachable instead of @panic
error.NeededSourceLocation => @panic("zig compiler bug: NeededSourceLocation"),
@@ -4944,6 +4950,21 @@ pub fn analyzeFnBody(mod: *Module, func: *Fn, arena: Allocator) SemaError!Air {
else => |e| return e,
};
+ // If we don't get an error return trace from a caller, create our own.
+ if (func.calls_or_awaits_errorable_fn and
+ mod.comp.bin_file.options.error_return_tracing and
+ !sema.fn_ret_ty.isError())
+ {
+ sema.setupErrorReturnTrace(&inner_block, last_arg_index) catch |err| switch (err) {
+ // TODO make these unreachable instead of @panic
+ error.NeededSourceLocation => @panic("zig compiler bug: NeededSourceLocation"),
+ error.GenericPoison => @panic("zig compiler bug: GenericPoison"),
+ error.ComptimeReturn => @panic("zig compiler bug: ComptimeReturn"),
+ error.ComptimeBreak => @panic("zig compiler bug: ComptimeBreak"),
+ else => |e| return e,
+ };
+ }
+
try wip_captures.finalize();
// Copy the block into place and mark that as the main block.
src/print_air.zig
@@ -170,6 +170,7 @@ const Writer = struct {
.round,
.trunc_float,
.cmp_lt_errors_len,
+ .set_err_return_trace,
=> try w.writeUnOp(s, inst),
.breakpoint,
@@ -182,6 +183,7 @@ const Writer = struct {
.alloc,
.ret_ptr,
.arg,
+ .err_return_trace,
=> try w.writeTy(s, inst),
.not,
src/Sema.zig
@@ -1411,6 +1411,38 @@ fn analyzeAsType(
return ty.copy(sema.arena);
}
+pub fn setupErrorReturnTrace(sema: *Sema, block: *Block, last_arg_index: usize) !void {
+ var err_trace_block = block.makeSubBlock();
+ err_trace_block.is_comptime = false;
+ defer err_trace_block.instructions.deinit(sema.gpa);
+
+ const src: LazySrcLoc = .unneeded;
+
+ // var addrs: [err_return_trace_addr_count]usize = undefined;
+ const err_return_trace_addr_count = 32;
+ const addr_arr_ty = try Type.array(sema.arena, err_return_trace_addr_count, null, Type.usize, sema.mod);
+ const addrs_ptr = try err_trace_block.addTy(.alloc, try Type.Tag.single_mut_pointer.create(sema.arena, addr_arr_ty));
+
+ // var st: StackTrace = undefined;
+ const unresolved_stack_trace_ty = try sema.getBuiltinType(&err_trace_block, src, "StackTrace");
+ const stack_trace_ty = try sema.resolveTypeFields(&err_trace_block, src, unresolved_stack_trace_ty);
+ const st_ptr = try err_trace_block.addTy(.alloc, try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty));
+
+ // st.instruction_addresses = &addrs;
+ const addr_field_ptr = try sema.fieldPtr(&err_trace_block, src, st_ptr, "instruction_addresses", src);
+ try sema.storePtr2(&err_trace_block, src, addr_field_ptr, src, addrs_ptr, src, .store);
+
+ // st.index = 0;
+ const index_field_ptr = try sema.fieldPtr(&err_trace_block, src, st_ptr, "index", src);
+ const zero = try sema.addConstant(Type.usize, Value.zero);
+ try sema.storePtr2(&err_trace_block, src, index_field_ptr, src, zero, src, .store);
+
+ // @errorReturnTrace() = &st;
+ _ = try err_trace_block.addUnOp(.set_err_return_trace, st_ptr);
+
+ try block.instructions.insertSlice(sema.gpa, last_arg_index, err_trace_block.instructions.items);
+}
+
/// May return Value Tags: `variable`, `undef`.
/// See `resolveConstValue` for an alternative.
/// Value Tag `generic_poison` causes `error.GenericPoison` to be returned.
@@ -5236,6 +5268,13 @@ fn analyzeCall(
}
try sema.queueFullTypeResolution(func_ty_info.return_type);
+ if (sema.owner_func != null and func_ty_info.return_type.isError()) {
+ if (!sema.owner_func.?.calls_or_awaits_errorable_fn) {
+ // Ensure the type exists so that backends can assume that.
+ _ = try sema.getBuiltinType(block, call_src, "StackTrace");
+ }
+ sema.owner_func.?.calls_or_awaits_errorable_fn = true;
+ }
try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.Call).Struct.fields.len +
args.len);
@@ -5645,6 +5684,15 @@ fn instantiateGenericCall(
try sema.queueFullTypeResolution(new_fn_info.return_type);
}
+
+ if (sema.owner_func != null and new_fn_info.return_type.isError()) {
+ if (!sema.owner_func.?.calls_or_awaits_errorable_fn) {
+ // Ensure the type exists so that backends can assume that.
+ _ = try sema.getBuiltinType(block, call_src, "StackTrace");
+ }
+ sema.owner_func.?.calls_or_awaits_errorable_fn = true;
+ }
+
try sema.air_extra.ensureUnusedCapacity(sema.gpa, @typeInfo(Air.Call).Struct.fields.len +
runtime_args_len);
const func_inst = try block.addInst(.{
@@ -12607,6 +12655,16 @@ fn analyzeRet(
return always_noreturn;
}
+ if (sema.fn_ret_ty.isError() and sema.mod.comp.bin_file.options.error_return_tracing) {
+ const return_err_fn = try sema.getBuiltin(block, src, "returnError");
+ const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
+ const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
+ const ptr_stack_trace_ty = try Type.Tag.optional_single_mut_pointer.create(sema.arena, stack_trace_ty);
+ const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
+ const args: [1]Air.Inst.Ref = .{err_return_trace};
+ _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args);
+ }
+
try sema.resolveTypeLayout(block, src, sema.fn_ret_ty);
_ = try block.addUnOp(.ret, operand);
return always_noreturn;
@@ -13338,9 +13396,14 @@ fn zirErrorReturnTrace(
const src: LazySrcLoc = .{ .node_offset = @bitCast(i32, extended.operand) };
const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
- const opt_stack_trace_ty = try Type.optional(sema.arena, stack_trace_ty);
- // https://github.com/ziglang/zig/issues/11259
- return sema.addConstant(opt_stack_trace_ty, Value.@"null");
+ const opt_ptr_stack_trace_ty = try Type.Tag.optional_single_mut_pointer.create(sema.arena, stack_trace_ty);
+ if (sema.owner_func != null and
+ sema.owner_func.?.calls_or_awaits_errorable_fn and
+ sema.mod.comp.bin_file.options.error_return_tracing)
+ {
+ return block.addTy(.err_return_trace, opt_ptr_stack_trace_ty);
+ }
+ return sema.addConstant(opt_ptr_stack_trace_ty, Value.@"null");
}
fn zirFrame(
@@ -21817,11 +21880,7 @@ fn resolvePeerTypes(
info.data.sentinel = chosen_child_ty.sentinel();
info.data.size = .Slice;
info.data.mutable = !(seen_const or chosen_child_ty.isConstPtr());
- info.data.pointee_type = switch (chosen_child_ty.tag()) {
- .array => chosen_child_ty.elemType2(),
- .array_u8, .array_u8_sentinel_0 => Type.initTag(.u8),
- else => unreachable,
- };
+ info.data.pointee_type = chosen_child_ty.elemType2();
const new_ptr_ty = try Type.ptr(sema.arena, sema.mod, info.data);
const opt_ptr_ty = if (any_are_null)
src/type.zig
@@ -4093,6 +4093,13 @@ pub const Type = extern union {
};
}
+ pub fn isError(ty: Type) bool {
+ return switch (ty.zigTypeTag()) {
+ .ErrorUnion, .ErrorSet => true,
+ else => false,
+ };
+ }
+
/// Returns whether ty, which must be an error set, includes an error `name`.
/// Might return a false negative if `ty` is an inferred error set and not fully
/// resolved yet.