Commit 3389890709
Changed files (2)
src/AstGen.zig
@@ -6573,6 +6573,16 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
if (gz.in_defer) return astgen.failNode(node, "cannot return from defer expression", .{});
+ // Ensure debug line/column information is emitted for this return expression.
+ // Then we save the line/column so that, after evaluating the operand (which
+ // may emit its own debug info), we can emit another dbg_stmt that points
+ // "backwards" at the return keyword, for error return tracing.
+ if (!gz.force_comptime) {
+ try emitDbgNode(gz, node);
+ }
+ const ret_line = astgen.source_line - gz.decl_line;
+ const ret_column = astgen.source_column;
+
const defer_outer = &astgen.fn_block.?.base;
const operand_node = node_datas[node].lhs;
@@ -6591,11 +6601,13 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
const defer_counts = countDefers(astgen, defer_outer, scope);
if (!defer_counts.need_err_code) {
try genDefers(gz, defer_outer, scope, .both_sans_err);
+ try emitRetDbgStmt(gz, ret_line, ret_column);
_ = try gz.addStrTok(.ret_err_value, err_name_str_index, ident_token);
return Zir.Inst.Ref.unreachable_value;
}
const err_code = try gz.addStrTok(.ret_err_value_code, err_name_str_index, ident_token);
try genDefers(gz, defer_outer, scope, .{ .both = err_code });
+ try emitRetDbgStmt(gz, ret_line, ret_column);
_ = try gz.addUnNode(.ret_node, err_code, node);
return Zir.Inst.Ref.unreachable_value;
}
@@ -6614,6 +6626,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
.never => {
// Returning a value that cannot be an error; skip error defers.
try genDefers(gz, defer_outer, scope, .normal_only);
+ try emitRetDbgStmt(gz, ret_line, ret_column);
try gz.addRet(rl, operand, node);
return Zir.Inst.Ref.unreachable_value;
},
@@ -6621,6 +6634,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
// Value is always an error. Emit both error defers and regular defers.
const err_code = if (rl == .ptr) try gz.addUnNode(.load, rl.ptr, node) else operand;
try genDefers(gz, defer_outer, scope, .{ .both = err_code });
+ try emitRetDbgStmt(gz, ret_line, ret_column);
try gz.addRet(rl, operand, node);
return Zir.Inst.Ref.unreachable_value;
},
@@ -6629,6 +6643,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
if (!defer_counts.have_err) {
// Only regular defers; no branch needed.
try genDefers(gz, defer_outer, scope, .normal_only);
+ try emitRetDbgStmt(gz, ret_line, ret_column);
try gz.addRet(rl, operand, node);
return Zir.Inst.Ref.unreachable_value;
}
@@ -6642,6 +6657,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
defer then_scope.unstack();
try genDefers(&then_scope, defer_outer, scope, .normal_only);
+ try emitRetDbgStmt(&then_scope, ret_line, ret_column);
try then_scope.addRet(rl, operand, node);
var else_scope = gz.makeSubBlock(scope);
@@ -6651,6 +6667,7 @@ fn ret(gz: *GenZir, scope: *Scope, node: Ast.Node.Index) InnerError!Zir.Inst.Ref
.both = try else_scope.addUnNode(.err_union_code, result, node),
};
try genDefers(&else_scope, defer_outer, scope, which_ones);
+ try emitRetDbgStmt(&else_scope, ret_line, ret_column);
try else_scope.addRet(rl, operand, node);
try setCondBrPayload(condbr, is_non_err, &then_scope, 0, &else_scope, 0);
@@ -11667,3 +11684,14 @@ fn countBodyLenAfterFixups(astgen: *AstGen, body: []const Zir.Inst.Index) u32 {
}
return @intCast(u32, count);
}
+
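+/// Re-emits a dbg_stmt at the given line/column, which `ret` saves at the
+/// `return` keyword before evaluating the operand, so that the ret instruction
+/// that follows (and any error-return-trace frame it records) is attributed to
+/// the return site rather than to the operand expression. Does nothing in
+/// comptime scopes.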
+fn emitRetDbgStmt(gz: *GenZir, line: u32, column: u32) !void {
+ if (gz.force_comptime) return;
+
+ _ = try gz.add(.{ .tag = .dbg_stmt, .data = .{
+ .dbg_stmt = .{
+ .line = line,
+ .column = column,
+ },
+ } });
+}
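
Taken together, the AstGen changes save the source position of the `return` keyword, let the operand expression advance the debug cursor as usual, and re-emit a `dbg_stmt` at the saved position right before each generated `ret`. A minimal sketch of user code where this matters (hypothetical names, not from the commit):

    const std = @import("std");

    // Hypothetical example: the operand `parse(s)` is evaluated first, but the
    // dbg_stmt re-emitted before the `ret` keeps the error-return-trace frame
    // pointing at the `return` line itself.
    fn parse(s: []const u8) !u32 {
        return std.fmt.parseInt(u32, s, 10);
    }

    fn getPort(s: []const u8) !u32 {
        return parse(s);
    }

    pub fn main() !void {
        const port = try getPort("not a number");
        std.debug.print("port {d}\n", .{port});
    }

Without the re-emitted `dbg_stmt`, the frame recorded for `getPort` would point at whatever location the operand evaluation last left the debug cursor on.
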
src/Sema.zig
@@ -14888,10 +14888,83 @@ fn zirRetLoad(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Zir
const operand = try sema.analyzeLoad(block, src, ret_ptr, src);
return sema.analyzeRet(block, operand, src);
}
+
+ if (sema.wantErrorReturnTracing()) {
+ const is_non_err = try sema.analyzePtrIsNonErr(block, src, ret_ptr);
+ return retWithErrTracing(sema, block, src, is_non_err, .ret_load, ret_ptr);
+ }
+
_ = try block.addUnOp(.ret_load, ret_ptr);
return always_noreturn;
}
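+/// Lowers a return of a possibly-error value when error return tracing is
+/// enabled. A comptime-known non-error returns directly; a comptime-known
+/// error calls the `returnError` builtin to record a trace frame and then
+/// returns; otherwise it branches at runtime on `is_non_err` (computed by the
+/// caller), roughly (a sketch, not the exact AIR):
+///
+///   cond_br %is_non_err {
+///       ret %operand
+///   } else {
+///       %trace = err_return_trace()
+///       call never_inline returnError(%trace)
+///       ret %operand
+///   }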
+fn retWithErrTracing(
+ sema: *Sema,
+ block: *Block,
+ src: LazySrcLoc,
+ is_non_err: Air.Inst.Ref,
+ ret_tag: Air.Inst.Tag,
+ operand: Air.Inst.Ref,
+) CompileError!Zir.Inst.Index {
+ const need_check = switch (is_non_err) {
+ .bool_true => {
+ _ = try block.addUnOp(ret_tag, operand);
+ return always_noreturn;
+ },
+ .bool_false => false,
+ else => true,
+ };
+ const gpa = sema.gpa;
+ const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
+ const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
+ const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty);
+ const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
+ const return_err_fn = try sema.getBuiltin(block, src, "returnError");
+ const args: [1]Air.Inst.Ref = .{err_return_trace};
+
+ if (!need_check) {
+ _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null);
+ _ = try block.addUnOp(ret_tag, operand);
+ return always_noreturn;
+ }
+
+ var then_block = block.makeSubBlock();
+ defer then_block.instructions.deinit(gpa);
+ _ = try then_block.addUnOp(ret_tag, operand);
+
+ var else_block = block.makeSubBlock();
+ defer else_block.instructions.deinit(gpa);
+ _ = try sema.analyzeCall(&else_block, return_err_fn, src, src, .never_inline, false, &args, null);
+ _ = try else_block.addUnOp(ret_tag, operand);
+
+ try sema.air_extra.ensureUnusedCapacity(gpa, @typeInfo(Air.CondBr).Struct.fields.len +
+ then_block.instructions.items.len + else_block.instructions.items.len +
+ @typeInfo(Air.Block).Struct.fields.len + 1);
+
+ const cond_br_payload = sema.addExtraAssumeCapacity(Air.CondBr{
+ .then_body_len = @intCast(u32, then_block.instructions.items.len),
+ .else_body_len = @intCast(u32, else_block.instructions.items.len),
+ });
+ sema.air_extra.appendSliceAssumeCapacity(then_block.instructions.items);
+ sema.air_extra.appendSliceAssumeCapacity(else_block.instructions.items);
+
+ _ = try block.addInst(.{ .tag = .cond_br, .data = .{ .pl_op = .{
+ .operand = is_non_err,
+ .payload = cond_br_payload,
+ } } });
+
+ return always_noreturn;
+}
+
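+/// Whether returns in the function currently being analyzed should record
+/// error return trace frames: the return type must be able to hold an error,
+/// tracing must be enabled for this compilation, and the backend must support
+/// it (currently only the LLVM backend does).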
+fn wantErrorReturnTracing(sema: *Sema) bool {
+ // TODO implement this feature in all the backends and then delete this check.
+ const backend_supports_error_return_tracing = sema.mod.comp.bin_file.options.use_llvm;
+
+ return sema.fn_ret_ty.isError() and
+ sema.mod.comp.bin_file.options.error_return_tracing and
+ backend_supports_error_return_tracing;
+}
+
fn addToInferredErrorSet(sema: *Sema, uncasted_operand: Air.Inst.Ref) !void {
assert(sema.fn_ret_ty.zigTypeTag() == .ErrorUnion);
@@ -14915,8 +14988,6 @@ fn analyzeRet(
uncasted_operand: Air.Inst.Ref,
src: LazySrcLoc,
) CompileError!Zir.Inst.Index {
- const gpa = sema.gpa;
-
// Special case for returning an error to an inferred error set; we need to
// add the error tag to the inferred error set of the in-scope function, so
// that the coercion below works correctly.
@@ -14934,65 +15005,18 @@ fn analyzeRet(
return error.ComptimeReturn;
}
// We are inlining a function call; rewrite the `ret` as a `break`.
- try inlining.merges.results.append(gpa, operand);
+ try inlining.merges.results.append(sema.gpa, operand);
_ = try block.addBr(inlining.merges.block_inst, operand);
return always_noreturn;
}
try sema.resolveTypeLayout(block, src, sema.fn_ret_ty);
- // TODO implement this feature in all the backends and then delete this check.
- const backend_supports_error_return_tracing =
- sema.mod.comp.bin_file.options.use_llvm;
-
- if (sema.fn_ret_ty.isError() and
- sema.mod.comp.bin_file.options.error_return_tracing and
- backend_supports_error_return_tracing)
- ret_err: {
+ if (sema.wantErrorReturnTracing()) {
// Avoid adding a frame to the error return trace in case the value is comptime-known
// to be not an error.
- const is_non_err = try sema.analyzeIsNonErrComptimeOnly(block, src, operand);
- const need_check = switch (is_non_err) {
- .bool_true => break :ret_err,
- .bool_false => false,
- else => true,
- };
-
- const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
- const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
- const ptr_stack_trace_ty = try Type.Tag.single_mut_pointer.create(sema.arena, stack_trace_ty);
- const err_return_trace = try block.addTy(.err_return_trace, ptr_stack_trace_ty);
- const return_err_fn = try sema.getBuiltin(block, src, "returnError");
- const args: [1]Air.Inst.Ref = .{err_return_trace};
-
- if (!need_check) {
- _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null);
- break :ret_err;
- }
-
- const block_inst = @intCast(Air.Inst.Index, sema.air_instructions.len);
- try sema.air_instructions.append(gpa, .{
- .tag = .block,
- .data = .{ .ty_pl = .{
- .ty = .void_type,
- .payload = undefined,
- } },
- });
-
- var child_block = block.makeSubBlock();
- defer child_block.instructions.deinit(gpa);
-
- var then_block = child_block.makeSubBlock();
- defer then_block.instructions.deinit(gpa);
- _ = try then_block.addUnOp(.ret, operand);
-
- var else_block = child_block.makeSubBlock();
- defer else_block.instructions.deinit(gpa);
- _ = try sema.analyzeCall(block, return_err_fn, src, src, .never_inline, false, &args, null);
- _ = try else_block.addUnOp(.ret, operand);
-
- _ = try finishCondBr(sema, block, &child_block, &then_block, &else_block, is_non_err, block_inst);
- return always_noreturn;
+ const is_non_err = try sema.analyzeIsNonErr(block, src, operand);
+ return retWithErrTracing(sema, block, src, is_non_err, .ret, operand);
}
_ = try block.addUnOp(.ret, operand);
@@ -25474,6 +25498,27 @@ fn analyzeIsNull(
return block.addUnOp(air_tag, operand);
}
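+/// Pointer analog of `analyzeIsNonErrComptimeOnly`, used for `ret_load`: the
+/// operand is a pointer to the value being returned. Returns `bool_true` or
+/// `bool_false` when the answer is known from the pointee type alone, and
+/// `.none` when the caller must fall back to a runtime check.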
+fn analyzePtrIsNonErrComptimeOnly(
+ sema: *Sema,
+ block: *Block,
+ src: LazySrcLoc,
+ operand: Air.Inst.Ref,
+) CompileError!Air.Inst.Ref {
+ const ptr_ty = sema.typeOf(operand);
+ assert(ptr_ty.zigTypeTag() == .Pointer);
+ const child_ty = ptr_ty.childType();
+
+ const child_tag = child_ty.zigTypeTag();
+ if (child_tag != .ErrorSet and child_tag != .ErrorUnion) return Air.Inst.Ref.bool_true;
+ if (child_tag == .ErrorSet) return Air.Inst.Ref.bool_false;
+ assert(child_tag == .ErrorUnion);
+
+ _ = block;
+ _ = src;
+
+ return Air.Inst.Ref.none;
+}
+
fn analyzeIsNonErrComptimeOnly(
sema: *Sema,
block: *Block,
@@ -25572,6 +25617,21 @@ fn analyzeIsNonErr(
}
}
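+/// Like `analyzeIsNonErr`, but for a pointer operand: answers at comptime when
+/// the pointee type allows it, otherwise emits a runtime `is_non_err_ptr`
+/// check.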
+fn analyzePtrIsNonErr(
+ sema: *Sema,
+ block: *Block,
+ src: LazySrcLoc,
+ operand: Air.Inst.Ref,
+) CompileError!Air.Inst.Ref {
+ const result = try sema.analyzePtrIsNonErrComptimeOnly(block, src, operand);
+ if (result == .none) {
+ try sema.requireRuntimeBlock(block, src, null);
+ return block.addUnOp(.is_non_err_ptr, operand);
+ } else {
+ return result;
+ }
+}
+
fn analyzeSlice(
sema: *Sema,
block: *Block,
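
For reference, a small self-contained program exercising the behavior this commit touches (hypothetical, not part of the commit). With error return tracing active (per `wantErrorReturnTracing`: the `error_return_tracing` option enabled and the LLVM backend in use), every return that propagates the failure records a frame through `returnError`, and the runtime prints the accumulated trace when `main` fails; returns that go through a result pointer are covered by the new `ret_load` path via `analyzePtrIsNonErr`.

    const std = @import("std");

    // Hypothetical demo; names are illustrative.
    fn inner() !void {
        return error.Failure; // returning an error records a trace frame here
    }

    fn middle() !void {
        try inner(); // propagating it records another frame at this try
    }

    pub fn main() !void {
        try middle(); // and one more; the trace is printed when main fails
    }
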