Commit 13f04e3012
Changed files (11)
src/codegen/c.zig
@@ -39,7 +39,12 @@ const BlockData = struct {
};
pub const CValueMap = std.AutoHashMap(*Inst, CValue);
-pub const TypedefMap = std.HashMap(Type, struct { name: []const u8, rendered: []u8 }, Type.HashContext, std.hash_map.default_max_load_percentage);
+pub const TypedefMap = std.HashMap(
+ Type,
+ struct { name: []const u8, rendered: []u8 },
+ Type.HashContext,
+ std.hash_map.default_max_load_percentage,
+);
fn formatTypeAsCIdentifier(
data: Type,
@@ -151,14 +156,49 @@ pub const Object = struct {
render_ty = render_ty.elemType();
}
- try o.dg.renderType(w, render_ty);
-
- const const_prefix = switch (mutability) {
- .Const => "const ",
- .Mut => "",
- };
- try w.print(" {s}", .{const_prefix});
- try o.writeCValue(w, name);
+ if (render_ty.zigTypeTag() == .Fn) {
+ const ret_ty = render_ty.fnReturnType();
+ if (ret_ty.zigTypeTag() == .NoReturn) {
+ // noreturn attribute is not allowed here.
+ try w.writeAll("void");
+ } else {
+ try o.dg.renderType(w, ret_ty);
+ }
+ try w.writeAll(" (*");
+ switch (mutability) {
+ .Const => try w.writeAll("const "),
+ .Mut => {},
+ }
+ try o.writeCValue(w, name);
+ try w.writeAll(")(");
+ const param_len = render_ty.fnParamLen();
+ const is_var_args = render_ty.fnIsVarArgs();
+ if (param_len == 0 and !is_var_args)
+ try w.writeAll("void")
+ else {
+ var index: usize = 0;
+ while (index < param_len) : (index += 1) {
+ if (index > 0) {
+ try w.writeAll(", ");
+ }
+ try o.dg.renderType(w, render_ty.fnParamType(index));
+ }
+ }
+ if (is_var_args) {
+ if (param_len != 0) try w.writeAll(", ");
+ try w.writeAll("...");
+ }
+ try w.writeByte(')');
+ } else {
+ try o.dg.renderType(w, render_ty);
+
+ const const_prefix = switch (mutability) {
+ .Const => "const ",
+ .Mut => "",
+ };
+ try w.print(" {s}", .{const_prefix});
+ try o.writeCValue(w, name);
+ }
try w.writeAll(suffix.items);
}
};
@@ -196,35 +236,72 @@ pub const DeclGen = struct {
return writer.print("{d}", .{val.toSignedInt()});
return writer.print("{d}", .{val.toUnsignedInt()});
},
- .Pointer => switch (val.tag()) {
- .null_value, .zero => try writer.writeAll("NULL"),
- .one => try writer.writeAll("1"),
- .decl_ref => {
- const decl = val.castTag(.decl_ref).?.data;
-
- // Determine if we must pointer cast.
- assert(decl.has_tv);
- if (t.eql(decl.ty)) {
- try writer.print("&{s}", .{decl.name});
- } else {
- try writer.writeAll("(");
- try dg.renderType(writer, t);
- try writer.print(")&{s}", .{decl.name});
- }
- },
- .function => {
- const func = val.castTag(.function).?.data;
- try writer.print("{s}", .{func.owner_decl.name});
+ .Pointer => switch (t.ptrSize()) {
+ .Slice => {
+ try writer.writeByte('(');
+ try dg.renderType(writer, t);
+ try writer.writeAll("){");
+ var buf: Type.Payload.ElemType = undefined;
+ try dg.renderValue(writer, t.slicePtrFieldType(&buf), val);
+ try writer.writeAll(", ");
+ try writer.print("{d}", .{val.sliceLen()});
+ try writer.writeAll("}");
},
- .extern_fn => {
- const decl = val.castTag(.extern_fn).?.data;
- try writer.print("{s}", .{decl.name});
+ else => switch (val.tag()) {
+ .null_value, .zero => try writer.writeAll("NULL"),
+ .one => try writer.writeAll("1"),
+ .decl_ref => {
+ const decl = val.castTag(.decl_ref).?.data;
+
+ // Determine if we must pointer cast.
+ assert(decl.has_tv);
+ if (t.eql(decl.ty)) {
+ try writer.print("&{s}", .{decl.name});
+ } else {
+ try writer.writeAll("(");
+ try dg.renderType(writer, t);
+ try writer.print(")&{s}", .{decl.name});
+ }
+ },
+ .function => {
+ const func = val.castTag(.function).?.data;
+ try writer.print("{s}", .{func.owner_decl.name});
+ },
+ .extern_fn => {
+ const decl = val.castTag(.extern_fn).?.data;
+ try writer.print("{s}", .{decl.name});
+ },
+ else => switch (t.ptrSize()) {
+ .Slice => unreachable,
+ .Many => {
+ if (val.castTag(.ref_val)) |ref_val_payload| {
+ const sub_val = ref_val_payload.data;
+ if (sub_val.castTag(.bytes)) |bytes_payload| {
+ const bytes = bytes_payload.data;
+ try writer.writeByte('(');
+ try dg.renderType(writer, t);
+ // TODO: make our own C string escape instead of using std.zig.fmtEscapes
+ try writer.print(")\"{}\"", .{std.zig.fmtEscapes(bytes)});
+ } else {
+ unreachable;
+ }
+ } else {
+ unreachable;
+ }
+ },
+ .One => {
+ var arena = std.heap.ArenaAllocator.init(dg.module.gpa);
+ defer arena.deinit();
+
+ const elem_ty = t.elemType();
+ const elem_val = try val.pointerDeref(&arena.allocator);
+
+ try writer.writeAll("&");
+ try dg.renderValue(writer, elem_ty, elem_val);
+ },
+ .C => unreachable,
+ },
},
- else => |e| return dg.fail(
- .{ .node_offset = 0 },
- "TODO: C backend: implement Pointer value {s}",
- .{@tagName(e)},
- ),
},
.Array => {
// First try specific tag representations for more efficiency.
@@ -329,6 +406,32 @@ pub const DeclGen = struct {
},
}
},
+ .Fn => switch (val.tag()) {
+ .null_value, .zero => try writer.writeAll("NULL"),
+ .one => try writer.writeAll("1"),
+ .decl_ref => {
+ const decl = val.castTag(.decl_ref).?.data;
+
+ // Determine if we must pointer cast.
+ assert(decl.has_tv);
+ if (t.eql(decl.ty)) {
+ try writer.print("&{s}", .{decl.name});
+ } else {
+ try writer.writeAll("(");
+ try dg.renderType(writer, t);
+ try writer.print(")&{s}", .{decl.name});
+ }
+ },
+ .function => {
+ const func = val.castTag(.function).?.data;
+ try writer.print("{s}", .{func.owner_decl.name});
+ },
+ .extern_fn => {
+ const decl = val.castTag(.extern_fn).?.data;
+ try writer.print("{s}", .{decl.name});
+ },
+ else => unreachable,
+ },
else => |e| return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement value {s}", .{
@tagName(e),
}),
@@ -339,6 +442,12 @@ pub const DeclGen = struct {
if (!is_global) {
try w.writeAll("static ");
}
+ if (dg.decl.val.castTag(.function)) |func_payload| {
+ const func: *Module.Fn = func_payload.data;
+ if (func.is_cold) {
+ try w.writeAll("ZIG_COLD ");
+ }
+ }
try dg.renderType(w, dg.decl.ty.fnReturnType());
const decl_name = mem.span(dg.decl.name);
try w.print(" {s}(", .{decl_name});
@@ -413,7 +522,35 @@ pub const DeclGen = struct {
.Pointer => {
if (t.isSlice()) {
- return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement slices", .{});
+ if (dg.typedefs.get(t)) |some| {
+ return w.writeAll(some.name);
+ }
+
+ var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
+ defer buffer.deinit();
+ const bw = buffer.writer();
+
+ try bw.writeAll("typedef struct { ");
+ const elem_type = t.elemType();
+ try dg.renderType(bw, elem_type);
+ try bw.writeAll(" *");
+ if (t.isConstPtr()) {
+ try bw.writeAll("const ");
+ }
+ if (t.isVolatilePtr()) {
+ try bw.writeAll("volatile ");
+ }
+ try bw.writeAll("ptr; size_t len; } ");
+ const name_index = buffer.items.len;
+ try bw.print("zig_L_{s};\n", .{typeToCIdentifier(elem_type)});
+
+ const rendered = buffer.toOwnedSlice();
+ errdefer dg.typedefs.allocator.free(rendered);
+ const name = rendered[name_index .. rendered.len - 2];
+
+ try dg.typedefs.ensureUnusedCapacity(1);
+ try w.writeAll(name);
+ dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
} else {
try dg.renderType(w, t.elemType());
try w.writeAll(" *");
@@ -446,13 +583,13 @@ pub const DeclGen = struct {
try dg.renderType(bw, child_type);
try bw.writeAll(" payload; bool is_null; } ");
const name_index = buffer.items.len;
- try bw.print("zig_opt_{s}_t;\n", .{typeToCIdentifier(child_type)});
+ try bw.print("zig_Q_{s};\n", .{typeToCIdentifier(child_type)});
const rendered = buffer.toOwnedSlice();
errdefer dg.typedefs.allocator.free(rendered);
const name = rendered[name_index .. rendered.len - 2];
- try dg.typedefs.ensureCapacity(dg.typedefs.capacity() + 1);
+ try dg.typedefs.ensureUnusedCapacity(1);
try w.writeAll(name);
dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
},
@@ -465,7 +602,7 @@ pub const DeclGen = struct {
return w.writeAll(some.name);
}
const child_type = t.errorUnionChild();
- const set_type = t.errorUnionSet();
+ const err_set_type = t.errorUnionSet();
var buffer = std.ArrayList(u8).init(dg.typedefs.allocator);
defer buffer.deinit();
@@ -475,13 +612,20 @@ pub const DeclGen = struct {
try dg.renderType(bw, child_type);
try bw.writeAll(" payload; uint16_t error; } ");
const name_index = buffer.items.len;
- try bw.print("zig_err_union_{s}_{s}_t;\n", .{ typeToCIdentifier(set_type), typeToCIdentifier(child_type) });
+ if (err_set_type.castTag(.error_set_inferred)) |inf_err_set_payload| {
+ const func = inf_err_set_payload.data;
+ try bw.print("zig_E_{s};\n", .{func.owner_decl.name});
+ } else {
+ try bw.print("zig_E_{s}_{s};\n", .{
+ typeToCIdentifier(err_set_type), typeToCIdentifier(child_type),
+ });
+ }
const rendered = buffer.toOwnedSlice();
errdefer dg.typedefs.allocator.free(rendered);
const name = rendered[name_index .. rendered.len - 2];
- try dg.typedefs.ensureCapacity(dg.typedefs.capacity() + 1);
+ try dg.typedefs.ensureUnusedCapacity(1);
try w.writeAll(name);
dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
},
@@ -514,7 +658,7 @@ pub const DeclGen = struct {
errdefer dg.typedefs.allocator.free(rendered);
const name = rendered[name_start .. rendered.len - 2];
- try dg.typedefs.ensureCapacity(dg.typedefs.capacity() + 1);
+ try dg.typedefs.ensureUnusedCapacity(1);
try w.writeAll(name);
dg.typedefs.putAssumeCapacityNoClobber(t, .{ .name = name, .rendered = rendered });
},
@@ -526,7 +670,28 @@ pub const DeclGen = struct {
try dg.renderType(w, int_tag_ty);
},
.Union => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Union", .{}),
- .Fn => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Fn", .{}),
+ .Fn => {
+ try dg.renderType(w, t.fnReturnType());
+ try w.writeAll(" (*)(");
+ const param_len = t.fnParamLen();
+ const is_var_args = t.fnIsVarArgs();
+ if (param_len == 0 and !is_var_args)
+ try w.writeAll("void")
+ else {
+ var index: usize = 0;
+ while (index < param_len) : (index += 1) {
+ if (index > 0) {
+ try w.writeAll(", ");
+ }
+ try dg.renderType(w, t.fnParamType(index));
+ }
+ }
+ if (is_var_args) {
+ if (param_len != 0) try w.writeAll(", ");
+ try w.writeAll("...");
+ }
+ try w.writeByte(')');
+ },
.Opaque => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Opaque", .{}),
.Frame => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type Frame", .{}),
.AnyFrame => return dg.fail(.{ .node_offset = 0 }, "TODO: C backend: implement type AnyFrame", .{}),
@@ -569,23 +734,27 @@ pub fn genDecl(o: *Object) !void {
.val = o.dg.decl.val,
};
if (tv.val.castTag(.function)) |func_payload| {
- const is_global = o.dg.declIsGlobal(tv);
- const fwd_decl_writer = o.dg.fwd_decl.writer();
- if (is_global) {
- try fwd_decl_writer.writeAll("ZIG_EXTERN_C ");
- }
- try o.dg.renderFunctionSignature(fwd_decl_writer, is_global);
- try fwd_decl_writer.writeAll(";\n");
-
const func: *Module.Fn = func_payload.data;
- try o.indent_writer.insertNewline();
- try o.dg.renderFunctionSignature(o.writer(), is_global);
+ if (func.owner_decl == o.dg.decl) {
+ const is_global = o.dg.declIsGlobal(tv);
+ const fwd_decl_writer = o.dg.fwd_decl.writer();
+ if (is_global) {
+ try fwd_decl_writer.writeAll("ZIG_EXTERN_C ");
+ }
+ try o.dg.renderFunctionSignature(fwd_decl_writer, is_global);
+ try fwd_decl_writer.writeAll(";\n");
- try o.writer().writeByte(' ');
- try genBody(o, func.body);
+ try o.indent_writer.insertNewline();
+ try o.dg.renderFunctionSignature(o.writer(), is_global);
- try o.indent_writer.insertNewline();
- } else if (tv.val.tag() == .extern_fn) {
+ try o.writer().writeByte(' ');
+ try genBody(o, func.body);
+
+ try o.indent_writer.insertNewline();
+ return;
+ }
+ }
+ if (tv.val.tag() == .extern_fn) {
const writer = o.writer();
try writer.writeAll("ZIG_EXTERN_C ");
try o.dg.renderFunctionSignature(writer, true);
@@ -644,9 +813,9 @@ pub fn genHeader(dg: *DeclGen) error{ AnalysisFail, OutOfMemory }!void {
const is_global = dg.declIsGlobal(tv);
if (is_global) {
try writer.writeAll("ZIG_EXTERN_C ");
+ try dg.renderFunctionSignature(writer, is_global);
+ try dg.fwd_decl.appendSlice(";\n");
}
- try dg.renderFunctionSignature(writer, is_global);
- try dg.fwd_decl.appendSlice(";\n");
},
else => {},
}
src/link/C/zig.h
@@ -12,6 +12,12 @@
#define zig_threadlocal zig_threadlocal_unavailable
#endif
+#if __GNUC__
+#define ZIG_COLD __attribute__ ((cold))
+#else
+#define ZIG_COLD
+#endif
+
#if __STDC_VERSION__ >= 199901L
#define ZIG_RESTRICT restrict
#elif defined(__GNUC__)
src/link/C.zig
@@ -207,7 +207,7 @@ pub fn flushModule(self: *C, comp: *Compilation) !void {
}
var fn_count: usize = 0;
- var typedefs = std.HashMap(Type, []const u8, Type.HashContext, std.hash_map.default_max_load_percentage).init(comp.gpa);
+ var typedefs = std.HashMap(Type, void, Type.HashContext, std.hash_map.default_max_load_percentage).init(comp.gpa);
defer typedefs.deinit();
// Typedefs, forward decls and non-functions first.
@@ -217,14 +217,12 @@ pub fn flushModule(self: *C, comp: *Compilation) !void {
if (!decl.has_tv) continue;
const buf = buf: {
if (decl.val.castTag(.function)) |_| {
+ try typedefs.ensureUnusedCapacity(decl.fn_link.c.typedefs.count());
var it = decl.fn_link.c.typedefs.iterator();
while (it.next()) |new| {
- if (typedefs.get(new.key_ptr.*)) |previous| {
- try err_typedef_writer.print("typedef {s} {s};\n", .{ previous, new.value_ptr.name });
- } else {
- try typedefs.ensureCapacity(typedefs.capacity() + 1);
+ const gop = typedefs.getOrPutAssumeCapacity(new.key_ptr.*);
+ if (!gop.found_existing) {
try err_typedef_writer.writeAll(new.value_ptr.rendered);
- typedefs.putAssumeCapacityNoClobber(new.key_ptr.*, new.value_ptr.name);
}
}
fn_count += 1;
src/air.zig
@@ -672,15 +672,15 @@ pub const Body = struct {
/// For debugging purposes, prints a function representation to stderr.
pub fn dumpFn(old_module: Module, module_fn: *Module.Fn) void {
const allocator = old_module.gpa;
- var ctx: DumpTzir = .{
+ var ctx: DumpAir = .{
.allocator = allocator,
.arena = std.heap.ArenaAllocator.init(allocator),
.old_module = &old_module,
.module_fn = module_fn,
.indent = 2,
- .inst_table = DumpTzir.InstTable.init(allocator),
- .partial_inst_table = DumpTzir.InstTable.init(allocator),
- .const_table = DumpTzir.InstTable.init(allocator),
+ .inst_table = DumpAir.InstTable.init(allocator),
+ .partial_inst_table = DumpAir.InstTable.init(allocator),
+ .const_table = DumpAir.InstTable.init(allocator),
};
defer ctx.inst_table.deinit();
defer ctx.partial_inst_table.deinit();
@@ -695,12 +695,12 @@ pub fn dumpFn(old_module: Module, module_fn: *Module.Fn) void {
.dependency_failure => std.debug.print("(dependency_failure)", .{}),
.success => {
const writer = std.io.getStdErr().writer();
- ctx.dump(module_fn.body, writer) catch @panic("failed to dump TZIR");
+ ctx.dump(module_fn.body, writer) catch @panic("failed to dump AIR");
},
}
}
-const DumpTzir = struct {
+const DumpAir = struct {
allocator: *std.mem.Allocator,
arena: std.heap.ArenaAllocator,
old_module: *const Module,
@@ -718,7 +718,7 @@ const DumpTzir = struct {
/// TODO: Improve this code to include a stack of Body and store the instructions
/// in there. Now we are putting all the instructions in a function local table,
/// however instructions that are in a Body can be thown away when the Body ends.
- fn dump(dtz: *DumpTzir, body: Body, writer: std.fs.File.Writer) !void {
+ fn dump(dtz: *DumpAir, body: Body, writer: std.fs.File.Writer) !void {
// First pass to pre-populate the table so that we can show even invalid references.
// Must iterate the same order we iterate the second time.
// We also look for constants and put them in the const_table.
@@ -737,7 +737,7 @@ const DumpTzir = struct {
return dtz.dumpBody(body, writer);
}
- fn fetchInstsAndResolveConsts(dtz: *DumpTzir, body: Body) error{OutOfMemory}!void {
+ fn fetchInstsAndResolveConsts(dtz: *DumpAir, body: Body) error{OutOfMemory}!void {
for (body.instructions) |inst| {
try dtz.inst_table.put(inst, dtz.next_index);
dtz.next_index += 1;
@@ -865,7 +865,7 @@ const DumpTzir = struct {
}
}
- fn dumpBody(dtz: *DumpTzir, body: Body, writer: std.fs.File.Writer) (std.fs.File.WriteError || error{OutOfMemory})!void {
+ fn dumpBody(dtz: *DumpAir, body: Body, writer: std.fs.File.Writer) (std.fs.File.WriteError || error{OutOfMemory})!void {
for (body.instructions) |inst| {
const my_index = dtz.next_partial_index;
try dtz.partial_inst_table.put(inst, my_index);
@@ -1150,7 +1150,7 @@ const DumpTzir = struct {
}
}
- fn writeInst(dtz: *DumpTzir, writer: std.fs.File.Writer, inst: *Inst) !?usize {
+ fn writeInst(dtz: *DumpAir, writer: std.fs.File.Writer, inst: *Inst) !?usize {
if (dtz.partial_inst_table.get(inst)) |operand_index| {
try writer.print("%{d}", .{operand_index});
return null;
@@ -1166,7 +1166,7 @@ const DumpTzir = struct {
}
}
- fn findConst(dtz: *DumpTzir, operand: *Inst) !void {
+ fn findConst(dtz: *DumpAir, operand: *Inst) !void {
if (operand.tag == .constant) {
try dtz.const_table.put(operand, dtz.next_const_index);
dtz.next_const_index += 1;
src/AstGen.zig
@@ -1860,7 +1860,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const ast.Nod
}
}
- try genDefers(gz, parent_scope, scope, .none);
+ try genDefers(gz, parent_scope, scope, .normal_only);
try checkUsed(gz, parent_scope, scope);
}
@@ -2102,6 +2102,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
.@"resume",
.@"await",
.await_nosuspend,
+ .ret_err_value_code,
.extended,
=> break :b false,
@@ -2113,6 +2114,7 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
.compile_error,
.ret_node,
.ret_coerce,
+ .ret_err_value,
.@"unreachable",
.repeat,
.repeat_inline,
@@ -2162,13 +2164,63 @@ fn unusedResultExpr(gz: *GenZir, scope: *Scope, statement: ast.Node.Index) Inner
return noreturn_src_node;
}
+fn countDefers(astgen: *AstGen, outer_scope: *Scope, inner_scope: *Scope) struct {
+ have_any: bool,
+ have_normal: bool,
+ have_err: bool,
+ need_err_code: bool,
+} {
+ const tree = astgen.tree;
+ const node_datas = tree.nodes.items(.data);
+
+ var have_normal = false;
+ var have_err = false;
+ var need_err_code = false;
+ var scope = inner_scope;
+ while (scope != outer_scope) {
+ switch (scope.tag) {
+ .gen_zir => scope = scope.cast(GenZir).?.parent,
+ .local_val => scope = scope.cast(Scope.LocalVal).?.parent,
+ .local_ptr => scope = scope.cast(Scope.LocalPtr).?.parent,
+ .defer_normal => {
+ const defer_scope = scope.cast(Scope.Defer).?;
+ scope = defer_scope.parent;
+
+ have_normal = true;
+ },
+ .defer_error => {
+ const defer_scope = scope.cast(Scope.Defer).?;
+ scope = defer_scope.parent;
+
+ have_err = true;
+
+ const have_err_payload = node_datas[defer_scope.defer_node].lhs != 0;
+ need_err_code = need_err_code or have_err_payload;
+ },
+ .namespace => unreachable,
+ .top => unreachable,
+ }
+ }
+ return .{
+ .have_any = have_normal or have_err,
+ .have_normal = have_normal,
+ .have_err = have_err,
+ .need_err_code = need_err_code,
+ };
+}
+
+const DefersToEmit = union(enum) {
+ both: Zir.Inst.Ref, // err code
+ both_sans_err,
+ normal_only,
+};
+
fn genDefers(
gz: *GenZir,
outer_scope: *Scope,
inner_scope: *Scope,
- err_code: Zir.Inst.Ref,
+ which_ones: DefersToEmit,
) InnerError!void {
- _ = err_code;
const astgen = gz.astgen;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
@@ -2191,12 +2243,37 @@ fn genDefers(
.defer_error => {
const defer_scope = scope.cast(Scope.Defer).?;
scope = defer_scope.parent;
- if (err_code == .none) continue;
- const expr_node = node_datas[defer_scope.defer_node].rhs;
- const prev_in_defer = gz.in_defer;
- gz.in_defer = true;
- defer gz.in_defer = prev_in_defer;
- _ = try unusedResultExpr(gz, defer_scope.parent, expr_node);
+ switch (which_ones) {
+ .both_sans_err => {
+ const expr_node = node_datas[defer_scope.defer_node].rhs;
+ const prev_in_defer = gz.in_defer;
+ gz.in_defer = true;
+ defer gz.in_defer = prev_in_defer;
+ _ = try unusedResultExpr(gz, defer_scope.parent, expr_node);
+ },
+ .both => |err_code| {
+ const expr_node = node_datas[defer_scope.defer_node].rhs;
+ const payload_token = node_datas[defer_scope.defer_node].lhs;
+ const prev_in_defer = gz.in_defer;
+ gz.in_defer = true;
+ defer gz.in_defer = prev_in_defer;
+ var local_val_scope: Scope.LocalVal = undefined;
+ const sub_scope = if (payload_token == 0) defer_scope.parent else blk: {
+ const ident_name = try astgen.identAsString(payload_token);
+ local_val_scope = .{
+ .parent = defer_scope.parent,
+ .gen_zir = gz,
+ .name = ident_name,
+ .inst = err_code,
+ .token_src = payload_token,
+ .id_cat = .@"capture",
+ };
+ break :blk &local_val_scope.base;
+ };
+ _ = try unusedResultExpr(gz, sub_scope, expr_node);
+ },
+ .normal_only => continue,
+ }
},
.namespace => unreachable,
.top => unreachable,
@@ -4564,7 +4641,7 @@ fn tryExpr(
defer then_scope.instructions.deinit(astgen.gpa);
const err_code = try then_scope.addUnNode(err_ops[1], operand, node);
- try genDefers(&then_scope, &fn_block.base, scope, err_code);
+ try genDefers(&then_scope, &fn_block.base, scope, .{ .both = err_code });
const then_result = try then_scope.addUnNode(.ret_node, err_code, node);
var else_scope = parent_gz.makeSubBlock(scope);
@@ -6090,17 +6167,37 @@ fn ret(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref
const astgen = gz.astgen;
const tree = astgen.tree;
const node_datas = tree.nodes.items(.data);
+ const node_tags = tree.nodes.items(.tag);
if (gz.in_defer) return astgen.failNode(node, "cannot return from defer expression", .{});
+ const defer_outer = &astgen.fn_block.?.base;
+
const operand_node = node_datas[node].lhs;
if (operand_node == 0) {
// Returning a void value; skip error defers.
- try genDefers(gz, &astgen.fn_block.?.base, scope, .none);
+ try genDefers(gz, defer_outer, scope, .normal_only);
_ = try gz.addUnNode(.ret_node, .void_value, node);
return Zir.Inst.Ref.unreachable_value;
}
+ if (node_tags[operand_node] == .error_value) {
+ // Hot path for `return error.Foo`. This bypasses result location logic as well as logic
+ // for detecting whether to add something to the function's inferred error set.
+ const ident_token = node_datas[operand_node].rhs;
+ const err_name_str_index = try astgen.identAsString(ident_token);
+ const defer_counts = countDefers(astgen, defer_outer, scope);
+ if (!defer_counts.need_err_code) {
+ try genDefers(gz, defer_outer, scope, .both_sans_err);
+ _ = try gz.addStrTok(.ret_err_value, err_name_str_index, ident_token);
+ return Zir.Inst.Ref.unreachable_value;
+ }
+ const err_code = try gz.addStrTok(.ret_err_value_code, err_name_str_index, ident_token);
+ try genDefers(gz, defer_outer, scope, .{ .both = err_code });
+ _ = try gz.addUnNode(.ret_node, err_code, node);
+ return Zir.Inst.Ref.unreachable_value;
+ }
+
const rl: ResultLoc = if (nodeMayNeedMemoryLocation(tree, operand_node)) .{
.ptr = try gz.addNodeExtended(.ret_ptr, node),
} else .{
@@ -6111,31 +6208,41 @@ fn ret(gz: *GenZir, scope: *Scope, node: ast.Node.Index) InnerError!Zir.Inst.Ref
switch (nodeMayEvalToError(tree, operand_node)) {
.never => {
// Returning a value that cannot be an error; skip error defers.
- try genDefers(gz, &astgen.fn_block.?.base, scope, .none);
+ try genDefers(gz, defer_outer, scope, .normal_only);
_ = try gz.addUnNode(.ret_node, operand, node);
return Zir.Inst.Ref.unreachable_value;
},
.always => {
// Value is always an error. Emit both error defers and regular defers.
const err_code = try gz.addUnNode(.err_union_code, operand, node);
- try genDefers(gz, &astgen.fn_block.?.base, scope, err_code);
+ try genDefers(gz, defer_outer, scope, .{ .both = err_code });
_ = try gz.addUnNode(.ret_node, operand, node);
return Zir.Inst.Ref.unreachable_value;
},
.maybe => {
+ const defer_counts = countDefers(astgen, defer_outer, scope);
+ if (!defer_counts.have_err) {
+ // Only regular defers; no branch needed.
+ try genDefers(gz, defer_outer, scope, .normal_only);
+ _ = try gz.addUnNode(.ret_node, operand, node);
+ return Zir.Inst.Ref.unreachable_value;
+ }
+
// Emit conditional branch for generating errdefers.
const is_err = try gz.addUnNode(.is_err, operand, node);
const condbr = try gz.addCondBr(.condbr, node);
var then_scope = gz.makeSubBlock(scope);
defer then_scope.instructions.deinit(astgen.gpa);
- const err_code = try then_scope.addUnNode(.err_union_code, operand, node);
- try genDefers(&then_scope, &astgen.fn_block.?.base, scope, err_code);
+ const which_ones: DefersToEmit = if (!defer_counts.need_err_code) .both_sans_err else .{
+ .both = try then_scope.addUnNode(.err_union_code, operand, node),
+ };
+ try genDefers(&then_scope, defer_outer, scope, which_ones);
_ = try then_scope.addUnNode(.ret_node, operand, node);
var else_scope = gz.makeSubBlock(scope);
defer else_scope.instructions.deinit(astgen.gpa);
- try genDefers(&else_scope, &astgen.fn_block.?.base, scope, .none);
+ try genDefers(&else_scope, defer_outer, scope, .normal_only);
_ = try else_scope.addUnNode(.ret_node, operand, node);
try setCondBrPayload(condbr, is_err, &then_scope, &else_scope);
@@ -6885,7 +6992,7 @@ fn builtinCall(
.field => {
const field_name = try comptimeExpr(gz, scope, .{ .ty = .const_slice_u8_type }, params[1]);
if (rl == .ref) {
- return try gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{
+ return gz.addPlNode(.field_ptr_named, node, Zir.Inst.FieldNamed{
.lhs = try expr(gz, scope, .ref, params[0]),
.field_name = field_name,
});
src/Module.zig
@@ -755,6 +755,7 @@ pub const Fn = struct {
rbrace_column: u16,
state: Analysis,
+ is_cold: bool = false,
pub const Analysis = enum {
queued,
@@ -3453,6 +3454,9 @@ pub fn clearDecl(
for (decl.dependencies.keys()) |dep| {
dep.removeDependant(decl);
if (dep.dependants.count() == 0 and !dep.deletion_flag) {
+ log.debug("insert {*} ({s}) dependant {*} ({s}) into deletion set", .{
+ decl, decl.name, dep, dep.name,
+ });
// We don't recursively perform a deletion here, because during the update,
// another reference to it may turn up.
dep.deletion_flag = true;
src/Sema.zig
@@ -244,6 +244,7 @@ pub fn analyzeBody(
.ptr_type => try sema.zirPtrType(block, inst),
.ptr_type_simple => try sema.zirPtrTypeSimple(block, inst),
.ref => try sema.zirRef(block, inst),
+ .ret_err_value_code => try sema.zirRetErrValueCode(block, inst),
.shl => try sema.zirShl(block, inst),
.shr => try sema.zirShr(block, inst),
.slice_end => try sema.zirSliceEnd(block, inst),
@@ -380,8 +381,9 @@ pub fn analyzeBody(
.condbr => return sema.zirCondbr(block, inst),
.@"break" => return sema.zirBreak(block, inst),
.compile_error => return sema.zirCompileError(block, inst),
- .ret_coerce => return sema.zirRetTok(block, inst, true),
+ .ret_coerce => return sema.zirRetCoerce(block, inst, true),
.ret_node => return sema.zirRetNode(block, inst),
+ .ret_err_value => return sema.zirRetErrValue(block, inst),
.@"unreachable" => return sema.zirUnreachable(block, inst),
.repeat => return sema.zirRepeat(block, inst),
.panic => return sema.zirPanic(block, inst),
@@ -587,6 +589,19 @@ pub fn resolveInst(sema: *Sema, zir_ref: Zir.Inst.Ref) error{OutOfMemory}!*ir.In
return sema.inst_map.get(@intCast(u32, i)).?;
}
+fn resolveConstBool(
+ sema: *Sema,
+ block: *Scope.Block,
+ src: LazySrcLoc,
+ zir_ref: Zir.Inst.Ref,
+) !bool {
+ const air_inst = try sema.resolveInst(zir_ref);
+ const wanted_type = Type.initTag(.bool);
+ const coerced_inst = try sema.coerce(block, wanted_type, air_inst, src);
+ const val = try sema.resolveConstValue(block, src, coerced_inst);
+ return val.toBool();
+}
+
fn resolveConstString(
sema: *Sema,
block: *Scope.Block,
@@ -1754,8 +1769,9 @@ fn zirRepeat(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!
fn zirPanic(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!Zir.Inst.Index {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const src: LazySrcLoc = inst_data.src();
- return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirPanic", .{});
- //return always_noreturn;
+ const msg_inst = try sema.resolveInst(inst_data.operand);
+
+ return sema.panicWithMsg(block, src, msg_inst);
}
fn zirLoop(sema: *Sema, parent_block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
@@ -2028,8 +2044,10 @@ fn zirSetAlignStack(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Inne
fn zirSetCold(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!void {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
- const src: LazySrcLoc = inst_data.src();
- return sema.mod.fail(&block.base, src, "TODO: implement Sema.zirSetCold", .{});
+ const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
+ const is_cold = try sema.resolveConstBool(block, operand_src, inst_data.operand);
+ const func = sema.func orelse return; // does nothing outside a function
+ func.is_cold = is_cold;
}
fn zirSetFloatMode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!void {
@@ -2041,11 +2059,7 @@ fn zirSetFloatMode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) Inner
fn zirSetRuntimeSafety(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!void {
const inst_data = sema.code.instructions.items(.data)[inst].un_node;
const operand_src: LazySrcLoc = .{ .node_offset_builtin_call_arg0 = inst_data.src_node };
-
- const op = try sema.resolveInst(inst_data.operand);
- const op_coerced = try sema.coerce(block, Type.initTag(.bool), op, operand_src);
- const b = (try sema.resolveConstValue(block, operand_src, op_coerced)).toBool();
- block.want_safety = b;
+ block.want_safety = try sema.resolveConstBool(block, operand_src, inst_data.operand);
}
fn zirBreakpoint(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!void {
@@ -2190,21 +2204,27 @@ fn zirCall(
const extra = sema.code.extraData(Zir.Inst.Call, inst_data.payload_index);
const args = sema.code.refSlice(extra.end, extra.data.args_len);
- return sema.analyzeCall(block, extra.data.callee, func_src, call_src, modifier, ensure_result_used, args);
+ const func = try sema.resolveInst(extra.data.callee);
+ // TODO handle function calls of generic functions
+ const resolved_args = try sema.arena.alloc(*Inst, args.len);
+ for (args) |zir_arg, i| {
+ // the args are already casted to the result of a param type instruction.
+ resolved_args[i] = try sema.resolveInst(zir_arg);
+ }
+
+ return sema.analyzeCall(block, func, func_src, call_src, modifier, ensure_result_used, resolved_args);
}
fn analyzeCall(
sema: *Sema,
block: *Scope.Block,
- zir_func: Zir.Inst.Ref,
+ func: *ir.Inst,
func_src: LazySrcLoc,
call_src: LazySrcLoc,
modifier: std.builtin.CallOptions.Modifier,
ensure_result_used: bool,
- zir_args: []const Zir.Inst.Ref,
+ args: []const *ir.Inst,
) InnerError!*ir.Inst {
- const func = try sema.resolveInst(zir_func);
-
if (func.ty.zigTypeTag() != .Fn)
return sema.mod.fail(&block.base, func_src, "type '{}' not a function", .{func.ty});
@@ -2221,22 +2241,22 @@ fn analyzeCall(
const fn_params_len = func.ty.fnParamLen();
if (func.ty.fnIsVarArgs()) {
assert(cc == .C);
- if (zir_args.len < fn_params_len) {
+ if (args.len < fn_params_len) {
// TODO add error note: declared here
return sema.mod.fail(
&block.base,
func_src,
"expected at least {d} argument(s), found {d}",
- .{ fn_params_len, zir_args.len },
+ .{ fn_params_len, args.len },
);
}
- } else if (fn_params_len != zir_args.len) {
+ } else if (fn_params_len != args.len) {
// TODO add error note: declared here
return sema.mod.fail(
&block.base,
func_src,
"expected {d} argument(s), found {d}",
- .{ fn_params_len, zir_args.len },
+ .{ fn_params_len, args.len },
);
}
@@ -2256,13 +2276,6 @@ fn analyzeCall(
}),
}
- // TODO handle function calls of generic functions
- const casted_args = try sema.arena.alloc(*Inst, zir_args.len);
- for (zir_args) |zir_arg, i| {
- // the args are already casted to the result of a param type instruction.
- casted_args[i] = try sema.resolveInst(zir_arg);
- }
-
const ret_type = func.ty.fnReturnType();
const is_comptime_call = block.is_comptime or modifier == .compile_time;
@@ -2323,7 +2336,7 @@ fn analyzeCall(
defer sema.func = parent_func;
const parent_param_inst_list = sema.param_inst_list;
- sema.param_inst_list = casted_args;
+ sema.param_inst_list = args;
defer sema.param_inst_list = parent_param_inst_list;
const parent_next_arg_index = sema.next_arg_index;
@@ -2357,7 +2370,7 @@ fn analyzeCall(
break :res result;
} else res: {
try sema.requireRuntimeBlock(block, call_src);
- break :res try block.addCall(call_src, ret_type, func, casted_args);
+ break :res try block.addCall(call_src, ret_type, func, args);
};
if (ensure_result_used) {
@@ -3081,28 +3094,31 @@ fn funcCommon(
) InnerError!*Inst {
const src: LazySrcLoc = .{ .node_offset = src_node_offset };
const ret_ty_src: LazySrcLoc = .{ .node_offset_fn_type_ret_ty = src_node_offset };
- const return_type = try sema.resolveType(block, ret_ty_src, zir_return_type);
+ const bare_return_type = try sema.resolveType(block, ret_ty_src, zir_return_type);
const mod = sema.mod;
+ const new_func = if (body_inst == 0) undefined else try sema.gpa.create(Module.Fn);
+ errdefer if (body_inst != 0) sema.gpa.destroy(new_func);
+
const fn_ty: Type = fn_ty: {
// Hot path for some common function types.
if (zir_param_types.len == 0 and !var_args and align_val.tag() == .null_value and
!inferred_error_set)
{
- if (return_type.zigTypeTag() == .NoReturn and cc == .Unspecified) {
+ if (bare_return_type.zigTypeTag() == .NoReturn and cc == .Unspecified) {
break :fn_ty Type.initTag(.fn_noreturn_no_args);
}
- if (return_type.zigTypeTag() == .Void and cc == .Unspecified) {
+ if (bare_return_type.zigTypeTag() == .Void and cc == .Unspecified) {
break :fn_ty Type.initTag(.fn_void_no_args);
}
- if (return_type.zigTypeTag() == .NoReturn and cc == .Naked) {
+ if (bare_return_type.zigTypeTag() == .NoReturn and cc == .Naked) {
break :fn_ty Type.initTag(.fn_naked_noreturn_no_args);
}
- if (return_type.zigTypeTag() == .Void and cc == .C) {
+ if (bare_return_type.zigTypeTag() == .Void and cc == .C) {
break :fn_ty Type.initTag(.fn_ccc_void_no_args);
}
}
@@ -3120,9 +3136,13 @@ fn funcCommon(
return mod.fail(&block.base, src, "TODO implement support for function prototypes to have alignment specified", .{});
}
- if (inferred_error_set) {
- return mod.fail(&block.base, src, "TODO implement functions with inferred error sets", .{});
- }
+ const return_type = if (!inferred_error_set) bare_return_type else blk: {
+ const error_set_ty = try Type.Tag.error_set_inferred.create(sema.arena, new_func);
+ break :blk try Type.Tag.error_union.create(sema.arena, .{
+ .error_set = error_set_ty,
+ .payload = bare_return_type,
+ });
+ };
break :fn_ty try Type.Tag.function.create(sema.arena, .{
.param_types = param_types,
@@ -3188,7 +3208,6 @@ fn funcCommon(
const anal_state: Module.Fn.Analysis = if (is_inline) .inline_only else .queued;
const fn_payload = try sema.arena.create(Value.Payload.Function);
- const new_func = try sema.gpa.create(Module.Fn);
new_func.* = .{
.state = anal_state,
.zir_body_inst = body_inst,
@@ -4542,6 +4561,12 @@ fn zirImport(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!
return mod.constType(sema.arena, src, file_root_decl.ty);
}
+fn zirRetErrValueCode(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
+ _ = inst;
+ return sema.mod.fail(&block.base, sema.src, "TODO implement zirRetErrValueCode", .{});
+}
+
fn zirShl(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -5388,7 +5413,24 @@ fn zirUnreachable(sema: *Sema, block: *Scope.Block, inst: Zir.Inst.Index) InnerE
}
}
-fn zirRetTok(
+fn zirRetErrValue(
+ sema: *Sema,
+ block: *Scope.Block,
+ inst: Zir.Inst.Index,
+) InnerError!Zir.Inst.Index {
+ const inst_data = sema.code.instructions.items(.data)[inst].str_tok;
+ const err_name = inst_data.get(sema.code);
+ const src = inst_data.src();
+
+ // Add the error tag to the inferred error set of the in-scope function.
+ // Return the error code from the function.
+
+ _ = err_name;
+ return sema.mod.fail(&block.base, src, "TODO: Sema.zirRetErrValue", .{});
+}
+
+fn zirRetCoerce(
sema: *Sema,
block: *Scope.Block,
inst: Zir.Inst.Index,
@@ -6195,6 +6237,10 @@ fn zirFuncExtended(
src_locs = sema.code.extraData(Zir.Inst.Func.SrcLocs, extra_index).data;
}
+ const is_var_args = small.is_var_args;
+ const is_inferred_error = small.is_inferred_error;
+ const is_extern = small.is_extern;
+
return sema.funcCommon(
block,
extra.data.src_node,
@@ -6203,9 +6249,9 @@ fn zirFuncExtended(
extra.data.return_type,
cc,
align_val,
- small.is_var_args,
- small.is_inferred_error,
- small.is_extern,
+ is_var_args,
+ is_inferred_error,
+ is_extern,
src_locs,
lib_name,
);
@@ -6357,15 +6403,51 @@ fn addSafetyCheck(sema: *Sema, parent_block: *Scope.Block, ok: *Inst, panic_id:
try parent_block.instructions.append(sema.gpa, &block_inst.base);
}
-fn safetyPanic(sema: *Sema, block: *Scope.Block, src: LazySrcLoc, panic_id: PanicId) !Zir.Inst.Index {
- _ = sema;
- _ = panic_id;
- // TODO Once we have a panic function to call, call it here instead of breakpoint.
- _ = try block.addNoOp(src, Type.initTag(.void), .breakpoint);
- _ = try block.addNoOp(src, Type.initTag(.noreturn), .unreach);
+fn panicWithMsg(
+ sema: *Sema,
+ block: *Scope.Block,
+ src: LazySrcLoc,
+ msg_inst: *ir.Inst,
+) !Zir.Inst.Index {
+ const mod = sema.mod;
+ const arena = sema.arena;
+ const panic_fn = try sema.getBuiltin(block, src, "panic");
+ const unresolved_stack_trace_ty = try sema.getBuiltinType(block, src, "StackTrace");
+ const stack_trace_ty = try sema.resolveTypeFields(block, src, unresolved_stack_trace_ty);
+ const ptr_stack_trace_ty = try mod.simplePtrType(arena, stack_trace_ty, true, .One);
+ const null_stack_trace = try mod.constInst(arena, src, .{
+ .ty = try mod.optionalType(arena, ptr_stack_trace_ty),
+ .val = Value.initTag(.null_value),
+ });
+ const args = try arena.create([2]*ir.Inst);
+ args.* = .{ msg_inst, null_stack_trace };
+ _ = try sema.analyzeCall(block, panic_fn, src, src, .auto, false, args);
return always_noreturn;
}
+fn safetyPanic(
+ sema: *Sema,
+ block: *Scope.Block,
+ src: LazySrcLoc,
+ panic_id: PanicId,
+) !Zir.Inst.Index {
+ const mod = sema.mod;
+ const arena = sema.arena;
+ const msg = switch (panic_id) {
+ .unreach => "reached unreachable code",
+ .unwrap_null => "attempt to use null value",
+ .unwrap_errunion => "unreachable error occurred",
+ .cast_to_null => "cast causes pointer to be null",
+ .incorrect_alignment => "incorrect alignment",
+ .invalid_error_code => "invalid error code",
+ };
+ const msg_inst = try mod.constInst(arena, src, .{
+ .ty = Type.initTag(.const_slice_u8),
+ .val = try Value.Tag.ref_val.create(arena, try Value.Tag.bytes.create(arena, msg)),
+ });
+ return sema.panicWithMsg(block, src, msg_inst);
+}
+
fn emitBackwardBranch(sema: *Sema, block: *Scope.Block, src: LazySrcLoc) !void {
sema.branch_count += 1;
if (sema.branch_count > sema.branch_quota) {
@@ -7377,15 +7459,13 @@ fn wrapOptional(sema: *Sema, block: *Scope.Block, dest_type: Type, inst: *Inst)
}
fn wrapErrorUnion(sema: *Sema, block: *Scope.Block, dest_type: Type, inst: *Inst) !*Inst {
- // TODO deal with inferred error sets
const err_union = dest_type.castTag(.error_union).?;
if (inst.value()) |val| {
- const to_wrap = if (inst.ty.zigTypeTag() != .ErrorSet) blk: {
+ if (inst.ty.zigTypeTag() != .ErrorSet) {
_ = try sema.coerce(block, err_union.data.payload, inst, inst.src);
- break :blk val;
} else switch (err_union.data.error_set.tag()) {
- .anyerror => val,
- .error_set_single => blk: {
+ .anyerror => {},
+ .error_set_single => {
const expected_name = val.castTag(.@"error").?.data.name;
const n = err_union.data.error_set.castTag(.error_set_single).?.data;
if (!mem.eql(u8, expected_name, n)) {
@@ -7396,9 +7476,8 @@ fn wrapErrorUnion(sema: *Sema, block: *Scope.Block, dest_type: Type, inst: *Inst
.{ err_union.data.error_set, inst.ty },
);
}
- break :blk val;
},
- .error_set => blk: {
+ .error_set => {
const expected_name = val.castTag(.@"error").?.data.name;
const error_set = err_union.data.error_set.castTag(.error_set).?.data;
const names = error_set.names_ptr[0..error_set.names_len];
@@ -7415,18 +7494,14 @@ fn wrapErrorUnion(sema: *Sema, block: *Scope.Block, dest_type: Type, inst: *Inst
.{ err_union.data.error_set, inst.ty },
);
}
- break :blk val;
},
else => unreachable,
- };
+ }
return sema.mod.constInst(sema.arena, inst.src, .{
.ty = dest_type,
// creating a SubValue for the error_union payload
- .val = try Value.Tag.error_union.create(
- sema.arena,
- to_wrap,
- ),
+ .val = try Value.Tag.error_union.create(sema.arena, val),
});
}
@@ -7573,12 +7648,12 @@ fn resolveBuiltinTypeFields(
return sema.resolveTypeFields(block, src, resolved_ty);
}
-fn getBuiltinType(
+fn getBuiltin(
sema: *Sema,
block: *Scope.Block,
src: LazySrcLoc,
name: []const u8,
-) InnerError!Type {
+) InnerError!*ir.Inst {
const mod = sema.mod;
const std_pkg = mod.root_pkg.table.get("std").?;
const std_file = (mod.importPkg(std_pkg) catch unreachable).file;
@@ -7596,7 +7671,16 @@ fn getBuiltinType(
builtin_ty.getNamespace().?,
name,
);
- const ty_inst = try sema.analyzeLoad(block, src, opt_ty_inst.?, src);
+ return sema.analyzeLoad(block, src, opt_ty_inst.?, src);
+}
+
+fn getBuiltinType(
+ sema: *Sema,
+ block: *Scope.Block,
+ src: LazySrcLoc,
+ name: []const u8,
+) InnerError!Type {
+ const ty_inst = try sema.getBuiltin(block, src, name);
return sema.resolveAirAsType(block, src, ty_inst);
}
@@ -7662,6 +7746,7 @@ fn typeHasOnePossibleValue(
.error_union,
.error_set,
.error_set_single,
+ .error_set_inferred,
.@"opaque",
.var_args_param,
.manyptr_u8,
src/type.zig
@@ -58,7 +58,7 @@ pub const Type = extern union {
.bool => return .Bool,
.void => return .Void,
.type => return .Type,
- .error_set, .error_set_single, .anyerror => return .ErrorSet,
+ .error_set, .error_set_single, .anyerror, .error_set_inferred => return .ErrorSet,
.comptime_int => return .ComptimeInt,
.comptime_float => return .ComptimeFloat,
.noreturn => return .NoReturn,
@@ -689,7 +689,15 @@ pub const Type = extern union {
.optional_single_mut_pointer,
.optional_single_const_pointer,
.anyframe_T,
- => return self.copyPayloadShallow(allocator, Payload.ElemType),
+ => {
+ const payload = self.cast(Payload.ElemType).?;
+ const new_payload = try allocator.create(Payload.ElemType);
+ new_payload.* = .{
+ .base = .{ .tag = payload.base.tag },
+ .data = try payload.data.copy(allocator),
+ };
+ return Type{ .ptr_otherwise = &new_payload.base };
+ },
.int_signed,
.int_unsigned,
@@ -756,6 +764,7 @@ pub const Type = extern union {
});
},
.error_set => return self.copyPayloadShallow(allocator, Payload.ErrorSet),
+ .error_set_inferred => return self.copyPayloadShallow(allocator, Payload.ErrorSetInferred),
.error_set_single => return self.copyPayloadShallow(allocator, Payload.Name),
.empty_struct => return self.copyPayloadShallow(allocator, Payload.ContainerScope),
.@"struct" => return self.copyPayloadShallow(allocator, Payload.Struct),
@@ -1031,6 +1040,10 @@ pub const Type = extern union {
const error_set = ty.castTag(.error_set).?.data;
return writer.writeAll(std.mem.spanZ(error_set.owner_decl.name));
},
+ .error_set_inferred => {
+ const func = ty.castTag(.error_set_inferred).?.data;
+ return writer.print("(inferred error set of {s})", .{func.owner_decl.name});
+ },
.error_set_single => {
const name = ty.castTag(.error_set_single).?.data;
return writer.print("error{{{s}}}", .{name});
@@ -1144,6 +1157,7 @@ pub const Type = extern union {
.anyerror_void_error_union,
.error_set,
.error_set_single,
+ .error_set_inferred,
.manyptr_u8,
.manyptr_const_u8,
.atomic_ordering,
@@ -1161,6 +1175,9 @@ pub const Type = extern union {
.@"struct" => {
// TODO introduce lazy value mechanism
const struct_obj = self.castTag(.@"struct").?.data;
+ assert(struct_obj.status == .have_field_types or
+ struct_obj.status == .layout_wip or
+ struct_obj.status == .have_layout);
for (struct_obj.fields.values()) |value| {
if (value.ty.hasCodeGenBits())
return true;
@@ -1348,6 +1365,7 @@ pub const Type = extern union {
.error_set_single,
.anyerror_void_error_union,
.anyerror,
+ .error_set_inferred,
=> return 2, // TODO revisit this when we have the concept of the error tag type
.array, .array_sentinel => return self.elemType().abiAlignment(target),
@@ -1580,6 +1598,7 @@ pub const Type = extern union {
.error_set_single,
.anyerror_void_error_union,
.anyerror,
+ .error_set_inferred,
=> return 2, // TODO revisit this when we have the concept of the error tag type
.int_signed, .int_unsigned => {
@@ -1744,6 +1763,7 @@ pub const Type = extern union {
.error_set_single,
.anyerror_void_error_union,
.anyerror,
+ .error_set_inferred,
=> return 16, // TODO revisit this when we have the concept of the error tag type
.int_signed, .int_unsigned => self.cast(Payload.Bits).?.data,
@@ -1863,6 +1883,48 @@ pub const Type = extern union {
};
}
+ pub fn slicePtrFieldType(self: Type, buffer: *Payload.ElemType) Type {
+ switch (self.tag()) {
+ .const_slice_u8 => return Type.initTag(.manyptr_const_u8),
+
+ .const_slice => {
+ const elem_type = self.castTag(.const_slice).?.data;
+ buffer.* = .{
+ .base = .{ .tag = .many_const_pointer },
+ .data = elem_type,
+ };
+ return Type.initPayload(&buffer.base);
+ },
+ .mut_slice => {
+ const elem_type = self.castTag(.mut_slice).?.data;
+ buffer.* = .{
+ .base = .{ .tag = .many_mut_pointer },
+ .data = elem_type,
+ };
+ return Type.initPayload(&buffer.base);
+ },
+
+ .pointer => {
+ const payload = self.castTag(.pointer).?.data;
+ assert(payload.size == .Slice);
+ if (payload.mutable) {
+ buffer.* = .{
+ .base = .{ .tag = .many_mut_pointer },
+ .data = payload.pointee_type,
+ };
+ } else {
+ buffer.* = .{
+ .base = .{ .tag = .many_const_pointer },
+ .data = payload.pointee_type,
+ };
+ }
+ return Type.initPayload(&buffer.base);
+ },
+
+ else => unreachable,
+ }
+ }
+
pub fn isConstPtr(self: Type) bool {
return switch (self.tag()) {
.single_const_pointer,
@@ -1915,7 +1977,10 @@ pub const Type = extern union {
/// Asserts that the type is an optional
pub fn isPtrLikeOptional(self: Type) bool {
switch (self.tag()) {
- .optional_single_const_pointer, .optional_single_mut_pointer => return true,
+ .optional_single_const_pointer,
+ .optional_single_mut_pointer,
+ => return true,
+
.optional => {
var buf: Payload.ElemType = undefined;
const child_type = self.optionalChild(&buf);
@@ -2400,6 +2465,7 @@ pub const Type = extern union {
.error_union,
.error_set,
.error_set_single,
+ .error_set_inferred,
.@"opaque",
.var_args_param,
.manyptr_u8,
@@ -2892,6 +2958,8 @@ pub const Type = extern union {
anyframe_T,
error_set,
error_set_single,
+ /// The type is the inferred error set of a specific function.
+ error_set_inferred,
empty_struct,
@"opaque",
@"struct",
@@ -2989,6 +3057,7 @@ pub const Type = extern union {
=> Payload.Bits,
.error_set => Payload.ErrorSet,
+ .error_set_inferred => Payload.ErrorSetInferred,
.array, .vector => Payload.Array,
.array_sentinel => Payload.ArraySentinel,
@@ -3081,6 +3150,13 @@ pub const Type = extern union {
data: *Module.ErrorSet,
};
+ pub const ErrorSetInferred = struct {
+ pub const base_tag = Tag.error_set_inferred;
+
+ base: Payload = Payload{ .tag = base_tag },
+ data: *Module.Fn,
+ };
+
pub const Pointer = struct {
pub const base_tag = Tag.pointer;
src/value.zig
@@ -483,13 +483,13 @@ pub const Value = extern union {
/// TODO this should become a debug dump() function. In order to print values in a meaningful way
/// we also need access to the type.
pub fn format(
- self: Value,
+ start_val: Value,
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
out_stream: anytype,
) !void {
comptime assert(fmt.len == 0);
- var val = self;
+ var val = start_val;
while (true) switch (val.tag()) {
.u8_type => return out_stream.writeAll("u8"),
.i8_type => return out_stream.writeAll("i8"),
@@ -598,9 +598,9 @@ pub const Value = extern union {
val = field_ptr.container_ptr;
},
.empty_array => return out_stream.writeAll(".{}"),
- .enum_literal => return out_stream.print(".{}", .{std.zig.fmtId(self.castTag(.enum_literal).?.data)}),
- .enum_field_index => return out_stream.print("(enum field {d})", .{self.castTag(.enum_field_index).?.data}),
- .bytes => return out_stream.print("\"{}\"", .{std.zig.fmtEscapes(self.castTag(.bytes).?.data)}),
+ .enum_literal => return out_stream.print(".{}", .{std.zig.fmtId(val.castTag(.enum_literal).?.data)}),
+ .enum_field_index => return out_stream.print("(enum field {d})", .{val.castTag(.enum_field_index).?.data}),
+ .bytes => return out_stream.print("\"{}\"", .{std.zig.fmtEscapes(val.castTag(.bytes).?.data)}),
.repeated => {
try out_stream.writeAll("(repeated) ");
val = val.castTag(.repeated).?.data;
@@ -1336,6 +1336,23 @@ pub const Value = extern union {
};
}
+ pub fn sliceLen(val: Value) u64 {
+ return switch (val.tag()) {
+ .empty_array => 0,
+ .bytes => val.castTag(.bytes).?.data.len,
+ .ref_val => sliceLen(val.castTag(.ref_val).?.data),
+ .decl_ref => {
+ const decl = val.castTag(.decl_ref).?.data;
+ if (decl.ty.zigTypeTag() == .Array) {
+ return decl.ty.arrayLen();
+ } else {
+ return 1;
+ }
+ },
+ else => unreachable,
+ };
+ }
+
/// Asserts the value is a single-item pointer to an array, or an array,
/// or an unknown-length pointer, and returns the element value at the index.
pub fn elemValue(self: Value, allocator: *Allocator, index: usize) error{OutOfMemory}!Value {
src/Zir.zig
@@ -1,7 +1,7 @@
//! Zig Intermediate Representation. Astgen.zig converts AST nodes to these
-//! untyped IR instructions. Next, Sema.zig processes these into TZIR.
+//! untyped IR instructions. Next, Sema.zig processes these into AIR.
//! The minimum amount of information needed to represent a list of ZIR instructions.
-//! Once this structure is completed, it can be used to generate TZIR, followed by
+//! Once this structure is completed, it can be used to generate AIR, followed by
//! machine code, without any memory access into the AST tree token list, node list,
//! or source bytes. Exceptions include:
//! * Compile errors, which may need to reach into these data structures to
@@ -416,8 +416,8 @@ pub const Inst = struct {
/// A labeled block of code that loops forever. At the end of the body will have either
/// a `repeat` instruction or a `repeat_inline` instruction.
/// Uses the `pl_node` field. The AST node is either a for loop or while loop.
- /// This ZIR instruction is needed because TZIR does not (yet?) match ZIR, and Sema
- /// needs to emit more than 1 TZIR block for this instruction.
+ /// This ZIR instruction is needed because AIR does not (yet?) match ZIR, and Sema
+ /// needs to emit more than 1 AIR block for this instruction.
/// The payload is `Block`.
loop,
/// Sends runtime control flow back to the beginning of the current block.
@@ -466,6 +466,19 @@ pub const Inst = struct {
/// Uses the `un_tok` union field.
/// The operand needs to get coerced to the function's return type.
ret_coerce,
+ /// Sends control flow back to the function's caller.
+ /// The return operand is `error.foo` where `foo` is given by the string.
+ /// If the current function has an inferred error set, the error given by the
+ /// name is added to it.
+ /// Uses the `str_tok` union field.
+ ret_err_value,
+ /// A string name is provided which is an anonymous error set value.
+ /// If the current function has an inferred error set, the error given by the
+ /// name is added to it.
+ /// Results in the error code. Note that control flow is not diverted with
+ /// this instruction; a following 'ret' instruction will do the diversion.
+ /// Uses the `str_tok` union field.
+ ret_err_value_code,
/// Create a pointer type that does not have a sentinel, alignment, or bit range specified.
/// Uses the `ptr_type_simple` union field.
ptr_type_simple,
@@ -1193,6 +1206,7 @@ pub const Inst = struct {
.@"resume",
.@"await",
.await_nosuspend,
+ .ret_err_value_code,
.extended,
=> false,
@@ -1203,6 +1217,7 @@ pub const Inst = struct {
.compile_error,
.ret_node,
.ret_coerce,
+ .ret_err_value,
.@"unreachable",
.repeat,
.repeat_inline,
@@ -1307,6 +1322,8 @@ pub const Inst = struct {
.ref = .un_tok,
.ret_node = .un_node,
.ret_coerce = .un_tok,
+ .ret_err_value = .str_tok,
+ .ret_err_value_code = .str_tok,
.ptr_type_simple = .ptr_type_simple,
.ptr_type = .ptr_type,
.slice_start = .pl_node,
@@ -3077,6 +3094,8 @@ const Writer = struct {
.decl_val,
.import,
.arg,
+ .ret_err_value,
+ .ret_err_value_code,
=> try self.writeStrTok(stream, inst),
.func => try self.writeFunc(stream, inst, false),
test/stage2/cbe.zig
@@ -804,19 +804,6 @@ pub fn addCases(ctx: *TestContext) !void {
});
}
- ctx.c("empty start function", linux_x64,
- \\export fn _start() noreturn {
- \\ unreachable;
- \\}
- ,
- \\ZIG_EXTERN_C zig_noreturn void _start(void);
- \\
- \\zig_noreturn void _start(void) {
- \\ zig_breakpoint();
- \\ zig_unreachable();
- \\}
- \\
- );
ctx.h("simple header", linux_x64,
\\export fn start() void{}
,