Commit 572cb24d1a
Changed files (15)
lib/std/zig/ErrorBundle.zig
@@ -0,0 +1,419 @@
+//! To support incremental compilation, errors are stored in various places
+//! so that they can be created and destroyed appropriately. This structure
+//! is used to collect all the errors from the various places into one
+//! convenient place for API users to consume.
+
+string_bytes: std.ArrayListUnmanaged(u8),
+/// The first thing in this array is an `ErrorMessageList`.
+extra: std.ArrayListUnmanaged(u32),
+
+/// An index into `extra` pointing at an `ErrorMessage`.
+pub const MessageIndex = enum(u32) {
+ _,
+};
+
+/// Trailing the header:
+/// * string_bytes
+/// * extra (little endian)
+pub const Header = struct {
+ string_bytes_len: u32,
+ extra_len: u32,
+};
+
+/// Trailing: ErrorMessage for each len
+pub const ErrorMessageList = struct {
+ len: u32,
+ start: u32,
+};
+
+/// Trailing:
+/// * ReferenceTrace for each reference_trace_len
+pub const SourceLocation = struct {
+ /// null terminated string index
+ src_path: u32,
+ line: u32,
+ column: u32,
+ /// byte offset of starting token
+ span_start: u32,
+ /// byte offset of main error location
+ span_main: u32,
+ /// byte offset of end of last token
+ span_end: u32,
+ /// null terminated string index, possibly null.
+ /// Does not include the trailing newline.
+ source_line: u32 = 0,
+ reference_trace_len: u32 = 0,
+};
+
+/// Trailing:
+/// * ErrorMessage for each notes_len.
+pub const ErrorMessage = struct {
+ /// null terminated string index
+ msg: u32,
+ /// Usually one, but incremented for redundant messages.
+ count: u32 = 1,
+ /// 0 or the index into extra of a SourceLocation
+ src_loc: u32 = 0,
+ notes_len: u32 = 0,
+};
+
+pub const ReferenceTrace = struct {
+ /// null terminated string index
+ /// Except for the sentinel ReferenceTrace element, in which case:
+ /// * 0 means remaining references hidden
+ /// * >0 means N references hidden
+ decl_name: u32,
+ /// Index into extra of a SourceLocation
+ /// If this is 0, this is the sentinel ReferenceTrace element.
+ src_loc: u32,
+};
+
+pub fn init(eb: *ErrorBundle, gpa: Allocator) !void {
+ eb.* = .{
+ .string_bytes = .{},
+ .extra = .{},
+ };
+
+ // So that 0 can be used to indicate a null string.
+ try eb.string_bytes.append(gpa, 0);
+
+ _ = try addExtra(eb, gpa, ErrorMessageList{
+ .len = 0,
+ .start = 0,
+ });
+}
+
+pub fn deinit(eb: *ErrorBundle, gpa: Allocator) void {
+ eb.string_bytes.deinit(gpa);
+ eb.extra.deinit(gpa);
+ eb.* = undefined;
+}
+
+pub fn addString(eb: *ErrorBundle, gpa: Allocator, s: []const u8) !u32 {
+ const index = @intCast(u32, eb.string_bytes.items.len);
+ try eb.string_bytes.ensureUnusedCapacity(gpa, s.len + 1);
+ eb.string_bytes.appendSliceAssumeCapacity(s);
+ eb.string_bytes.appendAssumeCapacity(0);
+ return index;
+}
+
+pub fn printString(eb: *ErrorBundle, gpa: Allocator, comptime fmt: []const u8, args: anytype) !u32 {
+ const index = @intCast(u32, eb.string_bytes.items.len);
+ try eb.string_bytes.writer(gpa).print(fmt, args);
+ try eb.string_bytes.append(gpa, 0);
+ return index;
+}
+
+pub fn addErrorMessage(eb: *ErrorBundle, gpa: Allocator, em: ErrorMessage) !void {
+ if (eb.errorMessageCount() == 0) {
+ eb.setStartIndex(@intCast(u32, eb.extra.items.len));
+ }
+ _ = try addExtra(eb, gpa, em);
+}
+
+pub fn addSourceLocation(eb: *ErrorBundle, gpa: Allocator, sl: SourceLocation) !u32 {
+ return addExtra(eb, gpa, sl);
+}
+
+pub fn addReferenceTrace(eb: *ErrorBundle, gpa: Allocator, rt: ReferenceTrace) !void {
+ _ = try addExtra(eb, gpa, rt);
+}
+
+pub fn addBundle(eb: *ErrorBundle, gpa: Allocator, other: ErrorBundle) !void {
+ // Skip over the initial ErrorMessageList len field.
+ const root_fields_len = @typeInfo(ErrorMessageList).Struct.fields.len;
+ const other_list = other.extraData(ErrorMessageList, 0).data;
+ const other_extra = other.extra.items[root_fields_len..];
+
+ try eb.string_bytes.ensureUnusedCapacity(gpa, other.string_bytes.items.len);
+ try eb.extra.ensureUnusedCapacity(gpa, other_extra.len);
+
+ const new_string_base = @intCast(u32, eb.string_bytes.items.len);
+ const new_data_base = @intCast(u32, eb.extra.items.len - root_fields_len);
+
+ eb.string_bytes.appendSliceAssumeCapacity(other.string_bytes.items);
+ eb.extra.appendSliceAssumeCapacity(other_extra);
+
+ // Now we must offset the string indexes and extra indexes of the newly
+ // added extra.
+ var index = new_data_base + other_list.start;
+ for (0..other_list.len) |_| {
+ index = try patchMessage(eb, index, new_string_base, new_data_base);
+ }
+}
+
+fn patchMessage(eb: *ErrorBundle, msg_idx: usize, new_string_base: u32, new_data_base: u32) !u32 {
+ var msg = eb.extraData(ErrorMessage, msg_idx);
+ if (msg.data.msg != 0) msg.data.msg += new_string_base;
+ if (msg.data.src_loc != 0) msg.data.src_loc += new_data_base;
+ eb.setExtra(msg_idx, msg.data);
+
+ try patchSrcLoc(eb, msg.data.src_loc, new_string_base, new_data_base);
+
+ var index = @intCast(u32, msg.end);
+ for (0..msg.data.notes_len) |_| {
+ index = try patchMessage(eb, index, new_string_base, new_data_base);
+ }
+ return index;
+}
+
+fn patchSrcLoc(eb: *ErrorBundle, idx: usize, new_string_base: u32, new_data_base: u32) !void {
+ if (idx == 0) return;
+
+ var src_loc = eb.extraData(SourceLocation, idx);
+ if (src_loc.data.src_path != 0) src_loc.data.src_path += new_string_base;
+ if (src_loc.data.source_line != 0) src_loc.data.source_line += new_string_base;
+ eb.setExtra(idx, src_loc.data);
+
+ var index = src_loc.end;
+ for (0..src_loc.data.reference_trace_len) |_| {
+ var ref_trace = eb.extraData(ReferenceTrace, index);
+ if (ref_trace.data.decl_name != 0) ref_trace.data.decl_name += new_string_base;
+ if (ref_trace.data.src_loc != 0) ref_trace.data.src_loc += new_data_base;
+ eb.setExtra(index, ref_trace.data);
+ try patchSrcLoc(eb, ref_trace.data.src_loc, new_string_base, new_data_base);
+ index = ref_trace.end;
+ }
+}
+
+fn addExtra(eb: *ErrorBundle, gpa: Allocator, extra: anytype) Allocator.Error!u32 {
+ const fields = @typeInfo(@TypeOf(extra)).Struct.fields;
+ try eb.extra.ensureUnusedCapacity(gpa, fields.len);
+ return addExtraAssumeCapacity(eb, extra);
+}
+
+fn addExtraAssumeCapacity(eb: *ErrorBundle, extra: anytype) u32 {
+ const fields = @typeInfo(@TypeOf(extra)).Struct.fields;
+ const result = @intCast(u32, eb.extra.items.len);
+ eb.extra.items.len += fields.len;
+ setExtra(eb, result, extra);
+ return result;
+}
+
+fn setExtra(eb: *ErrorBundle, index: usize, extra: anytype) void {
+ const fields = @typeInfo(@TypeOf(extra)).Struct.fields;
+ var i = index;
+ inline for (fields) |field| {
+ eb.extra.items[i] = switch (field.type) {
+ u32 => @field(extra, field.name),
+ else => @compileError("bad field type"),
+ };
+ i += 1;
+ }
+}
+
+pub fn errorMessageCount(eb: ErrorBundle) u32 {
+ return eb.extra.items[0];
+}
+
+pub fn setErrorMessageCount(eb: *ErrorBundle, count: u32) void {
+ eb.extra.items[0] = count;
+}
+
+pub fn incrementCount(eb: *ErrorBundle, delta: u32) void {
+ eb.extra.items[0] += delta;
+}
+
+pub fn getStartIndex(eb: ErrorBundle) u32 {
+ return eb.extra.items[1];
+}
+
+pub fn setStartIndex(eb: *ErrorBundle, index: u32) void {
+ eb.extra.items[1] = index;
+}
+
+pub fn getErrorMessage(eb: ErrorBundle, index: MessageIndex) ErrorMessage {
+ return eb.extraData(ErrorMessage, @enumToInt(index)).data;
+}
+
+pub fn getSourceLocation(eb: ErrorBundle, index: u32) SourceLocation {
+ assert(index != 0);
+ return eb.extraData(SourceLocation, index).data;
+}
+
+/// Returns the requested data, as well as the new index which is at the start of the
+/// trailers for the object.
+fn extraData(eb: ErrorBundle, comptime T: type, index: usize) struct { data: T, end: usize } {
+ const fields = @typeInfo(T).Struct.fields;
+ var i: usize = index;
+ var result: T = undefined;
+ inline for (fields) |field| {
+ @field(result, field.name) = switch (field.type) {
+ u32 => eb.extra.items[i],
+ else => @compileError("bad field type"),
+ };
+ i += 1;
+ }
+ return .{
+ .data = result,
+ .end = i,
+ };
+}
+
+/// Given an index into `string_bytes` returns the null-terminated string found there.
+pub fn nullTerminatedString(eb: ErrorBundle, index: usize) [:0]const u8 {
+ const string_bytes = eb.string_bytes.items;
+ var end: usize = index;
+ while (string_bytes[end] != 0) {
+ end += 1;
+ }
+ return string_bytes[index..end :0];
+}
+
+pub fn renderToStdErr(eb: ErrorBundle, ttyconf: std.debug.TTY.Config) void {
+ std.debug.getStderrMutex().lock();
+ defer std.debug.getStderrMutex().unlock();
+ const stderr = std.io.getStdErr();
+ return renderToWriter(eb, ttyconf, stderr.writer()) catch return;
+}
+
+pub fn renderToWriter(
+ eb: ErrorBundle,
+ ttyconf: std.debug.TTY.Config,
+ writer: anytype,
+) anyerror!void {
+ const list = eb.extraData(ErrorMessageList, 0).data;
+ var index: usize = list.start;
+ for (0..list.len) |_| {
+ const err_msg = eb.extraData(ErrorMessage, index);
+ index = try renderErrorMessageToWriter(eb, err_msg.data, err_msg.end, ttyconf, writer, "error", .Red, 0);
+ }
+}
+
+fn renderErrorMessageToWriter(
+ eb: ErrorBundle,
+ err_msg: ErrorMessage,
+ end_index: usize,
+ ttyconf: std.debug.TTY.Config,
+ stderr: anytype,
+ kind: []const u8,
+ color: std.debug.TTY.Color,
+ indent: usize,
+) anyerror!usize {
+ var counting_writer = std.io.countingWriter(stderr);
+ const counting_stderr = counting_writer.writer();
+ if (err_msg.src_loc != 0) {
+ const src = eb.extraData(SourceLocation, err_msg.src_loc);
+ try counting_stderr.writeByteNTimes(' ', indent);
+ try ttyconf.setColor(stderr, .Bold);
+ try counting_stderr.print("{s}:{d}:{d}: ", .{
+ eb.nullTerminatedString(src.data.src_path),
+ src.data.line + 1,
+ src.data.column + 1,
+ });
+ try ttyconf.setColor(stderr, color);
+ try counting_stderr.writeAll(kind);
+ try counting_stderr.writeAll(": ");
+ // This is the length of the part before the error message:
+ // e.g. "file.zig:4:5: error: "
+ const prefix_len = @intCast(usize, counting_stderr.context.bytes_written);
+ try ttyconf.setColor(stderr, .Reset);
+ try ttyconf.setColor(stderr, .Bold);
+ if (err_msg.count == 1) {
+ try writeMsg(eb, err_msg, stderr, prefix_len);
+ try stderr.writeByte('\n');
+ } else {
+ try writeMsg(eb, err_msg, stderr, prefix_len);
+ try ttyconf.setColor(stderr, .Dim);
+ try stderr.print(" ({d} times)\n", .{err_msg.count});
+ }
+ try ttyconf.setColor(stderr, .Reset);
+ if (src.data.source_line != 0) {
+ const line = eb.nullTerminatedString(src.data.source_line);
+ for (line) |b| switch (b) {
+ '\t' => try stderr.writeByte(' '),
+ else => try stderr.writeByte(b),
+ };
+ try stderr.writeByte('\n');
+ // TODO basic unicode code point monospace width
+ const before_caret = src.data.span_main - src.data.span_start;
+ // -1 since span.main includes the caret
+ const after_caret = src.data.span_end - src.data.span_main -| 1;
+ try stderr.writeByteNTimes(' ', src.data.column - before_caret);
+ try ttyconf.setColor(stderr, .Green);
+ try stderr.writeByteNTimes('~', before_caret);
+ try stderr.writeByte('^');
+ try stderr.writeByteNTimes('~', after_caret);
+ try stderr.writeByte('\n');
+ try ttyconf.setColor(stderr, .Reset);
+ }
+ var index = end_index;
+ for (0..err_msg.notes_len) |_| {
+ const note = eb.extraData(ErrorMessage, index);
+ index = try renderErrorMessageToWriter(eb, note.data, note.end, ttyconf, stderr, "note", .Cyan, indent);
+ }
+ if (src.data.reference_trace_len > 0) {
+ try ttyconf.setColor(stderr, .Reset);
+ try ttyconf.setColor(stderr, .Dim);
+ try stderr.print("referenced by:\n", .{});
+ var ref_index = src.end;
+ for (0..src.data.reference_trace_len) |_| {
+ const ref_trace = eb.extraData(ReferenceTrace, ref_index);
+ ref_index = ref_trace.end;
+ if (ref_trace.data.src_loc != 0) {
+ const ref_src = eb.getSourceLocation(ref_trace.data.src_loc);
+ try stderr.print(" {s}: {s}:{d}:{d}\n", .{
+ eb.nullTerminatedString(ref_trace.data.decl_name),
+ eb.nullTerminatedString(ref_src.src_path),
+ ref_src.line + 1,
+ ref_src.column + 1,
+ });
+ } else if (ref_trace.data.decl_name != 0) {
+ const count = ref_trace.data.decl_name;
+ try stderr.print(
+ " {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n",
+ .{ count, count + src.data.reference_trace_len - 1 },
+ );
+ } else {
+ try stderr.print(
+ " remaining reference traces hidden; use '-freference-trace' to see all reference traces\n",
+ .{},
+ );
+ }
+ }
+ try stderr.writeByte('\n');
+ try ttyconf.setColor(stderr, .Reset);
+ }
+ return index;
+ } else {
+ try ttyconf.setColor(stderr, color);
+ try stderr.writeByteNTimes(' ', indent);
+ try stderr.writeAll(kind);
+ try stderr.writeAll(": ");
+ try ttyconf.setColor(stderr, .Reset);
+ const msg = eb.nullTerminatedString(err_msg.msg);
+ if (err_msg.count == 1) {
+ try stderr.print("{s}\n", .{msg});
+ } else {
+ try stderr.print("{s}", .{msg});
+ try ttyconf.setColor(stderr, .Dim);
+ try stderr.print(" ({d} times)\n", .{err_msg.count});
+ }
+ try ttyconf.setColor(stderr, .Reset);
+ var index = end_index;
+ for (0..err_msg.notes_len) |_| {
+ const note = eb.extraData(ErrorMessage, index);
+ index = try renderErrorMessageToWriter(eb, note.data, note.end, ttyconf, stderr, "note", .Cyan, indent + 4);
+ }
+ return index;
+ }
+}
+
+/// Splits the error message up into lines to properly indent them
+/// to allow for long, good-looking error messages.
+///
+/// This is used to split the message in `@compileError("hello\nworld")` for example.
+fn writeMsg(eb: ErrorBundle, err_msg: ErrorMessage, stderr: anytype, indent: usize) !void {
+ var lines = std.mem.split(u8, eb.nullTerminatedString(err_msg.msg), "\n");
+ while (lines.next()) |line| {
+ try stderr.writeAll(line);
+ if (lines.index == null) break;
+ try stderr.writeByte('\n');
+ try stderr.writeByteNTimes(' ', indent);
+ }
+}
+
+const std = @import("std");
+const ErrorBundle = @This();
+const Allocator = std.mem.Allocator;
+const assert = std.debug.assert;
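A minimal usage sketch of the new API (not part of this commit; the path, messages, and span values below are invented for illustration). Notes trail their parent message in `extra`, and only top-level messages bump the count in the `ErrorMessageList` header:

const std = @import("std");
const ErrorBundle = std.zig.ErrorBundle;

// Sketch only: build a bundle containing one error with a source location
// and one attached note, then render it to stderr.
fn reportExample(gpa: std.mem.Allocator) !void {
    var eb: ErrorBundle = undefined;
    try eb.init(gpa);
    defer eb.deinit(gpa);

    try eb.addErrorMessage(gpa, .{
        .msg = try eb.addString(gpa, "use of undeclared identifier 'foo'"),
        .src_loc = try eb.addSourceLocation(gpa, .{
            .src_path = try eb.addString(gpa, "example.zig"),
            .line = 2, // 0-based; rendered as line 3
            .column = 4,
            .span_start = 30,
            .span_main = 30,
            .span_end = 33,
            .source_line = try eb.addString(gpa, "    foo();"),
        }),
        .notes_len = 1,
    });
    // The note's ErrorMessage immediately follows its parent in `extra`,
    // which is how renderToWriter finds it.
    try eb.addErrorMessage(gpa, .{
        .msg = try eb.addString(gpa, "did you mean 'for'?"),
    });
    // Only the top-level message is counted in the ErrorMessageList header.
    eb.incrementCount(1);

    eb.renderToStdErr(std.debug.detectTTYConfig(std.io.getStdErr()));
}

With this input, `extra` ends up holding the two-field `ErrorMessageList` header, then the `SourceLocation`, then the parent `ErrorMessage`, then its trailing note, which is exactly the order `renderToWriter` walks.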
lib/std/zig.zig
@@ -3,6 +3,7 @@ const tokenizer = @import("zig/tokenizer.zig");
const fmt = @import("zig/fmt.zig");
const assert = std.debug.assert;
+pub const ErrorBundle = @import("zig/ErrorBundle.zig");
pub const Token = tokenizer.Token;
pub const Tokenizer = tokenizer.Tokenizer;
pub const fmtId = fmt.fmtId;
src/AstGen.zig
@@ -133,6 +133,8 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
try astgen.extra.ensureTotalCapacity(gpa, tree.nodes.len + reserved_count);
astgen.extra.items.len += reserved_count;
+ try lowerAstErrors(&astgen);
+
var top_scope: Scope.Top = .{};
var gz_instructions: std.ArrayListUnmanaged(Zir.Inst.Index) = .{};
@@ -10401,27 +10403,11 @@ fn appendErrorTokNotes(
args: anytype,
notes: []const u32,
) !void {
- @setCold(true);
- const string_bytes = &astgen.string_bytes;
- const msg = @intCast(u32, string_bytes.items.len);
- try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
- const notes_index: u32 = if (notes.len != 0) blk: {
- const notes_start = astgen.extra.items.len;
- try astgen.extra.ensureTotalCapacity(astgen.gpa, notes_start + 1 + notes.len);
- astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
- astgen.extra.appendSliceAssumeCapacity(notes);
- break :blk @intCast(u32, notes_start);
- } else 0;
- try astgen.compile_errors.append(astgen.gpa, .{
- .msg = msg,
- .node = 0,
- .token = token,
- .byte_offset = 0,
- .notes = notes_index,
- });
+ return appendErrorTokNotesOff(astgen, token, 0, format, args, notes);
}
-/// Same as `fail`, except given an absolute byte offset.
+/// Same as `fail`, except given a token plus an offset from its starting byte
+/// offset.
fn failOff(
astgen: *AstGen,
token: Ast.TokenIndex,
@@ -10429,27 +10415,36 @@ fn failOff(
comptime format: []const u8,
args: anytype,
) InnerError {
- try appendErrorOff(astgen, token, byte_offset, format, args);
+ try appendErrorTokNotesOff(astgen, token, byte_offset, format, args, &.{});
return error.AnalysisFail;
}
-fn appendErrorOff(
+fn appendErrorTokNotesOff(
astgen: *AstGen,
token: Ast.TokenIndex,
byte_offset: u32,
comptime format: []const u8,
args: anytype,
-) Allocator.Error!void {
+ notes: []const u32,
+) !void {
@setCold(true);
+ const gpa = astgen.gpa;
const string_bytes = &astgen.string_bytes;
const msg = @intCast(u32, string_bytes.items.len);
- try string_bytes.writer(astgen.gpa).print(format ++ "\x00", args);
- try astgen.compile_errors.append(astgen.gpa, .{
+ try string_bytes.writer(gpa).print(format ++ "\x00", args);
+ const notes_index: u32 = if (notes.len != 0) blk: {
+ const notes_start = astgen.extra.items.len;
+ try astgen.extra.ensureTotalCapacity(gpa, notes_start + 1 + notes.len);
+ astgen.extra.appendAssumeCapacity(@intCast(u32, notes.len));
+ astgen.extra.appendSliceAssumeCapacity(notes);
+ break :blk @intCast(u32, notes_start);
+ } else 0;
+ try astgen.compile_errors.append(gpa, .{
.msg = msg,
.node = 0,
.token = token,
.byte_offset = byte_offset,
- .notes = 0,
+ .notes = notes_index,
});
}
@@ -10458,6 +10453,16 @@ fn errNoteTok(
token: Ast.TokenIndex,
comptime format: []const u8,
args: anytype,
+) Allocator.Error!u32 {
+ return errNoteTokOff(astgen, token, 0, format, args);
+}
+
+fn errNoteTokOff(
+ astgen: *AstGen,
+ token: Ast.TokenIndex,
+ byte_offset: u32,
+ comptime format: []const u8,
+ args: anytype,
) Allocator.Error!u32 {
@setCold(true);
const string_bytes = &astgen.string_bytes;
@@ -10467,7 +10472,7 @@ fn errNoteTok(
.msg = msg,
.node = 0,
.token = token,
- .byte_offset = 0,
+ .byte_offset = byte_offset,
.notes = 0,
});
}
@@ -12634,3 +12639,42 @@ fn emitDbgStmt(gz: *GenZir, line: u32, column: u32) !void {
},
} });
}
+
+fn lowerAstErrors(astgen: *AstGen) !void {
+ const tree = astgen.tree;
+ if (tree.errors.len == 0) return;
+
+ const gpa = astgen.gpa;
+ const parse_err = tree.errors[0];
+
+ var msg: std.ArrayListUnmanaged(u8) = .{};
+ defer msg.deinit(gpa);
+
+ const token_starts = tree.tokens.items(.start);
+ const token_tags = tree.tokens.items(.tag);
+
+ var notes: std.ArrayListUnmanaged(u32) = .{};
+ defer notes.deinit(gpa);
+
+ const tok = parse_err.token + @boolToInt(parse_err.token_is_prev);
+ if (token_tags[tok] == .invalid) {
+ const bad_off = @intCast(u32, tree.tokenSlice(tok).len);
+ const byte_abs = token_starts[tok] + bad_off;
+ try notes.append(gpa, try astgen.errNoteTokOff(tok, bad_off, "invalid byte: '{'}'", .{
+ std.zig.fmtEscapes(tree.source[byte_abs..][0..1]),
+ }));
+ }
+
+ for (tree.errors[1..]) |note| {
+ if (!note.is_note) break;
+
+ msg.clearRetainingCapacity();
+ try tree.renderError(note, msg.writer(gpa));
+ try notes.append(gpa, try astgen.errNoteTok(note.token, "{s}", .{msg.items}));
+ }
+
+ const extra_offset = tree.errorOffset(parse_err);
+ msg.clearRetainingCapacity();
+ try tree.renderError(parse_err, msg.writer(gpa));
+ try astgen.appendErrorTokNotesOff(parse_err.token, extra_offset, "{s}", .{msg.items}, notes.items);
+}
src/Compilation.zig
@@ -9,6 +9,7 @@ const log = std.log.scoped(.compilation);
const Target = std.Target;
const ThreadPool = std.Thread.Pool;
const WaitGroup = std.Thread.WaitGroup;
+const ErrorBundle = std.zig.ErrorBundle;
const Value = @import("value.zig").Value;
const Type = @import("type.zig").Type;
@@ -334,12 +335,41 @@ pub const MiscTask = enum {
libssp,
zig_libc,
analyze_pkg,
+
+ @"musl crti.o",
+ @"musl crtn.o",
+ @"musl crt1.o",
+ @"musl rcrt1.o",
+ @"musl Scrt1.o",
+ @"musl libc.a",
+ @"musl libc.so",
+
+ @"wasi crt1-reactor.o",
+ @"wasi crt1-command.o",
+ @"wasi libc.a",
+ @"libwasi-emulated-process-clocks.a",
+ @"libwasi-emulated-getpid.a",
+ @"libwasi-emulated-mman.a",
+ @"libwasi-emulated-signal.a",
+
+ @"glibc crti.o",
+ @"glibc crtn.o",
+ @"glibc Scrt1.o",
+ @"glibc libc_nonshared.a",
+ @"glibc shared object",
+
+ @"mingw-w64 crt2.o",
+ @"mingw-w64 dllcrt2.o",
+ @"mingw-w64 mingw32.lib",
+ @"mingw-w64 msvcrt-os.lib",
+ @"mingw-w64 mingwex.lib",
+ @"mingw-w64 uuid.lib",
};
pub const MiscError = struct {
/// Allocated with gpa.
msg: []u8,
- children: ?AllErrors = null,
+ children: ?ErrorBundle = null,
pub fn deinit(misc_err: *MiscError, gpa: Allocator) void {
gpa.free(misc_err.msg);
@@ -365,448 +395,6 @@ pub const LldError = struct {
}
};
-/// To support incremental compilation, errors are stored in various places
-/// so that they can be created and destroyed appropriately. This structure
-/// is used to collect all the errors from the various places into one
-/// convenient place for API users to consume. It is allocated into 1 arena
-/// and freed all at once.
-pub const AllErrors = struct {
- arena: std.heap.ArenaAllocator.State,
- list: []const Message,
-
- pub const Message = union(enum) {
- src: struct {
- msg: []const u8,
- src_path: []const u8,
- line: u32,
- column: u32,
- span: Module.SrcLoc.Span,
- /// Usually one, but incremented for redundant messages.
- count: u32 = 1,
- /// Does not include the trailing newline.
- source_line: ?[]const u8,
- notes: []const Message = &.{},
- reference_trace: []Message = &.{},
-
- /// Splits the error message up into lines to properly indent them
- /// to allow for long, good-looking error messages.
- ///
- /// This is used to split the message in `@compileError("hello\nworld")` for example.
- fn writeMsg(src: @This(), stderr: anytype, indent: usize) !void {
- var lines = mem.split(u8, src.msg, "\n");
- while (lines.next()) |line| {
- try stderr.writeAll(line);
- if (lines.index == null) break;
- try stderr.writeByte('\n');
- try stderr.writeByteNTimes(' ', indent);
- }
- }
- },
- plain: struct {
- msg: []const u8,
- notes: []Message = &.{},
- /// Usually one, but incremented for redundant messages.
- count: u32 = 1,
- },
-
- pub fn incrementCount(msg: *Message) void {
- switch (msg.*) {
- .src => |*src| {
- src.count += 1;
- },
- .plain => |*plain| {
- plain.count += 1;
- },
- }
- }
-
- pub fn renderToStdErr(msg: Message, ttyconf: std.debug.TTY.Config) void {
- std.debug.getStderrMutex().lock();
- defer std.debug.getStderrMutex().unlock();
- const stderr = std.io.getStdErr();
- return msg.renderToWriter(ttyconf, stderr.writer(), "error", .Red, 0) catch return;
- }
-
- pub fn renderToWriter(
- msg: Message,
- ttyconf: std.debug.TTY.Config,
- stderr: anytype,
- kind: []const u8,
- color: std.debug.TTY.Color,
- indent: usize,
- ) anyerror!void {
- var counting_writer = std.io.countingWriter(stderr);
- const counting_stderr = counting_writer.writer();
- switch (msg) {
- .src => |src| {
- try counting_stderr.writeByteNTimes(' ', indent);
- try ttyconf.setColor(stderr, .Bold);
- try counting_stderr.print("{s}:{d}:{d}: ", .{
- src.src_path,
- src.line + 1,
- src.column + 1,
- });
- try ttyconf.setColor(stderr, color);
- try counting_stderr.writeAll(kind);
- try counting_stderr.writeAll(": ");
- // This is the length of the part before the error message:
- // e.g. "file.zig:4:5: error: "
- const prefix_len = @intCast(usize, counting_stderr.context.bytes_written);
- try ttyconf.setColor(stderr, .Reset);
- try ttyconf.setColor(stderr, .Bold);
- if (src.count == 1) {
- try src.writeMsg(stderr, prefix_len);
- try stderr.writeByte('\n');
- } else {
- try src.writeMsg(stderr, prefix_len);
- try ttyconf.setColor(stderr, .Dim);
- try stderr.print(" ({d} times)\n", .{src.count});
- }
- try ttyconf.setColor(stderr, .Reset);
- if (src.source_line) |line| {
- for (line) |b| switch (b) {
- '\t' => try stderr.writeByte(' '),
- else => try stderr.writeByte(b),
- };
- try stderr.writeByte('\n');
- // TODO basic unicode code point monospace width
- const before_caret = src.span.main - src.span.start;
- // -1 since span.main includes the caret
- const after_caret = src.span.end - src.span.main -| 1;
- try stderr.writeByteNTimes(' ', src.column - before_caret);
- try ttyconf.setColor(stderr, .Green);
- try stderr.writeByteNTimes('~', before_caret);
- try stderr.writeByte('^');
- try stderr.writeByteNTimes('~', after_caret);
- try stderr.writeByte('\n');
- try ttyconf.setColor(stderr, .Reset);
- }
- for (src.notes) |note| {
- try note.renderToWriter(ttyconf, stderr, "note", .Cyan, indent);
- }
- if (src.reference_trace.len != 0) {
- try ttyconf.setColor(stderr, .Reset);
- try ttyconf.setColor(stderr, .Dim);
- try stderr.print("referenced by:\n", .{});
- for (src.reference_trace) |reference| {
- switch (reference) {
- .src => |ref_src| try stderr.print(" {s}: {s}:{d}:{d}\n", .{
- ref_src.msg,
- ref_src.src_path,
- ref_src.line + 1,
- ref_src.column + 1,
- }),
- .plain => |plain| if (plain.count != 0) {
- try stderr.print(
- " {d} reference(s) hidden; use '-freference-trace={d}' to see all references\n",
- .{ plain.count, plain.count + src.reference_trace.len - 1 },
- );
- } else {
- try stderr.print(
- " remaining reference traces hidden; use '-freference-trace' to see all reference traces\n",
- .{},
- );
- },
- }
- }
- try stderr.writeByte('\n');
- try ttyconf.setColor(stderr, .Reset);
- }
- },
- .plain => |plain| {
- try ttyconf.setColor(stderr, color);
- try stderr.writeByteNTimes(' ', indent);
- try stderr.writeAll(kind);
- try stderr.writeAll(": ");
- try ttyconf.setColor(stderr, .Reset);
- if (plain.count == 1) {
- try stderr.print("{s}\n", .{plain.msg});
- } else {
- try stderr.print("{s}", .{plain.msg});
- try ttyconf.setColor(stderr, .Dim);
- try stderr.print(" ({d} times)\n", .{plain.count});
- }
- try ttyconf.setColor(stderr, .Reset);
- for (plain.notes) |note| {
- try note.renderToWriter(ttyconf, stderr, "note", .Cyan, indent + 4);
- }
- },
- }
- }
-
- pub const HashContext = struct {
- pub fn hash(ctx: HashContext, key: *Message) u64 {
- _ = ctx;
- var hasher = std.hash.Wyhash.init(0);
-
- switch (key.*) {
- .src => |src| {
- hasher.update(src.msg);
- hasher.update(src.src_path);
- std.hash.autoHash(&hasher, src.line);
- std.hash.autoHash(&hasher, src.column);
- std.hash.autoHash(&hasher, src.span.main);
- },
- .plain => |plain| {
- hasher.update(plain.msg);
- },
- }
-
- return hasher.final();
- }
-
- pub fn eql(ctx: HashContext, a: *Message, b: *Message) bool {
- _ = ctx;
- switch (a.*) {
- .src => |a_src| switch (b.*) {
- .src => |b_src| {
- return mem.eql(u8, a_src.msg, b_src.msg) and
- mem.eql(u8, a_src.src_path, b_src.src_path) and
- a_src.line == b_src.line and
- a_src.column == b_src.column and
- a_src.span.main == b_src.span.main;
- },
- .plain => return false,
- },
- .plain => |a_plain| switch (b.*) {
- .src => return false,
- .plain => |b_plain| {
- return mem.eql(u8, a_plain.msg, b_plain.msg);
- },
- },
- }
- }
- };
- };
-
- pub fn deinit(self: *AllErrors, gpa: Allocator) void {
- self.arena.promote(gpa).deinit();
- }
-
- pub fn add(
- module: *Module,
- arena: *std.heap.ArenaAllocator,
- errors: *std.ArrayList(Message),
- module_err_msg: Module.ErrorMsg,
- ) !void {
- const allocator = arena.allocator();
-
- const notes_buf = try allocator.alloc(Message, module_err_msg.notes.len);
- var note_i: usize = 0;
-
- // De-duplicate error notes. The main use case in mind for this is
- // too many "note: called from here" notes when eval branch quota is reached.
- var seen_notes = std.HashMap(
- *Message,
- void,
- Message.HashContext,
- std.hash_map.default_max_load_percentage,
- ).init(allocator);
- const err_source = module_err_msg.src_loc.file_scope.getSource(module.gpa) catch |err| {
- const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator);
- try errors.append(.{
- .plain = .{
- .msg = try std.fmt.allocPrint(allocator, "unable to load '{s}': {s}", .{
- file_path, @errorName(err),
- }),
- },
- });
- return;
- };
- const err_span = try module_err_msg.src_loc.span(module.gpa);
- const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main);
-
- for (module_err_msg.notes) |module_note| {
- const source = try module_note.src_loc.file_scope.getSource(module.gpa);
- const span = try module_note.src_loc.span(module.gpa);
- const loc = std.zig.findLineColumn(source.bytes, span.main);
- const file_path = try module_note.src_loc.file_scope.fullPath(allocator);
- const note = &notes_buf[note_i];
- note.* = .{
- .src = .{
- .src_path = file_path,
- .msg = try allocator.dupe(u8, module_note.msg),
- .span = span,
- .line = @intCast(u32, loc.line),
- .column = @intCast(u32, loc.column),
- .source_line = if (err_loc.eql(loc)) null else try allocator.dupe(u8, loc.source_line),
- },
- };
- const gop = try seen_notes.getOrPut(note);
- if (gop.found_existing) {
- gop.key_ptr.*.incrementCount();
- } else {
- note_i += 1;
- }
- }
-
- const reference_trace = try allocator.alloc(Message, module_err_msg.reference_trace.len);
- for (reference_trace, 0..) |*reference, i| {
- const module_reference = module_err_msg.reference_trace[i];
- if (module_reference.hidden != 0) {
- reference.* = .{ .plain = .{ .msg = undefined, .count = module_reference.hidden } };
- break;
- } else if (module_reference.decl == null) {
- reference.* = .{ .plain = .{ .msg = undefined, .count = 0 } };
- break;
- }
- const source = try module_reference.src_loc.file_scope.getSource(module.gpa);
- const span = try module_reference.src_loc.span(module.gpa);
- const loc = std.zig.findLineColumn(source.bytes, span.main);
- const file_path = try module_reference.src_loc.file_scope.fullPath(allocator);
- reference.* = .{
- .src = .{
- .src_path = file_path,
- .msg = try allocator.dupe(u8, std.mem.sliceTo(module_reference.decl.?, 0)),
- .span = span,
- .line = @intCast(u32, loc.line),
- .column = @intCast(u32, loc.column),
- .source_line = null,
- },
- };
- }
- const file_path = try module_err_msg.src_loc.file_scope.fullPath(allocator);
- try errors.append(.{
- .src = .{
- .src_path = file_path,
- .msg = try allocator.dupe(u8, module_err_msg.msg),
- .span = err_span,
- .line = @intCast(u32, err_loc.line),
- .column = @intCast(u32, err_loc.column),
- .notes = notes_buf[0..note_i],
- .reference_trace = reference_trace,
- .source_line = if (module_err_msg.src_loc.lazy == .entire_file) null else try allocator.dupe(u8, err_loc.source_line),
- },
- });
- }
-
- pub fn addZir(
- arena: Allocator,
- errors: *std.ArrayList(Message),
- file: *Module.File,
- ) !void {
- assert(file.zir_loaded);
- assert(file.tree_loaded);
- assert(file.source_loaded);
- const payload_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)];
- assert(payload_index != 0);
-
- const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index);
- const items_len = header.data.items_len;
- var extra_index = header.end;
- var item_i: usize = 0;
- while (item_i < items_len) : (item_i += 1) {
- const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
- extra_index = item.end;
- const err_span = blk: {
- if (item.data.node != 0) {
- break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node);
- }
- const token_starts = file.tree.tokens.items(.start);
- const start = token_starts[item.data.token] + item.data.byte_offset;
- const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len) - item.data.byte_offset;
- break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
- };
- const err_loc = std.zig.findLineColumn(file.source, err_span.main);
-
- var notes: []Message = &[0]Message{};
- if (item.data.notes != 0) {
- const block = file.zir.extraData(Zir.Inst.Block, item.data.notes);
- const body = file.zir.extra[block.end..][0..block.data.body_len];
- notes = try arena.alloc(Message, body.len);
- for (notes, 0..) |*note, i| {
- const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body[i]);
- const msg = file.zir.nullTerminatedString(note_item.data.msg);
- const span = blk: {
- if (note_item.data.node != 0) {
- break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node);
- }
- const token_starts = file.tree.tokens.items(.start);
- const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
- const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len) - item.data.byte_offset;
- break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
- };
- const loc = std.zig.findLineColumn(file.source, span.main);
-
- note.* = .{
- .src = .{
- .src_path = try file.fullPath(arena),
- .msg = try arena.dupe(u8, msg),
- .span = span,
- .line = @intCast(u32, loc.line),
- .column = @intCast(u32, loc.column),
- .notes = &.{}, // TODO rework this function to be recursive
- .source_line = if (loc.eql(err_loc)) null else try arena.dupe(u8, loc.source_line),
- },
- };
- }
- }
-
- const msg = file.zir.nullTerminatedString(item.data.msg);
- try errors.append(.{
- .src = .{
- .src_path = try file.fullPath(arena),
- .msg = try arena.dupe(u8, msg),
- .span = err_span,
- .line = @intCast(u32, err_loc.line),
- .column = @intCast(u32, err_loc.column),
- .notes = notes,
- .source_line = try arena.dupe(u8, err_loc.source_line),
- },
- });
- }
- }
-
- fn addPlain(
- arena: *std.heap.ArenaAllocator,
- errors: *std.ArrayList(Message),
- msg: []const u8,
- ) !void {
- _ = arena;
- try errors.append(.{ .plain = .{ .msg = msg } });
- }
-
- fn addPlainWithChildren(
- arena: *std.heap.ArenaAllocator,
- errors: *std.ArrayList(Message),
- msg: []const u8,
- optional_children: ?AllErrors,
- ) !void {
- const allocator = arena.allocator();
- const duped_msg = try allocator.dupe(u8, msg);
- if (optional_children) |*children| {
- try errors.append(.{ .plain = .{
- .msg = duped_msg,
- .notes = try dupeList(children.list, allocator),
- } });
- } else {
- try errors.append(.{ .plain = .{ .msg = duped_msg } });
- }
- }
-
- fn dupeList(list: []const Message, arena: Allocator) Allocator.Error![]Message {
- const duped_list = try arena.alloc(Message, list.len);
- for (list, 0..) |item, i| {
- duped_list[i] = switch (item) {
- .src => |src| .{ .src = .{
- .msg = try arena.dupe(u8, src.msg),
- .src_path = try arena.dupe(u8, src.src_path),
- .line = src.line,
- .column = src.column,
- .span = src.span,
- .source_line = if (src.source_line) |s| try arena.dupe(u8, s) else null,
- .notes = try dupeList(src.notes, arena),
- } },
- .plain => |plain| .{ .plain = .{
- .msg = try arena.dupe(u8, plain.msg),
- .notes = try dupeList(plain.notes, arena),
- } },
- };
- }
- return duped_list;
- }
-};
-
pub const Directory = Cache.Directory;
pub const EmitLoc = struct {
@@ -2891,7 +2479,7 @@ pub fn makeBinFileWritable(self: *Compilation) !void {
}
/// This function is temporally single-threaded.
-pub fn totalErrorCount(self: *Compilation) usize {
+pub fn totalErrorCount(self: *Compilation) u32 {
var total: usize = self.failed_c_objects.count() + self.misc_failures.count() +
@boolToInt(self.alloc_failure_occurred) + self.lld_errors.items.len;
@@ -2951,17 +2539,16 @@ pub fn totalErrorCount(self: *Compilation) usize {
}
}
- return total;
+ return @intCast(u32, total);
}
/// This function is temporally single-threaded.
-pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
- var arena = std.heap.ArenaAllocator.init(self.gpa);
- errdefer arena.deinit();
- const arena_allocator = arena.allocator();
+pub fn getAllErrorsAlloc(self: *Compilation) !ErrorBundle {
+ const gpa = self.gpa;
- var errors = std.ArrayList(AllErrors.Message).init(self.gpa);
- defer errors.deinit();
+ var bundle: ErrorBundle = undefined;
+ try bundle.init(gpa);
+ errdefer bundle.deinit(gpa);
{
var it = self.failed_c_objects.iterator();
@@ -2970,53 +2557,63 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
const err_msg = entry.value_ptr.*;
// TODO these fields will need to be adjusted when we have proper
// C error reporting bubbling up.
- try errors.append(.{
- .src = .{
- .src_path = try arena_allocator.dupe(u8, c_object.src.src_path),
- .msg = try std.fmt.allocPrint(arena_allocator, "unable to build C object: {s}", .{
- err_msg.msg,
- }),
- .span = .{ .start = 0, .end = 1, .main = 0 },
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.printString(gpa, "unable to build C object: {s}", .{
+ err_msg.msg,
+ }),
+ .src_loc = try bundle.addSourceLocation(gpa, .{
+ .src_path = try bundle.addString(gpa, c_object.src.src_path),
+ .span_start = 0,
+ .span_main = 0,
+ .span_end = 1,
.line = err_msg.line,
.column = err_msg.column,
- .source_line = null, // TODO
- },
+ .source_line = 0, // TODO
+ }),
});
+ bundle.incrementCount(1);
}
}
- for (self.lld_errors.items) |lld_error| {
- const notes = try arena_allocator.alloc(AllErrors.Message, lld_error.context_lines.len);
- for (lld_error.context_lines, 0..) |context_line, i| {
- notes[i] = .{ .plain = .{
- .msg = try arena_allocator.dupe(u8, context_line),
- } };
- }
- try errors.append(.{
- .plain = .{
- .msg = try arena_allocator.dupe(u8, lld_error.msg),
- .notes = notes,
- },
+ for (self.lld_errors.items) |lld_error| {
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, lld_error.msg),
+ .notes_len = @intCast(u32, lld_error.context_lines.len),
});
+ bundle.incrementCount(1);
+
+ for (lld_error.context_lines) |context_line| {
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, context_line),
+ });
+ }
}
for (self.misc_failures.values()) |*value| {
- try AllErrors.addPlainWithChildren(&arena, &errors, value.msg, value.children);
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, value.msg),
+ .notes_len = if (value.children) |b| b.errorMessageCount() else 0,
+ });
+ if (value.children) |b| try bundle.addBundle(gpa, b);
+ bundle.incrementCount(1);
}
if (self.alloc_failure_occurred) {
- try AllErrors.addPlain(&arena, &errors, "memory allocation failure");
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, "memory allocation failure"),
+ });
+ bundle.incrementCount(1);
}
if (self.bin_file.options.module) |module| {
{
var it = module.failed_files.iterator();
while (it.next()) |entry| {
if (entry.value_ptr.*) |msg| {
- try AllErrors.add(module, &arena, &errors, msg.*);
+ try addModuleErrorMsg(gpa, &bundle, msg.*);
} else {
// Must be ZIR errors. In order for ZIR errors to exist, the parsing
// must have completed successfully.
const tree = try entry.key_ptr.*.getTree(module.gpa);
assert(tree.errors.len == 0);
- try AllErrors.addZir(arena_allocator, &errors, entry.key_ptr.*);
+ try addZirErrorMessages(gpa, &bundle, entry.key_ptr.*);
}
}
}
@@ -3024,7 +2621,7 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
var it = module.failed_embed_files.iterator();
while (it.next()) |entry| {
const msg = entry.value_ptr.*;
- try AllErrors.add(module, &arena, &errors, msg.*);
+ try addModuleErrorMsg(gpa, &bundle, msg.*);
}
}
{
@@ -3034,23 +2631,21 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
// Skip errors for Decls within files that had a parse failure.
// We'll try again once parsing succeeds.
if (decl.getFileScope().okToReportErrors()) {
- try AllErrors.add(module, &arena, &errors, entry.value_ptr.*.*);
+ try addModuleErrorMsg(gpa, &bundle, entry.value_ptr.*.*);
if (module.cimport_errors.get(entry.key_ptr.*)) |cimport_errors| for (cimport_errors) |c_error| {
- if (c_error.path) |some|
- try errors.append(.{
- .src = .{
- .src_path = try arena_allocator.dupe(u8, std.mem.span(some)),
- .span = .{ .start = c_error.offset, .end = c_error.offset + 1, .main = c_error.offset },
- .msg = try arena_allocator.dupe(u8, std.mem.span(c_error.msg)),
- .line = c_error.line,
- .column = c_error.column,
- .source_line = if (c_error.source_line) |line| try arena_allocator.dupe(u8, std.mem.span(line)) else null,
- },
- })
- else
- try errors.append(.{
- .plain = .{ .msg = try arena_allocator.dupe(u8, std.mem.span(c_error.msg)) },
- });
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, std.mem.span(c_error.msg)),
+ .src_loc = if (c_error.path) |some| try bundle.addSourceLocation(gpa, .{
+ .src_path = try bundle.addString(gpa, std.mem.span(some)),
+ .span_start = c_error.offset,
+ .span_main = c_error.offset,
+ .span_end = c_error.offset + 1,
+ .line = c_error.line,
+ .column = c_error.column,
+ .source_line = if (c_error.source_line) |line| try bundle.addString(gpa, std.mem.span(line)) else 0,
+ }) else 0,
+ });
+ bundle.incrementCount(1);
};
}
}
@@ -3062,45 +2657,40 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
// Skip errors for Decls within files that had a parse failure.
// We'll try again once parsing succeeds.
if (decl.getFileScope().okToReportErrors()) {
- try AllErrors.add(module, &arena, &errors, entry.value_ptr.*.*);
+ try addModuleErrorMsg(gpa, &bundle, entry.value_ptr.*.*);
}
}
}
for (module.failed_exports.values()) |value| {
- try AllErrors.add(module, &arena, &errors, value.*);
+ try addModuleErrorMsg(gpa, &bundle, value.*);
}
}
- if (errors.items.len == 0) {
+ if (bundle.errorMessageCount() == 0) {
if (self.link_error_flags.no_entry_point_found) {
- try errors.append(.{
- .plain = .{
- .msg = try std.fmt.allocPrint(arena_allocator, "no entry point found", .{}),
- },
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, "no entry point found"),
});
+ bundle.incrementCount(1);
}
}
if (self.link_error_flags.missing_libc) {
- const notes = try arena_allocator.create([2]AllErrors.Message);
- notes.* = .{
- .{ .plain = .{
- .msg = try arena_allocator.dupe(u8, "run 'zig libc -h' to learn about libc installations"),
- } },
- .{ .plain = .{
- .msg = try arena_allocator.dupe(u8, "run 'zig targets' to see the targets for which zig can always provide libc"),
- } },
- };
- try errors.append(.{
- .plain = .{
- .msg = try std.fmt.allocPrint(arena_allocator, "libc not available", .{}),
- .notes = notes,
- },
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, "libc not available"),
+ .notes_len = 2,
+ });
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, "run 'zig libc -h' to learn about libc installations"),
});
+ try bundle.addErrorMessage(gpa, .{
+ .msg = try bundle.addString(gpa, "run 'zig targets' to see the targets for which zig can always provide libc"),
+ });
+ bundle.incrementCount(1);
}
if (self.bin_file.options.module) |module| {
- if (errors.items.len == 0 and module.compile_log_decls.count() != 0) {
+ if (bundle.errorMessageCount() == 0 and module.compile_log_decls.count() != 0) {
const keys = module.compile_log_decls.keys();
const values = module.compile_log_decls.values();
// First one will be the error; subsequent ones will be notes.
@@ -3121,16 +2711,259 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
};
}
- try AllErrors.add(module, &arena, &errors, err_msg);
+ try addModuleErrorMsg(gpa, &bundle, err_msg);
}
}
- assert(errors.items.len == self.totalErrorCount());
+ assert(self.totalErrorCount() == bundle.errorMessageCount());
+
+ return bundle;
+}
+
+pub const ErrorNoteHashContext = struct {
+ eb: *const ErrorBundle,
+
+ pub fn hash(ctx: ErrorNoteHashContext, key: ErrorBundle.ErrorMessage) u32 {
+ var hasher = std.hash.Wyhash.init(0);
- return AllErrors{
- .list = try arena_allocator.dupe(AllErrors.Message, errors.items),
- .arena = arena.state,
+ hasher.update(ctx.eb.nullTerminatedString(key.msg));
+ if (key.src_loc != 0) {
+ const src = ctx.eb.getSourceLocation(key.src_loc);
+ hasher.update(ctx.eb.nullTerminatedString(src.src_path));
+ std.hash.autoHash(&hasher, src.line);
+ std.hash.autoHash(&hasher, src.column);
+ std.hash.autoHash(&hasher, src.span_main);
+ }
+
+ return @truncate(u32, hasher.final());
+ }
+
+ pub fn eql(
+ ctx: ErrorNoteHashContext,
+ a: ErrorBundle.ErrorMessage,
+ b: ErrorBundle.ErrorMessage,
+ b_index: usize,
+ ) bool {
+ _ = b_index;
+ const msg_a = ctx.eb.nullTerminatedString(a.msg);
+ const msg_b = ctx.eb.nullTerminatedString(b.msg);
+ if (!std.mem.eql(u8, msg_a, msg_b)) return false;
+
+ if (a.src_loc == 0 and b.src_loc == 0) return true;
+ if (a.src_loc == 0 or b.src_loc == 0) return false;
+ const src_a = ctx.eb.getSourceLocation(a.src_loc);
+ const src_b = ctx.eb.getSourceLocation(b.src_loc);
+
+ const src_path_a = ctx.eb.nullTerminatedString(src_a.src_path);
+ const src_path_b = ctx.eb.nullTerminatedString(src_b.src_path);
+
+ return std.mem.eql(u8, src_path_a, src_path_b) and
+ src_a.line == src_b.line and
+ src_a.column == src_b.column and
+ src_a.span_main == src_b.span_main;
+ }
+};
+
+pub fn addModuleErrorMsg(gpa: Allocator, eb: *ErrorBundle, module_err_msg: Module.ErrorMsg) !void {
+ const err_source = module_err_msg.src_loc.file_scope.getSource(gpa) catch |err| {
+ const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa);
+ defer gpa.free(file_path);
+ try eb.addErrorMessage(gpa, .{
+ .msg = try eb.printString(gpa, "unable to load '{s}': {s}", .{
+ file_path, @errorName(err),
+ }),
+ });
+ eb.incrementCount(1);
+ return;
};
+ const err_span = try module_err_msg.src_loc.span(gpa);
+ const err_loc = std.zig.findLineColumn(err_source.bytes, err_span.main);
+ const file_path = try module_err_msg.src_loc.file_scope.fullPath(gpa);
+ defer gpa.free(file_path);
+
+ var ref_traces: std.ArrayListUnmanaged(ErrorBundle.ReferenceTrace) = .{};
+ defer ref_traces.deinit(gpa);
+
+ for (module_err_msg.reference_trace) |module_reference| {
+ if (module_reference.hidden != 0) {
+ try ref_traces.append(gpa, .{
+ .decl_name = module_reference.hidden,
+ .src_loc = 0,
+ });
+ break;
+ } else if (module_reference.decl == null) {
+ try ref_traces.append(gpa, .{
+ .decl_name = 0,
+ .src_loc = 0,
+ });
+ break;
+ }
+ const source = try module_reference.src_loc.file_scope.getSource(gpa);
+ const span = try module_reference.src_loc.span(gpa);
+ const loc = std.zig.findLineColumn(source.bytes, span.main);
+ const rt_file_path = try module_reference.src_loc.file_scope.fullPath(gpa);
+ defer gpa.free(rt_file_path);
+ try ref_traces.append(gpa, .{
+ .decl_name = try eb.addString(gpa, std.mem.sliceTo(module_reference.decl.?, 0)),
+ .src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, rt_file_path),
+ .span_start = span.start,
+ .span_main = span.main,
+ .span_end = span.end,
+ .line = @intCast(u32, loc.line),
+ .column = @intCast(u32, loc.column),
+ .source_line = 0,
+ }),
+ });
+ }
+
+ const src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, file_path),
+ .span_start = err_span.start,
+ .span_main = err_span.main,
+ .span_end = err_span.end,
+ .line = @intCast(u32, err_loc.line),
+ .column = @intCast(u32, err_loc.column),
+ .source_line = if (module_err_msg.src_loc.lazy == .entire_file)
+ 0
+ else
+ try eb.addString(gpa, err_loc.source_line),
+ .reference_trace_len = @intCast(u32, ref_traces.items.len),
+ });
+
+ for (ref_traces.items) |rt| {
+ try eb.addReferenceTrace(gpa, rt);
+ }
+
+ // De-duplicate error notes. The main use case in mind for this is
+ // too many "note: called from here" notes when eval branch quota is reached.
+ var notes: std.ArrayHashMapUnmanaged(ErrorBundle.ErrorMessage, void, ErrorNoteHashContext, true) = .{};
+ defer notes.deinit(gpa);
+
+ for (module_err_msg.notes) |module_note| {
+ const source = try module_note.src_loc.file_scope.getSource(gpa);
+ const span = try module_note.src_loc.span(gpa);
+ const loc = std.zig.findLineColumn(source.bytes, span.main);
+ const note_file_path = try module_note.src_loc.file_scope.fullPath(gpa);
+ defer gpa.free(note_file_path);
+
+ const gop = try notes.getOrPutContext(gpa, .{
+ .msg = try eb.addString(gpa, module_note.msg),
+ .src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, note_file_path),
+ .span_start = span.start,
+ .span_main = span.main,
+ .span_end = span.end,
+ .line = @intCast(u32, loc.line),
+ .column = @intCast(u32, loc.column),
+ .source_line = if (err_loc.eql(loc)) 0 else try eb.addString(gpa, loc.source_line),
+ }),
+ }, .{ .eb = eb });
+ if (gop.found_existing) {
+ gop.key_ptr.count += 1;
+ }
+ }
+
+ try eb.addErrorMessage(gpa, .{
+ .msg = try eb.addString(gpa, module_err_msg.msg),
+ .src_loc = src_loc,
+ .notes_len = @intCast(u32, notes.entries.len),
+ });
+ eb.incrementCount(1);
+
+ for (notes.keys()) |note| {
+ try eb.addErrorMessage(gpa, note);
+ }
+}
+
+pub fn addZirErrorMessages(gpa: Allocator, eb: *ErrorBundle, file: *Module.File) !void {
+ assert(file.zir_loaded);
+ assert(file.tree_loaded);
+ assert(file.source_loaded);
+ const payload_index = file.zir.extra[@enumToInt(Zir.ExtraIndex.compile_errors)];
+ assert(payload_index != 0);
+
+ const header = file.zir.extraData(Zir.Inst.CompileErrors, payload_index);
+ const items_len = header.data.items_len;
+ var extra_index = header.end;
+ for (0..items_len) |_| {
+ const item = file.zir.extraData(Zir.Inst.CompileErrors.Item, extra_index);
+ extra_index = item.end;
+ const err_span = blk: {
+ if (item.data.node != 0) {
+ break :blk Module.SrcLoc.nodeToSpan(&file.tree, item.data.node);
+ }
+ const token_starts = file.tree.tokens.items(.start);
+ const start = token_starts[item.data.token] + item.data.byte_offset;
+ const end = start + @intCast(u32, file.tree.tokenSlice(item.data.token).len) - item.data.byte_offset;
+ break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
+ };
+ const err_loc = std.zig.findLineColumn(file.source, err_span.main);
+
+ var notes: []ErrorBundle.ErrorMessage = &.{};
+ defer gpa.free(notes);
+
+ if (item.data.notes != 0) {
+ const block = file.zir.extraData(Zir.Inst.Block, item.data.notes);
+ const body = file.zir.extra[block.end..][0..block.data.body_len];
+ notes = try gpa.alloc(ErrorBundle.ErrorMessage, body.len);
+ for (notes, body) |*note, body_elem| {
+ const note_item = file.zir.extraData(Zir.Inst.CompileErrors.Item, body_elem);
+ const msg = file.zir.nullTerminatedString(note_item.data.msg);
+ const span = blk: {
+ if (note_item.data.node != 0) {
+ break :blk Module.SrcLoc.nodeToSpan(&file.tree, note_item.data.node);
+ }
+ const token_starts = file.tree.tokens.items(.start);
+ const start = token_starts[note_item.data.token] + note_item.data.byte_offset;
+ const end = start + @intCast(u32, file.tree.tokenSlice(note_item.data.token).len) - item.data.byte_offset;
+ break :blk Module.SrcLoc.Span{ .start = start, .end = end, .main = start };
+ };
+ const loc = std.zig.findLineColumn(file.source, span.main);
+ const src_path = try file.fullPath(gpa);
+ defer gpa.free(src_path);
+
+ note.* = .{
+ .msg = try eb.addString(gpa, msg),
+ .src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, src_path),
+ .span_start = span.start,
+ .span_main = span.main,
+ .span_end = span.end,
+ .line = @intCast(u32, loc.line),
+ .column = @intCast(u32, loc.column),
+ .source_line = if (loc.eql(err_loc))
+ 0
+ else
+ try eb.addString(gpa, loc.source_line),
+ }),
+ .notes_len = 0, // TODO rework this function to be recursive
+ };
+ }
+ }
+
+ const msg = file.zir.nullTerminatedString(item.data.msg);
+ const src_path = try file.fullPath(gpa);
+ defer gpa.free(src_path);
+ try eb.addErrorMessage(gpa, .{
+ .msg = try eb.addString(gpa, msg),
+ .src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, src_path),
+ .span_start = err_span.start,
+ .span_main = err_span.main,
+ .span_end = err_span.end,
+ .line = @intCast(u32, err_loc.line),
+ .column = @intCast(u32, err_loc.column),
+ .source_line = try eb.addString(gpa, err_loc.source_line),
+ }),
+ .notes_len = @intCast(u32, notes.len),
+ });
+
+ for (notes) |note| {
+ try eb.addErrorMessage(gpa, note);
+ }
+ }
+ eb.incrementCount(items_len);
}
pub fn getCompileLogOutput(self: *Compilation) []const u8 {
@@ -5417,34 +5250,29 @@ pub fn generateBuiltinZigSource(comp: *Compilation, allocator: Allocator) Alloca
return buffer.toOwnedSliceSentinel(0);
}
-pub fn updateSubCompilation(sub_compilation: *Compilation) !void {
- try sub_compilation.update();
-
- // Look for compilation errors in this sub_compilation
- // TODO instead of logging these errors, handle them in the callsites
- // of updateSubCompilation and attach them as sub-errors, properly
- // surfacing the errors. You can see an example of this already
- // done inside buildOutputFromZig.
- var errors = try sub_compilation.getAllErrorsAlloc();
- defer errors.deinit(sub_compilation.gpa);
-
- if (errors.list.len != 0) {
- for (errors.list) |full_err_msg| {
- switch (full_err_msg) {
- .src => |src| {
- log.err("{s}:{d}:{d}: {s}", .{
- src.src_path,
- src.line + 1,
- src.column + 1,
- src.msg,
- });
- },
- .plain => |plain| {
- log.err("{s}", .{plain.msg});
- },
- }
- }
- return error.BuildingLibCObjectFailed;
+pub fn updateSubCompilation(
+ parent_comp: *Compilation,
+ sub_comp: *Compilation,
+ misc_task: MiscTask,
+) !void {
+ try sub_comp.update();
+
+ // Look for compilation errors in this sub compilation
+ const gpa = parent_comp.gpa;
+ var keep_errors = false;
+ var errors = try sub_comp.getAllErrorsAlloc();
+ defer if (!keep_errors) errors.deinit(gpa);
+
+ if (errors.errorMessageCount() > 0) {
+ try parent_comp.misc_failures.ensureUnusedCapacity(gpa, 1);
+ parent_comp.misc_failures.putAssumeCapacityNoClobber(misc_task, .{
+ .msg = try std.fmt.allocPrint(gpa, "sub-compilation of {s} failed", .{
+ @tagName(misc_task),
+ }),
+ .children = errors,
+ });
+ keep_errors = true;
+ return error.SubCompilationFailed;
}
}
@@ -5520,23 +5348,7 @@ fn buildOutputFromZig(
});
defer sub_compilation.destroy();
- try sub_compilation.update();
- // Look for compilation errors in this sub_compilation.
- var keep_errors = false;
- var errors = try sub_compilation.getAllErrorsAlloc();
- defer if (!keep_errors) errors.deinit(sub_compilation.gpa);
-
- if (errors.list.len != 0) {
- try comp.misc_failures.ensureUnusedCapacity(comp.gpa, 1);
- comp.misc_failures.putAssumeCapacityNoClobber(misc_task_tag, .{
- .msg = try std.fmt.allocPrint(comp.gpa, "sub-compilation of {s} failed", .{
- @tagName(misc_task_tag),
- }),
- .children = errors,
- });
- keep_errors = true;
- return error.SubCompilationFailed;
- }
+ try comp.updateSubCompilation(sub_compilation, misc_task_tag);
assert(out.* == null);
out.* = Compilation.CRTFile{
@@ -5551,6 +5363,7 @@ pub fn build_crt_file(
comp: *Compilation,
root_name: []const u8,
output_mode: std.builtin.OutputMode,
+ misc_task_tag: MiscTask,
c_source_files: []const Compilation.CSourceFile,
) !void {
const tracy_trace = trace(@src());
@@ -5611,7 +5424,7 @@ pub fn build_crt_file(
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, misc_task_tag);
try comp.crt_files.ensureUnusedCapacity(comp.gpa, 1);
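The misc_failures handling above relies on `addBundle` to splice a sub-compilation's bundle in as the notes of a parent message (see `updateSubCompilation` and the misc_failures loop in `getAllErrorsAlloc`). A sketch of that pattern in isolation; the message strings are invented, and `parent` is assumed to be a bundle initialized elsewhere:

const std = @import("std");

fn attachChildErrors(gpa: std.mem.Allocator, parent: *std.zig.ErrorBundle) !void {
    // Stand-in for the bundle a failed sub-compilation would return from
    // getAllErrorsAlloc().
    var child: std.zig.ErrorBundle = undefined;
    try child.init(gpa);
    defer child.deinit(gpa);
    try child.addErrorMessage(gpa, .{
        .msg = try child.addString(gpa, "unable to build C object: example failure"),
    });
    child.incrementCount(1);

    // The parent message claims the child's top-level messages as its notes;
    // addBundle then appends the child's string_bytes and extra, rebasing the
    // copied string and extra indices so they stay valid inside the parent.
    try parent.addErrorMessage(gpa, .{
        .msg = try parent.addString(gpa, "sub-compilation of musl libc.a failed"),
        .notes_len = child.errorMessageCount(),
    });
    try parent.addBundle(gpa, child);
    parent.incrementCount(1);
}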
src/glibc.zig
@@ -196,7 +196,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-DASSEMBLER",
"-Wa,--noexecstack",
});
- return comp.build_crt_file("crti", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crti", .Obj, .@"glibc crti.o", &[1]Compilation.CSourceFile{
.{
.src_path = try start_asm_path(comp, arena, "crti.S"),
.cache_exempt_flags = args.items,
@@ -215,7 +215,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-DASSEMBLER",
"-Wa,--noexecstack",
});
- return comp.build_crt_file("crtn", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crtn", .Obj, .@"glibc crtn.o", &[1]Compilation.CSourceFile{
.{
.src_path = try start_asm_path(comp, arena, "crtn.S"),
.cache_exempt_flags = args.items,
@@ -265,7 +265,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.cache_exempt_flags = args.items,
};
};
- return comp.build_crt_file("Scrt1", .Obj, &[_]Compilation.CSourceFile{ start_o, abi_note_o });
+ return comp.build_crt_file("Scrt1", .Obj, .@"glibc Scrt1.o", &[_]Compilation.CSourceFile{ start_o, abi_note_o });
},
.libc_nonshared_a => {
const s = path.sep_str;
@@ -366,7 +366,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
files_index += 1;
}
const files = files_buf[0..files_index];
- return comp.build_crt_file("c_nonshared", .Lib, files);
+ return comp.build_crt_file("c_nonshared", .Lib, .@"glibc libc_nonshared.a", files);
},
}
}
@@ -1105,7 +1105,7 @@ fn buildSharedLib(
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .@"glibc shared object");
}
// Return true if glibc has crti/crtn sources for that architecture.
src/libcxx.zig
@@ -258,7 +258,7 @@ pub fn buildLibCXX(comp: *Compilation) !void {
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .libcxx);
assert(comp.libcxx_static_lib == null);
comp.libcxx_static_lib = Compilation.CRTFile{
@@ -418,7 +418,7 @@ pub fn buildLibCXXABI(comp: *Compilation) !void {
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .libcxxabi);
assert(comp.libcxxabi_static_lib == null);
comp.libcxxabi_static_lib = Compilation.CRTFile{
src/libtsan.zig
@@ -235,7 +235,7 @@ pub fn buildTsan(comp: *Compilation) !void {
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .libtsan);
assert(comp.tsan_static_lib == null);
comp.tsan_static_lib = Compilation.CRTFile{
src/libunwind.zig
@@ -130,7 +130,7 @@ pub fn buildStaticLib(comp: *Compilation) !void {
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .libunwind);
assert(comp.libunwind_static_lib == null);
src/main.zig
@@ -24,6 +24,8 @@ const clang = @import("clang.zig");
const Cache = std.Build.Cache;
const target_util = @import("target.zig");
const crash_report = @import("crash_report.zig");
+const Module = @import("Module.zig");
+const AstGen = @import("AstGen.zig");
pub const std_options = struct {
pub const wasiCwd = wasi_cwd;
@@ -3446,15 +3448,13 @@ fn buildOutputType(
var errors = try comp.getAllErrorsAlloc();
defer errors.deinit(comp.gpa);
- if (errors.list.len != 0) {
+ if (errors.errorMessageCount() > 0) {
const ttyconf: std.debug.TTY.Config = switch (comp.color) {
.auto => std.debug.detectTTYConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
- for (errors.list) |full_err_msg| {
- try full_err_msg.renderToWriter(ttyconf, conn.stream.writer(), "error:", .Red, 0);
- }
+ try errors.renderToWriter(ttyconf, conn.stream.writer());
continue;
}
} else {
@@ -3830,15 +3830,13 @@ fn updateModule(gpa: Allocator, comp: *Compilation, hook: AfterUpdateHook) !void
var errors = try comp.getAllErrorsAlloc();
defer errors.deinit(comp.gpa);
- if (errors.list.len != 0) {
+ if (errors.errorMessageCount() > 0) {
const ttyconf: std.debug.TTY.Config = switch (comp.color) {
.auto => std.debug.detectTTYConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
- for (errors.list) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
const log_text = comp.getCompileLogOutput();
if (log_text.len != 0) {
std.debug.print("\nCompile Log Output:\n{s}", .{log_text});
@@ -4438,9 +4436,13 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
var all_modules: Package.AllModules = .{};
defer all_modules.deinit(gpa);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+
// Here we borrow main package's table and will replace it with a fresh
// one after this process completes.
- build_pkg.fetchAndAddDependencies(
+ const fetch_result = build_pkg.fetchAndAddDependencies(
&main_pkg,
arena,
&thread_pool,
@@ -4451,12 +4453,19 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
&dependencies_source,
&build_roots_source,
"",
- color,
+ &errors,
&all_modules,
- ) catch |err| switch (err) {
- error.PackageFetchFailed => process.exit(1),
- else => |e| return e,
- };
+ );
+ if (errors.errorMessageCount() > 0) {
+ const ttyconf: std.debug.TTY.Config = switch (color) {
+ .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
+ .on => .escape_codes,
+ .off => .no_color,
+ };
+ errors.renderToStdErr(ttyconf);
+ process.exit(1);
+ }
+ try fetch_result;
try dependencies_source.appendSlice("};\npub const build_root = struct {\n");
try dependencies_source.appendSlice(build_roots_source.items);
@@ -4543,7 +4552,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
}
fn readSourceFileToEndAlloc(
- allocator: mem.Allocator,
+ allocator: Allocator,
input: *const fs.File,
size_hint: ?usize,
) ![:0]u8 {
@@ -4687,12 +4696,9 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
};
defer tree.deinit(gpa);
- try printErrsMsgToStdErr(gpa, arena, tree, "<stdin>", color);
+ try printAstErrorsToStderr(gpa, tree, "<stdin>", color);
var has_ast_error = false;
if (check_ast_flag) {
- const Module = @import("Module.zig");
- const AstGen = @import("AstGen.zig");
-
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
@@ -4715,20 +4721,16 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
- var arena_instance = std.heap.ArenaAllocator.init(gpa);
- defer arena_instance.deinit();
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(gpa);
- defer errors.deinit();
-
- try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+ try Compilation.addZirErrorMessages(gpa, &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (color) {
.auto => std.debug.detectTTYConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
- for (errors.items) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
has_ast_error = true;
}
}
@@ -4875,12 +4877,13 @@ fn fmtPathFile(
if (stat.kind == .Directory)
return error.IsDir;
+ const gpa = fmt.gpa;
const source_code = try readSourceFileToEndAlloc(
- fmt.gpa,
+ gpa,
&source_file,
std.math.cast(usize, stat.size) orelse return error.FileTooBig,
);
- defer fmt.gpa.free(source_code);
+ defer gpa.free(source_code);
source_file.close();
file_closed = true;
@@ -4888,19 +4891,16 @@ fn fmtPathFile(
// Add to set after no longer possible to get error.IsDir.
if (try fmt.seen.fetchPut(stat.inode, {})) |_| return;
- var tree = try Ast.parse(fmt.gpa, source_code, .zig);
- defer tree.deinit(fmt.gpa);
+ var tree = try Ast.parse(gpa, source_code, .zig);
+ defer tree.deinit(gpa);
- try printErrsMsgToStdErr(fmt.gpa, fmt.arena, tree, file_path, fmt.color);
+ try printAstErrorsToStderr(gpa, tree, file_path, fmt.color);
if (tree.errors.len != 0) {
fmt.any_error = true;
return;
}
if (fmt.check_ast) {
- const Module = @import("Module.zig");
- const AstGen = @import("AstGen.zig");
-
var file: Module.File = .{
.status = .never_loaded,
.source_loaded = true,
@@ -4919,31 +4919,27 @@ fn fmtPathFile(
.root_decl = .none,
};
- file.pkg = try Package.create(fmt.gpa, null, file.sub_file_path);
- defer file.pkg.destroy(fmt.gpa);
+ file.pkg = try Package.create(gpa, null, file.sub_file_path);
+ defer file.pkg.destroy(gpa);
if (stat.size > max_src_size)
return error.FileTooBig;
- file.zir = try AstGen.generate(fmt.gpa, file.tree);
+ file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
- defer file.zir.deinit(fmt.gpa);
+ defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
- var arena_instance = std.heap.ArenaAllocator.init(fmt.gpa);
- defer arena_instance.deinit();
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(fmt.gpa);
- defer errors.deinit();
-
- try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+ try Compilation.addZirErrorMessages(gpa, &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (fmt.color) {
.auto => std.debug.detectTTYConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
- for (errors.items) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
fmt.any_error = true;
}
}
@@ -4971,100 +4967,53 @@ fn fmtPathFile(
}
}
-pub fn printErrsMsgToStdErr(
- gpa: mem.Allocator,
- arena: mem.Allocator,
+fn printAstErrorsToStderr(gpa: Allocator, tree: Ast, path: []const u8, color: Color) !void {
+ var error_bundle: std.zig.ErrorBundle = undefined;
+ try error_bundle.init(gpa);
+ defer error_bundle.deinit(gpa);
+
+ try putAstErrorsIntoBundle(gpa, tree, path, &error_bundle);
+
+ const ttyconf: std.debug.TTY.Config = switch (color) {
+ .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
+ .on => .escape_codes,
+ .off => .no_color,
+ };
+ error_bundle.renderToStdErr(ttyconf);
+}
+
+pub fn putAstErrorsIntoBundle(
+ gpa: Allocator,
tree: Ast,
path: []const u8,
- color: Color,
+ error_bundle: *std.zig.ErrorBundle,
) !void {
- const parse_errors: []const Ast.Error = tree.errors;
- var i: usize = 0;
- while (i < parse_errors.len) : (i += 1) {
- const parse_error = parse_errors[i];
- const lok_token = parse_error.token;
- const token_tags = tree.tokens.items(.tag);
- const start_loc = tree.tokenLocation(0, lok_token);
- const source_line = tree.source[start_loc.line_start..start_loc.line_end];
-
- var text_buf = std.ArrayList(u8).init(gpa);
- defer text_buf.deinit();
- const writer = text_buf.writer();
- try tree.renderError(parse_error, writer);
- const text = try arena.dupe(u8, text_buf.items);
-
- var notes_buffer: [2]Compilation.AllErrors.Message = undefined;
- var notes_len: usize = 0;
-
- if (token_tags[parse_error.token + @boolToInt(parse_error.token_is_prev)] == .invalid) {
- const bad_off = @intCast(u32, tree.tokenSlice(parse_error.token + @boolToInt(parse_error.token_is_prev)).len);
- const byte_offset = @intCast(u32, start_loc.line_start) + @intCast(u32, start_loc.column) + bad_off;
- notes_buffer[notes_len] = .{
- .src = .{
- .src_path = path,
- .msg = try std.fmt.allocPrint(arena, "invalid byte: '{'}'", .{
- std.zig.fmtEscapes(tree.source[byte_offset..][0..1]),
- }),
- .span = .{ .start = byte_offset, .end = byte_offset + 1, .main = byte_offset },
- .line = @intCast(u32, start_loc.line),
- .column = @intCast(u32, start_loc.column) + bad_off,
- .source_line = source_line,
- },
- };
- notes_len += 1;
- }
-
- for (parse_errors[i + 1 ..]) |note| {
- if (!note.is_note) break;
-
- text_buf.items.len = 0;
- try tree.renderError(note, writer);
- const note_loc = tree.tokenLocation(0, note.token);
- const byte_offset = @intCast(u32, note_loc.line_start);
- notes_buffer[notes_len] = .{
- .src = .{
- .src_path = path,
- .msg = try arena.dupe(u8, text_buf.items),
- .span = .{
- .start = byte_offset,
- .end = byte_offset + @intCast(u32, tree.tokenSlice(note.token).len),
- .main = byte_offset,
- },
- .line = @intCast(u32, note_loc.line),
- .column = @intCast(u32, note_loc.column),
- .source_line = tree.source[note_loc.line_start..note_loc.line_end],
- },
- };
- i += 1;
- notes_len += 1;
- }
+ var file: Module.File = .{
+ .status = .never_loaded,
+ .source_loaded = true,
+ .zir_loaded = false,
+ .sub_file_path = path,
+ .source = tree.source,
+ .stat = .{
+ .size = 0,
+ .inode = 0,
+ .mtime = 0,
+ },
+ .tree = tree,
+ .tree_loaded = true,
+ .zir = undefined,
+ .pkg = undefined,
+ .root_decl = .none,
+ };
- const extra_offset = tree.errorOffset(parse_error);
- const byte_offset = @intCast(u32, start_loc.line_start) + extra_offset;
- const message: Compilation.AllErrors.Message = .{
- .src = .{
- .src_path = path,
- .msg = text,
- .span = .{
- .start = byte_offset,
- .end = byte_offset + @intCast(u32, tree.tokenSlice(lok_token).len),
- .main = byte_offset,
- },
- .line = @intCast(u32, start_loc.line),
- .column = @intCast(u32, start_loc.column) + extra_offset,
- .source_line = source_line,
- .notes = notes_buffer[0..notes_len],
- },
- };
+ file.pkg = try Package.create(gpa, null, path);
+ defer file.pkg.destroy(gpa);
- const ttyconf: std.debug.TTY.Config = switch (color) {
- .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
- .on => .escape_codes,
- .off => .no_color,
- };
+ file.zir = try AstGen.generate(gpa, file.tree);
+ file.zir_loaded = true;
+ defer file.zir.deinit(gpa);
- message.renderToStdErr(ttyconf);
- }
+ try Compilation.addZirErrorMessages(gpa, error_bundle, &file);
}
pub const info_zen =
@@ -5547,8 +5496,6 @@ pub fn cmdAstCheck(
arena: Allocator,
args: []const []const u8,
) !void {
- const Module = @import("Module.zig");
- const AstGen = @import("AstGen.zig");
const Zir = @import("Zir.zig");
var color: Color = .auto;
@@ -5638,7 +5585,7 @@ pub fn cmdAstCheck(
file.tree_loaded = true;
defer file.tree.deinit(gpa);
- try printErrsMsgToStdErr(gpa, arena, file.tree, file.sub_file_path, color);
+ try printAstErrorsToStderr(gpa, file.tree, file.sub_file_path, color);
if (file.tree.errors.len != 0) {
process.exit(1);
}
@@ -5648,16 +5595,16 @@ pub fn cmdAstCheck(
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena);
- try Compilation.AllErrors.addZir(arena, &errors, &file);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+ try Compilation.addZirErrorMessages(gpa, &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (color) {
.auto => std.debug.detectTTYConfig(std.io.getStdErr()),
.on => .escape_codes,
.off => .no_color,
};
- for (errors.items) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
process.exit(1);
}
@@ -5715,8 +5662,6 @@ pub fn cmdChangelist(
arena: Allocator,
args: []const []const u8,
) !void {
- const Module = @import("Module.zig");
- const AstGen = @import("AstGen.zig");
const Zir = @import("Zir.zig");
const old_source_file = args[0];
@@ -5764,7 +5709,7 @@ pub fn cmdChangelist(
file.tree_loaded = true;
defer file.tree.deinit(gpa);
- try printErrsMsgToStdErr(gpa, arena, file.tree, old_source_file, .auto);
+ try printAstErrorsToStderr(gpa, file.tree, old_source_file, .auto);
if (file.tree.errors.len != 0) {
process.exit(1);
}
@@ -5774,12 +5719,12 @@ pub fn cmdChangelist(
defer file.zir.deinit(gpa);
if (file.zir.hasCompileErrors()) {
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena);
- try Compilation.AllErrors.addZir(arena, &errors, &file);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+ try Compilation.addZirErrorMessages(gpa, &errors, &file);
const ttyconf = std.debug.detectTTYConfig(std.io.getStdErr());
- for (errors.items) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
process.exit(1);
}
@@ -5801,7 +5746,7 @@ pub fn cmdChangelist(
var new_tree = try Ast.parse(gpa, new_source, .zig);
defer new_tree.deinit(gpa);
- try printErrsMsgToStdErr(gpa, arena, new_tree, new_source_file, .auto);
+ try printAstErrorsToStderr(gpa, new_tree, new_source_file, .auto);
if (new_tree.errors.len != 0) {
process.exit(1);
}
@@ -5813,12 +5758,12 @@ pub fn cmdChangelist(
file.zir_loaded = true;
if (file.zir.hasCompileErrors()) {
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(arena);
- try Compilation.AllErrors.addZir(arena, &errors, &file);
+ var errors: std.zig.ErrorBundle = undefined;
+ try errors.init(gpa);
+ defer errors.deinit(gpa);
+ try Compilation.addZirErrorMessages(gpa, &errors, &file);
const ttyconf = std.debug.detectTTYConfig(std.io.getStdErr());
- for (errors.items) |full_err_msg| {
- full_err_msg.renderToStdErr(ttyconf);
- }
+ errors.renderToStdErr(ttyconf);
process.exit(1);
}
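Each of these call sites now follows the same collect-then-render sequence. A minimal sketch of the shared pattern, with a hypothetical helper name (reportZirErrors is not part of this commit):

fn reportZirErrors(gpa: Allocator, file: *Module.File, color: Color) !bool {
    if (!file.zir.hasCompileErrors()) return false;

    // Collect the ZIR errors into one bundle, then render the whole bundle at
    // once instead of looping over individual messages.
    var errors: std.zig.ErrorBundle = undefined;
    try errors.init(gpa);
    defer errors.deinit(gpa);
    try Compilation.addZirErrorMessages(gpa, &errors, file);

    const ttyconf: std.debug.TTY.Config = switch (color) {
        .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
        .on => .escape_codes,
        .off => .no_color,
    };
    errors.renderToStdErr(ttyconf);
    return true;
}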
src/mingw.zig
@@ -41,7 +41,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
//"-D_UNICODE",
//"-DWPRFLAG=1",
});
- return comp.build_crt_file("crt2", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crt2", .Obj, .@"mingw-w64 crt2.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", "mingw", "crt", "crtexe.c",
@@ -60,7 +60,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-U__CRTDLL__",
"-D__MSVCRT__",
});
- return comp.build_crt_file("dllcrt2", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("dllcrt2", .Obj, .@"mingw-w64 dllcrt2.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", "mingw", "crt", "crtdll.c",
@@ -100,7 +100,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = args.items,
};
}
- return comp.build_crt_file("mingw32", .Lib, &c_source_files);
+ return comp.build_crt_file("mingw32", .Lib, .@"mingw-w64 mingw32.lib", &c_source_files);
},
.msvcrt_os_lib => {
@@ -148,7 +148,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
};
}
}
- return comp.build_crt_file("msvcrt-os", .Lib, c_source_files.items);
+ return comp.build_crt_file("msvcrt-os", .Lib, .@"mingw-w64 msvcrt-os.lib", c_source_files.items);
},
.mingwex_lib => {
@@ -211,7 +211,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
} else {
@panic("unsupported arch");
}
- return comp.build_crt_file("mingwex", .Lib, c_source_files.items);
+ return comp.build_crt_file("mingwex", .Lib, .@"mingw-w64 mingwex.lib", c_source_files.items);
},
.uuid_lib => {
@@ -244,7 +244,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = extra_flags,
};
}
- return comp.build_crt_file("uuid", .Lib, &c_source_files);
+ return comp.build_crt_file("uuid", .Lib, .@"mingw-w64 uuid.lib", &c_source_files);
},
}
}
src/Module.zig
@@ -3756,67 +3756,9 @@ pub fn astGenFile(mod: *Module, file: *File) !void {
file.source_loaded = true;
file.tree = try Ast.parse(gpa, source, .zig);
- defer if (!file.tree_loaded) file.tree.deinit(gpa);
-
- if (file.tree.errors.len != 0) {
- const parse_err = file.tree.errors[0];
-
- var msg = std.ArrayList(u8).init(gpa);
- defer msg.deinit();
-
- const token_starts = file.tree.tokens.items(.start);
- const token_tags = file.tree.tokens.items(.tag);
-
- const extra_offset = file.tree.errorOffset(parse_err);
- try file.tree.renderError(parse_err, msg.writer());
- const err_msg = try gpa.create(ErrorMsg);
- err_msg.* = .{
- .src_loc = .{
- .file_scope = file,
- .parent_decl_node = 0,
- .lazy = if (extra_offset == 0) .{
- .token_abs = parse_err.token,
- } else .{
- .byte_abs = token_starts[parse_err.token] + extra_offset,
- },
- },
- .msg = try msg.toOwnedSlice(),
- };
- if (token_tags[parse_err.token + @boolToInt(parse_err.token_is_prev)] == .invalid) {
- const bad_off = @intCast(u32, file.tree.tokenSlice(parse_err.token + @boolToInt(parse_err.token_is_prev)).len);
- const byte_abs = token_starts[parse_err.token + @boolToInt(parse_err.token_is_prev)] + bad_off;
- try mod.errNoteNonLazy(.{
- .file_scope = file,
- .parent_decl_node = 0,
- .lazy = .{ .byte_abs = byte_abs },
- }, err_msg, "invalid byte: '{'}'", .{std.zig.fmtEscapes(source[byte_abs..][0..1])});
- }
-
- for (file.tree.errors[1..]) |note| {
- if (!note.is_note) break;
-
- try file.tree.renderError(note, msg.writer());
- err_msg.notes = try mod.gpa.realloc(err_msg.notes, err_msg.notes.len + 1);
- err_msg.notes[err_msg.notes.len - 1] = .{
- .src_loc = .{
- .file_scope = file,
- .parent_decl_node = 0,
- .lazy = .{ .token_abs = note.token },
- },
- .msg = try msg.toOwnedSlice(),
- };
- }
-
- {
- comp.mutex.lock();
- defer comp.mutex.unlock();
- try mod.failed_files.putNoClobber(gpa, file, err_msg);
- }
- file.status = .parse_failure;
- return error.AnalysisFail;
- }
file.tree_loaded = true;
+ // Any potential AST errors are converted to ZIR errors here.
file.zir = try AstGen.generate(gpa, file.tree);
file.zir_loaded = true;
file.status = .success_zir;
src/musl.zig
@@ -33,7 +33,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
try args.appendSlice(&[_][]const u8{
"-Qunused-arguments",
});
- return comp.build_crt_file("crti", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crti", .Obj, .@"musl crti.o", &[1]Compilation.CSourceFile{
.{
.src_path = try start_asm_path(comp, arena, "crti.s"),
.extra_flags = args.items,
@@ -46,7 +46,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
try args.appendSlice(&[_][]const u8{
"-Qunused-arguments",
});
- return comp.build_crt_file("crtn", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crtn", .Obj, .@"musl crtn.o", &[1]Compilation.CSourceFile{
.{
.src_path = try start_asm_path(comp, arena, "crtn.s"),
.extra_flags = args.items,
@@ -60,7 +60,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-fno-stack-protector",
"-DCRT",
});
- return comp.build_crt_file("crt1", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crt1", .Obj, .@"musl crt1.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", "musl", "crt", "crt1.c",
@@ -77,7 +77,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-fno-stack-protector",
"-DCRT",
});
- return comp.build_crt_file("rcrt1", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("rcrt1", .Obj, .@"musl rcrt1.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", "musl", "crt", "rcrt1.c",
@@ -94,7 +94,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
"-fno-stack-protector",
"-DCRT",
});
- return comp.build_crt_file("Scrt1", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("Scrt1", .Obj, .@"musl Scrt1.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", "musl", "crt", "Scrt1.c",
@@ -187,7 +187,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = args.items,
};
}
- return comp.build_crt_file("c", .Lib, c_source_files.items);
+ return comp.build_crt_file("c", .Lib, .@"musl libc.a", c_source_files.items);
},
.libc_so => {
const target = comp.getTarget();
@@ -241,7 +241,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
});
defer sub_compilation.destroy();
- try sub_compilation.updateSubCompilation();
+ try comp.updateSubCompilation(sub_compilation, .@"musl libc.so");
try comp.crt_files.ensureUnusedCapacity(comp.gpa, 1);
src/Package.zig
@@ -225,7 +225,7 @@ pub fn fetchAndAddDependencies(
dependencies_source: *std.ArrayList(u8),
build_roots_source: *std.ArrayList(u8),
name_prefix: []const u8,
- color: main.Color,
+ error_bundle: *std.zig.ErrorBundle,
all_modules: *AllModules,
) !void {
const max_bytes = 10 * 1024 * 1024;
@@ -250,7 +250,7 @@ pub fn fetchAndAddDependencies(
if (ast.errors.len > 0) {
const file_path = try directory.join(arena, &.{Manifest.basename});
- try main.printErrsMsgToStdErr(gpa, arena, ast, file_path, color);
+ try main.putAstErrorsIntoBundle(gpa, ast, file_path, error_bundle);
return error.PackageFetchFailed;
}
@@ -258,23 +258,18 @@ pub fn fetchAndAddDependencies(
defer manifest.deinit(gpa);
if (manifest.errors.len > 0) {
- const ttyconf: std.debug.TTY.Config = switch (color) {
- .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
- .on => .escape_codes,
- .off => .no_color,
- };
const file_path = try directory.join(arena, &.{Manifest.basename});
for (manifest.errors) |msg| {
- Report.renderErrorMessage(ast, file_path, ttyconf, msg, &.{});
+ try Report.addErrorMessage(gpa, ast, file_path, error_bundle, 0, msg);
}
return error.PackageFetchFailed;
}
const report: Report = .{
+ .gpa = gpa,
.ast = &ast,
.directory = directory,
- .color = color,
- .arena = arena,
+ .error_bundle = error_bundle,
};
var any_error = false;
@@ -307,7 +302,7 @@ pub fn fetchAndAddDependencies(
dependencies_source,
build_roots_source,
sub_prefix,
- color,
+ error_bundle,
all_modules,
);
@@ -348,10 +343,10 @@ pub fn createFilePkg(
}
const Report = struct {
+ gpa: Allocator,
ast: *const std.zig.Ast,
directory: Compilation.Directory,
- color: main.Color,
- arena: Allocator,
+ error_bundle: *std.zig.ErrorBundle,
fn fail(
report: Report,
@@ -359,52 +354,48 @@ const Report = struct {
comptime fmt_string: []const u8,
fmt_args: anytype,
) error{ PackageFetchFailed, OutOfMemory } {
- return failWithNotes(report, &.{}, tok, fmt_string, fmt_args);
- }
+ const gpa = report.gpa;
- fn failWithNotes(
- report: Report,
- notes: []const Compilation.AllErrors.Message,
- tok: std.zig.Ast.TokenIndex,
- comptime fmt_string: []const u8,
- fmt_args: anytype,
- ) error{ PackageFetchFailed, OutOfMemory } {
- const ttyconf: std.debug.TTY.Config = switch (report.color) {
- .auto => std.debug.detectTTYConfig(std.io.getStdErr()),
- .on => .escape_codes,
- .off => .no_color,
- };
- const file_path = try report.directory.join(report.arena, &.{Manifest.basename});
- renderErrorMessage(report.ast.*, file_path, ttyconf, .{
+ const file_path = try report.directory.join(gpa, &.{Manifest.basename});
+ defer gpa.free(file_path);
+
+ const msg = try std.fmt.allocPrint(gpa, fmt_string, fmt_args);
+ defer gpa.free(msg);
+
+ try addErrorMessage(report.gpa, report.ast.*, file_path, report.error_bundle, 0, .{
.tok = tok,
.off = 0,
- .msg = try std.fmt.allocPrint(report.arena, fmt_string, fmt_args),
- }, notes);
+ .msg = msg,
+ });
+
return error.PackageFetchFailed;
}
- fn renderErrorMessage(
+ fn addErrorMessage(
+ gpa: Allocator,
ast: std.zig.Ast,
file_path: []const u8,
- ttyconf: std.debug.TTY.Config,
+ eb: *std.zig.ErrorBundle,
+ notes_len: u32,
msg: Manifest.ErrorMessage,
- notes: []const Compilation.AllErrors.Message,
- ) void {
+ ) error{OutOfMemory}!void {
const token_starts = ast.tokens.items(.start);
const start_loc = ast.tokenLocation(0, msg.tok);
- Compilation.AllErrors.Message.renderToStdErr(.{ .src = .{
- .msg = msg.msg,
- .src_path = file_path,
- .line = @intCast(u32, start_loc.line),
- .column = @intCast(u32, start_loc.column),
- .span = .{
- .start = token_starts[msg.tok],
- .end = @intCast(u32, token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
- .main = token_starts[msg.tok] + msg.off,
- },
- .source_line = ast.source[start_loc.line_start..start_loc.line_end],
- .notes = notes,
- } }, ttyconf);
+
+ try eb.addErrorMessage(gpa, .{
+ .msg = try eb.addString(gpa, msg.msg),
+ .src_loc = try eb.addSourceLocation(gpa, .{
+ .src_path = try eb.addString(gpa, file_path),
+ .span_start = token_starts[msg.tok],
+ .span_end = @intCast(u32, token_starts[msg.tok] + ast.tokenSlice(msg.tok).len),
+ .span_main = token_starts[msg.tok] + msg.off,
+ .line = @intCast(u32, start_loc.line),
+ .column = @intCast(u32, start_loc.column),
+ .source_line = try eb.addString(gpa, ast.source[start_loc.line_start..start_loc.line_end]),
+ }),
+ .notes_len = notes_len,
+ });
+ eb.incrementCount(1);
}
};
@@ -504,9 +495,7 @@ fn fetchAndUnpack(
// by default, so the same logic applies for buffering the reader as for gzip.
try unpackTarball(gpa, &req, tmp_directory.handle, std.compress.xz);
} else {
- return report.fail(dep.url_tok, "unknown file extension for path '{s}'", .{
- uri.path,
- });
+ return report.fail(dep.url_tok, "unknown file extension for path '{s}'", .{uri.path});
}
// TODO: delete files not included in the package prior to computing the package hash.
@@ -533,10 +522,19 @@ fn fetchAndUnpack(
});
}
} else {
- const notes: [1]Compilation.AllErrors.Message = .{.{ .plain = .{
- .msg = try std.fmt.allocPrint(report.arena, "expected .hash = \"{s}\",", .{&actual_hex}),
- } }};
- return report.failWithNotes(¬es, dep.url_tok, "url field is missing corresponding hash field", .{});
+ const file_path = try report.directory.join(gpa, &.{Manifest.basename});
+ defer gpa.free(file_path);
+
+ const eb = report.error_bundle;
+ try Report.addErrorMessage(gpa, report.ast.*, file_path, eb, 1, .{
+ .tok = dep.url_tok,
+ .off = 0,
+ .msg = "url field is missing corresponding hash field",
+ });
+ try eb.addErrorMessage(gpa, .{
+ .msg = try eb.printString(gpa, "expected .hash = \"{s}\",", .{&actual_hex}),
+ });
+ return error.PackageFetchFailed;
}
const build_root = try global_cache_directory.join(gpa, &.{pkg_dir_sub_path});
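For reference, the bundle primitives used above compose as follows. A minimal sketch with a hypothetical helper and placeholder source location, mirroring Report.addErrorMessage and the missing-hash case:

fn addErrorWithNote(gpa: Allocator, eb: *std.zig.ErrorBundle, file_path: []const u8, actual_hex: []const u8) !void {
    // Top-level message: declares one trailing note via notes_len.
    try eb.addErrorMessage(gpa, .{
        .msg = try eb.addString(gpa, "url field is missing corresponding hash field"),
        .src_loc = try eb.addSourceLocation(gpa, .{
            .src_path = try eb.addString(gpa, file_path),
            .line = 0, // placeholders; real callers compute these from the token
            .column = 0,
            .span_start = 0,
            .span_main = 0,
            .span_end = 0,
        }),
        .notes_len = 1,
    });
    eb.incrementCount(1);

    // The note immediately follows its parent message and is not counted as a
    // separate top-level error, matching the code above.
    try eb.addErrorMessage(gpa, .{
        .msg = try eb.printString(gpa, "expected .hash = \"{s}\",", .{actual_hex}),
    });
}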
src/Sema.zig
@@ -2211,29 +2211,26 @@ pub fn fail(
fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError {
@setCold(true);
+ const gpa = sema.gpa;
if (crash_report.is_enabled and sema.mod.comp.debug_compile_errors) {
if (err_msg.src_loc.lazy == .unneeded) return error.NeededSourceLocation;
- var arena = std.heap.ArenaAllocator.init(sema.gpa);
- errdefer arena.deinit();
- var errors = std.ArrayList(Compilation.AllErrors.Message).init(sema.gpa);
- defer errors.deinit();
-
- Compilation.AllErrors.add(sema.mod, &arena, &errors, err_msg.*) catch unreachable;
-
+ var errors: std.zig.ErrorBundle = undefined;
+ errors.init(gpa) catch unreachable;
+ Compilation.addModuleErrorMsg(gpa, &errors, err_msg.*) catch unreachable;
std.debug.print("compile error during Sema:\n", .{});
- Compilation.AllErrors.Message.renderToStdErr(errors.items[0], .no_color);
+ errors.renderToStdErr(.no_color);
crash_report.compilerPanic("unexpected compile error occurred", null, null);
}
const mod = sema.mod;
ref: {
- errdefer err_msg.destroy(mod.gpa);
+ errdefer err_msg.destroy(gpa);
if (err_msg.src_loc.lazy == .unneeded) {
return error.NeededSourceLocation;
}
- try mod.failed_decls.ensureUnusedCapacity(mod.gpa, 1);
- try mod.failed_files.ensureUnusedCapacity(mod.gpa, 1);
+ try mod.failed_decls.ensureUnusedCapacity(gpa, 1);
+ try mod.failed_files.ensureUnusedCapacity(gpa, 1);
const max_references = blk: {
if (sema.mod.comp.reference_trace) |num| break :blk num;
@@ -2243,11 +2240,11 @@ fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError {
};
var referenced_by = if (sema.func) |some| some.owner_decl else sema.owner_decl_index;
- var reference_stack = std.ArrayList(Module.ErrorMsg.Trace).init(sema.gpa);
+ var reference_stack = std.ArrayList(Module.ErrorMsg.Trace).init(gpa);
defer reference_stack.deinit();
// Avoid infinite loops.
- var seen = std.AutoHashMap(Module.Decl.Index, void).init(sema.gpa);
+ var seen = std.AutoHashMap(Module.Decl.Index, void).init(gpa);
defer seen.deinit();
var cur_reference_trace: u32 = 0;
@@ -2288,7 +2285,7 @@ fn failWithOwnedErrorMsg(sema: *Sema, err_msg: *Module.ErrorMsg) CompileError {
if (gop.found_existing) {
// If there are multiple errors for the same Decl, prefer the first one added.
sema.err = null;
- err_msg.destroy(mod.gpa);
+ err_msg.destroy(gpa);
} else {
sema.err = err_msg;
gop.value_ptr.* = err_msg;
src/wasi_libc.zig
@@ -74,7 +74,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
var args = std.ArrayList([]const u8).init(arena);
try addCCArgs(comp, arena, &args, false);
try addLibcBottomHalfIncludes(comp, arena, &args);
- return comp.build_crt_file("crt1-reactor", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crt1-reactor", .Obj, .@"wasi crt1-reactor.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", try sanitize(arena, crt1_reactor_src_file),
@@ -87,7 +87,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
var args = std.ArrayList([]const u8).init(arena);
try addCCArgs(comp, arena, &args, false);
try addLibcBottomHalfIncludes(comp, arena, &args);
- return comp.build_crt_file("crt1-command", .Obj, &[1]Compilation.CSourceFile{
+ return comp.build_crt_file("crt1-command", .Obj, .@"wasi crt1-command.o", &[1]Compilation.CSourceFile{
.{
.src_path = try comp.zig_lib_directory.join(arena, &[_][]const u8{
"libc", try sanitize(arena, crt1_command_src_file),
@@ -145,7 +145,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
}
}
- try comp.build_crt_file("c", .Lib, libc_sources.items);
+ try comp.build_crt_file("c", .Lib, .@"wasi libc.a", libc_sources.items);
},
.libwasi_emulated_process_clocks_a => {
var args = std.ArrayList([]const u8).init(arena);
@@ -161,7 +161,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = args.items,
});
}
- try comp.build_crt_file("wasi-emulated-process-clocks", .Lib, emu_clocks_sources.items);
+ try comp.build_crt_file("wasi-emulated-process-clocks", .Lib, .@"libwasi-emulated-process-clocks.a", emu_clocks_sources.items);
},
.libwasi_emulated_getpid_a => {
var args = std.ArrayList([]const u8).init(arena);
@@ -177,7 +177,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = args.items,
});
}
- try comp.build_crt_file("wasi-emulated-getpid", .Lib, emu_getpid_sources.items);
+ try comp.build_crt_file("wasi-emulated-getpid", .Lib, .@"libwasi-emulated-getpid.a", emu_getpid_sources.items);
},
.libwasi_emulated_mman_a => {
var args = std.ArrayList([]const u8).init(arena);
@@ -193,7 +193,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
.extra_flags = args.items,
});
}
- try comp.build_crt_file("wasi-emulated-mman", .Lib, emu_mman_sources.items);
+ try comp.build_crt_file("wasi-emulated-mman", .Lib, .@"libwasi-emulated-mman.a", emu_mman_sources.items);
},
.libwasi_emulated_signal_a => {
var emu_signal_sources = std.ArrayList(Compilation.CSourceFile).init(arena);
@@ -228,7 +228,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
}
}
- try comp.build_crt_file("wasi-emulated-signal", .Lib, emu_signal_sources.items);
+ try comp.build_crt_file("wasi-emulated-signal", .Lib, .@"libwasi-emulated-signal.a", emu_signal_sources.items);
},
}
}