Commit 12087d4cba
Changed files (3)
  src/Compilation.zig
  src/Module.zig
  test/stage2/cbe.zig
src/Compilation.zig
@@ -1346,9 +1346,9 @@ pub fn update(self: *Compilation) !void {
module.generation += 1;
// TODO Detect which source files changed.
- // Until then we simulate a full cache miss. Source files could have been loaded for any reason;
- // to force a refresh we unload now.
- module.root_scope.unload(module.gpa);
+ // Until then we simulate a full cache miss. Source files could have been loaded
+ // for any reason; to force a refresh we unload now.
+ module.unloadFile(module.root_scope);
module.failed_root_src_file = null;
module.analyzeContainer(&module.root_scope.root_container) catch |err| switch (err) {
error.AnalysisFail => {
@@ -1362,7 +1362,7 @@ pub fn update(self: *Compilation) !void {
// TODO only analyze imports if they are still referenced
for (module.import_table.items()) |entry| {
- entry.value.unload(module.gpa);
+ module.unloadFile(entry.value);
module.analyzeContainer(&entry.value.root_container) catch |err| switch (err) {
error.AnalysisFail => {
assert(self.totalErrorCount() != 0);
@@ -1432,11 +1432,25 @@ pub fn totalErrorCount(self: *Compilation) usize {
var total: usize = self.failed_c_objects.items().len;
if (self.bin_file.options.module) |module| {
- total += module.failed_decls.count() +
- module.emit_h_failed_decls.count() +
- module.failed_exports.items().len +
+ total += module.failed_exports.items().len +
module.failed_files.items().len +
@boolToInt(module.failed_root_src_file != null);
+ // Skip errors for Decls within files that failed parsing.
+ // When a parse error is introduced, we keep all the semantic analysis for
+ // the previous parse success, including compile errors, but we cannot
+ // emit them until the file succeeds parsing.
+ for (module.failed_decls.items()) |entry| {
+ if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
+ continue;
+ }
+ total += 1;
+ }
+ for (module.emit_h_failed_decls.items()) |entry| {
+ if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
+ continue;
+ }
+ total += 1;
+ }
}
// The "no entry point found" error only counts if there are no other errors.
@@ -1483,9 +1497,19 @@ pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
try AllErrors.add(module, &arena, &errors, entry.value.*);
}
for (module.failed_decls.items()) |entry| {
+ if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
+ // Skip errors for Decls within files that had a parse failure.
+ // We'll try again once parsing succeeds.
+ continue;
+ }
try AllErrors.add(module, &arena, &errors, entry.value.*);
}
for (module.emit_h_failed_decls.items()) |entry| {
+ if (entry.key.container.file_scope.status == .unloaded_parse_failure) {
+ // Skip errors for Decls within files that had a parse failure.
+ // We'll try again once parsing succeeds.
+ continue;
+ }
try AllErrors.add(module, &arena, &errors, entry.value.*);
}
for (module.failed_exports.items()) |entry| {
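The counting rule above can be seen in isolation. A minimal sketch, using hypothetical simplified stand-ins for Scope.File and Decl rather than the real Module types: a Decl-level error is only reported while its file is not in the .unloaded_parse_failure state.

const std = @import("std");

// Hypothetical, simplified stand-ins for Scope.File and Decl, only to
// illustrate the counting rule used by totalErrorCount above.
const FileStatus = enum { loaded_success, unloaded_parse_failure, unloaded_success };
const File = struct { status: FileStatus };
const Decl = struct { file: *const File };

// Count only the Decl errors whose file currently parses; errors retained from
// a previous successful parse are suppressed while the file has a parse error.
fn countReportableErrors(failed_decls: []const *const Decl) usize {
    var total: usize = 0;
    for (failed_decls) |decl| {
        if (decl.file.status == .unloaded_parse_failure) continue;
        total += 1;
    }
    return total;
}

pub fn main() void {
    const ok_file = File{ .status = .loaded_success };
    const bad_file = File{ .status = .unloaded_parse_failure };
    const d1 = Decl{ .file = &ok_file };
    const d2 = Decl{ .file = &bad_file };
    const decls = [_]*const Decl{ &d1, &d2 };
    // Only d1 counts; d2 lives in a file that failed to parse.
    std.debug.assert(countReportableErrors(&decls) == 1);
}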
src/Module.zig
@@ -65,8 +65,8 @@ emit_h_failed_decls: std.AutoArrayHashMapUnmanaged(*Decl, *ErrorMsg) = .{},
/// Keep track of one `@compileLog` callsite per owner Decl.
compile_log_decls: std.AutoArrayHashMapUnmanaged(*Decl, SrcLoc) = .{},
/// Using a map here for consistency with the other fields here.
-/// The ErrorMsg memory is owned by the `Scope`, using Module's general purpose allocator.
-failed_files: std.AutoArrayHashMapUnmanaged(*Scope, *ErrorMsg) = .{},
+/// The ErrorMsg memory is owned by the `Scope.File`, using Module's general purpose allocator.
+failed_files: std.AutoArrayHashMapUnmanaged(*Scope.File, *ErrorMsg) = .{},
/// Using a map here for consistency with the other fields here.
/// The ErrorMsg memory is owned by the `Export`, using Module's general purpose allocator.
failed_exports: std.AutoArrayHashMapUnmanaged(*Export, *ErrorMsg) = .{},
@@ -732,10 +732,12 @@ pub const Scope = struct {
pub fn unload(file: *File, gpa: *Allocator) void {
switch (file.status) {
- .never_loaded,
.unloaded_parse_failure,
+ .never_loaded,
.unloaded_success,
- => {},
+ => {
+ file.status = .unloaded_success;
+ },
.loaded_success => {
file.tree.deinit(gpa);
@@ -3241,7 +3243,7 @@ pub fn getAstTree(mod: *Module, root_scope: *Scope.File) !*const ast.Tree {
.msg = msg.toOwnedSlice(),
};
- mod.failed_files.putAssumeCapacityNoClobber(&root_scope.base, err_msg);
+ mod.failed_files.putAssumeCapacityNoClobber(root_scope, err_msg);
root_scope.status = .unloaded_parse_failure;
return error.AnalysisFail;
}
@@ -4691,3 +4693,10 @@ pub fn parseStrLit(
},
}
}
+
+pub fn unloadFile(mod: *Module, file_scope: *Scope.File) void {
+ if (file_scope.status == .unloaded_parse_failure) {
+ mod.failed_files.swapRemove(file_scope).?.value.destroy(mod.gpa);
+ }
+ file_scope.unload(mod.gpa);
+}
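The new unloadFile wrapper pairs with getAstTree above, which records parse failures via putAssumeCapacityNoClobber: a stale failed_files entry left by an earlier parse failure has to be removed and its ErrorMsg freed before the file is parsed again, otherwise a subsequent failure would trip the no-clobber assertion and leak the old message. A minimal sketch of that remove-then-unload ordering, modeling the retained error as a hypothetical optional field on the file instead of the real failed_files map:

const std = @import("std");

// Hypothetical, simplified model of a file scope: at most one retained parse
// error, standing in for the Module.failed_files entry keyed by *Scope.File.
const Status = enum { unloaded_success, unloaded_parse_failure, loaded_success };
const File = struct {
    status: Status = .unloaded_success,
    // In the real code the message lives in Module.failed_files and owns heap
    // memory; a static string keeps this sketch allocation-free.
    parse_error: ?[]const u8 = null,
};

// Analogue of Module.unloadFile: drop any stale parse error first, then mark
// the file unloaded so the next update parses it from scratch.
fn unloadFile(file: *File) void {
    if (file.status == .unloaded_parse_failure) {
        // The real code also destroys the ErrorMsg with the gpa here.
        file.parse_error = null;
    }
    file.status = .unloaded_success;
}

pub fn main() void {
    var file = File{
        .status = .unloaded_parse_failure,
        .parse_error = "expected ',', found 'align'",
    };
    unloadFile(&file);
    std.debug.assert(file.parse_error == null);
    std.debug.assert(file.status == .unloaded_success);
}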
test/stage2/cbe.zig
@@ -600,6 +600,7 @@ pub fn addCases(ctx: *TestContext) !void {
, "");
// Specifying alignment is a parse error.
+ // This also tests going from a successful build to a parse error.
case.addError(
\\const E1 = enum {
\\ a,
@@ -612,6 +613,24 @@ pub fn addCases(ctx: *TestContext) !void {
, &.{
":3:7: error: expected ',', found 'align'",
});
+
+ // Redundant non-exhaustive enum mark.
+ // This also tests going from a parse error to an AstGen error.
+ case.addError(
+ \\const E1 = enum {
+ \\ a,
+ \\ _,
+ \\ b,
+ \\ c,
+ \\ _,
+ \\};
+ \\export fn foo() void {
+ \\ const x = E1.a;
+ \\}
+ , &.{
+ ":6:5: error: redundant non-exhaustive enum mark",
+ ":3:5: note: other mark here",
+ });
}
ctx.c("empty start function", linux_x64,