Commit ba656e5c9f
src/Package/Fetch.zig
@@ -81,6 +81,10 @@ pub const JobQueue = struct {
     wait_group: WaitGroup = .{},
     global_cache: Cache.Directory,
     recursive: bool,
+    /// Dumps hash information to stdout, which can be used to troubleshoot why
+    /// two hashes of the same package do not match.
+    /// If this is true, `recursive` must be false.
+    debug_hash: bool,
     work_around_btrfs_bug: bool,
 
     pub const Table = std.AutoArrayHashMapUnmanaged(Manifest.MultiHashHexDigest, *Fetch);
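
The new field comes with an invariant: `computeHash` (below) asserts that `recursive` is false whenever `debug_hash` is set, so any caller constructing a `JobQueue` must keep the two mutually exclusive. A minimal sketch of a conforming initialization, assuming `http_client`, `thread_pool`, and `global_cache_directory` are set up as elsewhere in `main.zig`:

    var job_queue: Fetch.JobQueue = .{
        .http_client = &http_client,
        .thread_pool = &thread_pool,
        .global_cache = global_cache_directory,
        .recursive = false, // must stay false while debug_hash is true
        .debug_hash = true, // dump per-file hash info to stdout
        .work_around_btrfs_bug = false,
    };
    defer job_queue.deinit();
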
@@ -1315,7 +1319,7 @@ fn computeHash(
         const kind: HashedFile.Kind = switch (entry.kind) {
             .directory => unreachable,
             .file => .file,
-            .sym_link => .sym_link,
+            .sym_link => .link,
             else => return f.fail(f.location_tok, try eb.printString(
                 "package contains '{s}' which has illegal file type '{s}'",
                 .{ entry.path, @tagName(entry.kind) },
@@ -1399,9 +1403,36 @@ fn computeHash(
         }
     }
     if (any_failures) return error.FetchFailed;
+
+    if (f.job_queue.debug_hash) {
+        assert(!f.job_queue.recursive);
+        // Print something to stdout that can be text-diffed to figure out why
+        // the package hash is different.
+        dumpHashInfo(all_files.items) catch |err| {
+            std.debug.print("unable to write to stdout: {s}\n", .{@errorName(err)});
+            std.process.exit(1);
+        };
+    }
+
     return hasher.finalResult();
 }
 
+fn dumpHashInfo(all_files: []const *const HashedFile) !void {
+    const stdout = std.io.getStdOut();
+    var bw = std.io.bufferedWriter(stdout.writer());
+    const w = bw.writer();
+
+    for (all_files) |hashed_file| {
+        try w.print("{s}: {s}: {s}\n", .{
+            @tagName(hashed_file.kind),
+            std.fmt.fmtSliceHexLower(&hashed_file.hash),
+            hashed_file.normalized_path,
+        });
+    }
+
+    try bw.flush();
+}
+
 fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile, wg: *WaitGroup) void {
     defer wg.finish();
     hashed_file.failure = hashFileFallible(dir, hashed_file);
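
Per the format string above, `dumpHashInfo` emits one line per hashed entry in the form `kind: hex-digest: normalized-path`, so two dumps of the same package can be compared with an ordinary text diff. Hypothetical output (digests invented and truncated for illustration):

    file: 9a2cd8efbb8b46b4...: build.zig
    file: 04f1c29e43d1a547...: src/main.zig
    link: 77ad78345dca2b60...: docs/latest
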
@@ -1427,7 +1458,7 @@ fn hashFileFallible(dir: fs.Dir, hashed_file: *HashedFile) HashedFile.Error!void
                 hasher.update(buf[0..bytes_read]);
             }
         },
-        .sym_link => {
+        .link => {
             const link_name = try dir.readLink(hashed_file.fs_path, &buf);
             if (fs.path.sep != canonical_sep) {
                 // Package hashes are intended to be consistent across
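
The comment is cut off by the hunk boundary; the point is that package hashes must be consistent across operating systems, so a symlink target containing the platform separator is normalized before being fed to the hasher. A minimal sketch of that idea (not the exact code from Fetch.zig; `canonical_sep` is the canonical separator used in package hashes):

    fn normalizeSeps(target: []u8) void {
        // Rewrite platform path separators in place so the bytes that reach
        // the hasher are identical on every OS (e.g. '\' -> '/' on Windows).
        for (target) |*byte| {
            if (byte.* == fs.path.sep) byte.* = canonical_sep;
        }
    }
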
@@ -1480,7 +1511,7 @@ const HashedFile = struct {
         fs.File.StatError ||
         fs.Dir.ReadLinkError;
 
-    const Kind = enum { file, sym_link };
+    const Kind = enum { file, link };
 
     fn lessThan(context: void, lhs: *const HashedFile, rhs: *const HashedFile) bool {
        _ = context;
src/main.zig
@@ -5143,6 +5143,7 @@ pub fn cmdBuild(gpa: Allocator, arena: Allocator, args: []const []const u8) !voi
         .thread_pool = &thread_pool,
         .global_cache = global_cache_directory,
         .recursive = true,
+        .debug_hash = false,
         .work_around_btrfs_bug = work_around_btrfs_bug,
     };
     defer job_queue.deinit();
@@ -6991,6 +6992,7 @@ pub const usage_fetch =
     \\Options:
     \\  -h, --help                    Print this help and exit
     \\  --global-cache-dir [path]     Override path to global Zig cache directory
+    \\  --debug-hash                  Print verbose hash information to stdout
     \\
 ;
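
With the flag wired up below, the intended workflow is to capture a dump from each environment that disagrees about a package hash and compare the two textually. Hypothetical session (URL invented for illustration):

    zig fetch --debug-hash https://example.com/foo-1.0.0.tar.gz > first.txt
    # on the other machine or Zig version:
    zig fetch --debug-hash https://example.com/foo-1.0.0.tar.gz > second.txt
    diff first.txt second.txt
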
@@ -7004,6 +7006,7 @@ fn cmdFetch(
         std.process.hasEnvVarConstant("ZIG_BTRFS_WORKAROUND");
     var opt_path_or_url: ?[]const u8 = null;
     var override_global_cache_dir: ?[]const u8 = try optionalStringEnvVar(arena, "ZIG_GLOBAL_CACHE_DIR");
+    var debug_hash: bool = false;
 
     {
         var i: usize = 0;
@@ -7019,6 +7022,9 @@ fn cmdFetch(
                 i += 1;
                 override_global_cache_dir = args[i];
                 continue;
+            } else if (mem.eql(u8, arg, "--debug-hash")) {
+                debug_hash = true;
+                continue;
             } else {
                 fatal("unrecognized parameter: '{s}'", .{arg});
             }
@@ -7057,6 +7063,7 @@ fn cmdFetch(
         .thread_pool = &thread_pool,
         .global_cache = global_cache_directory,
         .recursive = false,
+        .debug_hash = debug_hash,
         .work_around_btrfs_bug = work_around_btrfs_bug,
     };
     defer job_queue.deinit();