Commit d91744401f
Changed files (2):
lib/std/Io/Writer.zig
src/Package/Fetch/git.zig
lib/std/Io/Writer.zig
@@ -2418,7 +2418,7 @@ pub fn Hashing(comptime Hasher: type) type {
n += slice.len;
}
for (0..splat) |_| hasher.update(data[data.len - 1]);
- return n + splat;
+ return n + splat * data[data.len - 1].len;
}
};
}
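Note on the fix above: `drain` returns the number of bytes consumed, and the splat loop repeats the entire last slice of `data`, so each repetition contributes `data[data.len - 1].len` bytes rather than one. A minimal sketch of the accounting, with illustrative values and assuming (as the surrounding function suggests) that every non-final slice is hashed exactly once:

const std = @import("std");

test "splat byte accounting (illustrative)" {
    const data = [_][]const u8{ "abc", "xy" };
    const splat: usize = 3;
    var n: usize = 0;
    // Every slice except the last is consumed once.
    for (data[0 .. data.len - 1]) |slice| n += slice.len;
    // The last slice is consumed `splat` times, contributing
    // splat * 2 = 6 bytes here; the old `n + splat` would report
    // 6 total instead of the correct 9.
    try std.testing.expectEqual(@as(usize, 9), n + splat * data[data.len - 1].len);
}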
src/Package/Fetch/git.zig
@@ -1500,13 +1500,11 @@ fn readObjectRaw(allocator: Allocator, reader: *std.Io.Reader, size: u64) ![]u8
return aw.toOwnedSlice();
}
-/// Expands delta data from `delta_reader` to `writer`. `base_object` must
-/// support `reader` and `seekTo` (such as a `std.io.FixedBufferStream`).
+/// Expands delta data from `delta_reader` to `writer`.
///
/// The format of the delta data is documented in
/// [pack-format](https://git-scm.com/docs/pack-format).
fn expandDelta(base_object: []const u8, delta_reader: *std.Io.Reader, writer: *std.Io.Writer) !void {
- var base_offset: u32 = 0;
while (true) {
const inst: packed struct { value: u7, copy: bool } = @bitCast(delta_reader.takeByte() catch |e| switch (e) {
error.EndOfStream => return,
@@ -1528,7 +1526,7 @@ fn expandDelta(base_object: []const u8, delta_reader: *std.Io.Reader, writer: *s
.offset3 = if (available.offset3) try delta_reader.takeByte() else 0,
.offset4 = if (available.offset4) try delta_reader.takeByte() else 0,
};
- base_offset = @bitCast(offset_parts);
+ const base_offset: u32 = @bitCast(offset_parts);
const size_parts: packed struct { size1: u8, size2: u8, size3: u8 } = .{
.size1 = if (available.size1) try delta_reader.takeByte() else 0,
.size2 = if (available.size2) try delta_reader.takeByte() else 0,
@@ -1537,7 +1535,6 @@ fn expandDelta(base_object: []const u8, delta_reader: *std.Io.Reader, writer: *s
var size: u24 = @bitCast(size_parts);
if (size == 0) size = 0x10000;
try writer.writeAll(base_object[base_offset..][0..size]);
- base_offset += size;
} else if (inst.value != 0) {
try delta_reader.streamExact(writer, inst.value);
} else {
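Note on the `base_offset` change: as the hunks show, every copy instruction re-derives its offset from its own trailing bytes (bytes not flagged in the command byte default to zero), so the old `base_offset += size` was a dead store that the next `@bitCast` assignment always overwrote, and the offset can become a per-instruction `const`. A small sketch of the copy command byte from the pack-format document (the flag struct here is illustrative; the actual `available` struct in git.zig is outside this diff):

const std = @import("std");

test "pack-format copy command byte (illustrative)" {
    // A copy instruction sets the high bit; the low seven bits flag
    // which offset and size bytes follow the command byte.
    const Flags = packed struct {
        offset1: bool,
        offset2: bool,
        offset3: bool,
        offset4: bool,
        size1: bool,
        size2: bool,
        size3: bool,
        copy: bool,
    };
    // 0x91 = 0b1001_0001: a copy with one offset byte and one size byte.
    const flags: Flags = @bitCast(@as(u8, 0x91));
    try std.testing.expect(flags.copy and flags.offset1 and flags.size1);
    try std.testing.expect(!flags.offset2 and !flags.size3);
}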
@@ -1582,13 +1579,15 @@ fn runRepositoryTest(comptime format: Oid.Format, head_commit: []const u8) !void
// (all files in the test repo are known to be smaller than this)
const max_file_size = 8192;
- const index_file_data = try git_dir.dir.readFileAlloc(testing.allocator, "testrepo.idx", max_file_size);
- defer testing.allocator.free(index_file_data);
- // testrepo.idx is generated by Git. The index created by this file should
- // match it exactly. Running `git verify-pack -v testrepo.pack` can verify
- // this.
- const testrepo_idx = @embedFile("git/testdata/testrepo-" ++ @tagName(format) ++ ".idx");
- try testing.expectEqualSlices(u8, testrepo_idx, index_file_data);
+ if (!skip_checksums) {
+ const index_file_data = try git_dir.dir.readFileAlloc(testing.allocator, "testrepo.idx", max_file_size);
+ defer testing.allocator.free(index_file_data);
+ // testrepo.idx is generated by Git. The index created by this file should
+ // match it exactly. Running `git verify-pack -v testrepo.pack` can verify
+ // this.
+ const testrepo_idx = @embedFile("git/testdata/testrepo-" ++ @tagName(format) ++ ".idx");
+ try testing.expectEqualSlices(u8, testrepo_idx, index_file_data);
+ }
var index_file_reader = index_file.reader(&index_file_buffer);
var repository: Repository = undefined;
@@ -1669,12 +1668,10 @@ fn runRepositoryTest(comptime format: Oid.Format, head_commit: []const u8) !void
const skip_checksums = true;

test "SHA-1 packfile indexing and checkout" {
- if (skip_checksums) return error.SkipZigTest;
try runRepositoryTest(.sha1, "dd582c0720819ab7130b103635bd7271b9fd4feb");
}

test "SHA-256 packfile indexing and checkout" {
- if (skip_checksums) return error.SkipZigTest;
try runRepositoryTest(.sha256, "7f444a92bd4572ee4a28b2c63059924a9ca1829138553ef3e7c41ee159afae7a");
}