Commit 9adcc31ca3
Changed files (20)
lib/compiler/resinator/main.zig
@@ -164,13 +164,14 @@ pub fn main() !void {
} else {
switch (options.input_source) {
.stdio => |file| {
- break :full_input file.readToEndAlloc(allocator, std.math.maxInt(usize)) catch |err| {
+ var file_reader = file.reader(&.{});
+ break :full_input file_reader.interface.allocRemaining(allocator, .unlimited) catch |err| {
try error_handler.emitMessage(allocator, .err, "unable to read input from stdin: {s}", .{@errorName(err)});
std.process.exit(1);
};
},
.filename => |input_filename| {
- break :full_input std.fs.cwd().readFileAlloc(allocator, input_filename, std.math.maxInt(usize)) catch |err| {
+ break :full_input std.fs.cwd().readFileAlloc(input_filename, allocator, .unlimited) catch |err| {
try error_handler.emitMessage(allocator, .err, "unable to read input file path '{s}': {s}", .{ input_filename, @errorName(err) });
std.process.exit(1);
};
@@ -462,7 +463,10 @@ const IoStream = struct {
pub fn readAll(self: Source, allocator: std.mem.Allocator) !Data {
return switch (self) {
inline .file, .stdio => |file| .{
- .bytes = try file.readToEndAlloc(allocator, std.math.maxInt(usize)),
+ .bytes = b: {
+ var file_reader = file.reader(&.{});
+ break :b try file_reader.interface.allocRemaining(allocator, .unlimited);
+ },
.needs_free = true,
},
.memory => |list| .{ .bytes = list.items, .needs_free = false },
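The core pattern of this commit shows up here first: instead of `File.readToEndAlloc(allocator, max)`, the file is wrapped in a `std.fs.File.Reader` and its generic `std.Io.Reader` interface slurps the remaining bytes with `allocRemaining`, which takes a `std.Io.Limit` (`.unlimited` or `.limited(n)`) rather than a bare integer. A minimal sketch of the new call shape, mirroring the hunks above (the helper name is illustrative):

```zig
const std = @import("std");

/// Read everything from an already-opened file, as the resinator hunks above do.
fn readAll(file: std.fs.File, allocator: std.mem.Allocator) ![]u8 {
    // `&.{}` hands the reader an empty buffer; `allocRemaining` then reads until
    // EOF, with `.unlimited` (a `std.Io.Limit`) placing no cap on the allocation.
    var file_reader = file.reader(&.{});
    return file_reader.interface.allocRemaining(allocator, .unlimited);
}
```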
test/src/check-stack-trace.zig
@@ -13,7 +13,7 @@ pub fn main() !void {
const input_path = args[1];
const optimize_mode_text = args[2];
- const input_bytes = try std.fs.cwd().readFileAlloc(arena, input_path, 5 * 1024 * 1024);
+ const input_bytes = try std.fs.cwd().readFileAlloc(input_path, arena, .limited(5 * 1024 * 1024));
const optimize_mode = std.meta.stringToEnum(std.builtin.OptimizeMode, optimize_mode_text).?;
var stderr = input_bytes;
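The `Dir.readFileAlloc` call sites get the same mechanical rewrite throughout the commit: the sub-path moves to the first argument, the allocator to the second, and the size cap becomes a `std.Io.Limit` rather than a bare byte count. A hedged before/after sketch (the file name and cap are illustrative):

```zig
const std = @import("std");

fn loadInput(allocator: std.mem.Allocator) ![]u8 {
    // Old: std.fs.cwd().readFileAlloc(allocator, "input.txt", 5 * 1024 * 1024)
    // New: sub-path first, allocator second, limit as a std.Io.Limit.
    return std.fs.cwd().readFileAlloc("input.txt", allocator, .limited(5 * 1024 * 1024));
}
```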
test/standalone/child_process/main.zig
@@ -32,7 +32,8 @@ pub fn main() !void {
const hello_stdout = "hello from stdout";
var buf: [hello_stdout.len]u8 = undefined;
- const n = try child.stdout.?.deprecatedReader().readAll(&buf);
+ var stdout_reader = child.stdout.?.reader(&.{});
+ const n = try stdout_reader.interface.readSliceShort(&buf);
if (!std.mem.eql(u8, buf[0..n], hello_stdout)) {
testError("child stdout: '{s}'; want '{s}'", .{ buf[0..n], hello_stdout });
}
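Reading into a fixed-size buffer changes shape as well: the deprecated per-`File` reader goes away, and `readSliceShort` on the `std.Io.Reader` interface fills as much of the slice as it can, returning the byte count (a short read at end of stream is not an error). A sketch under those assumptions:

```zig
const std = @import("std");

/// Read the child's stdout into `buf`, as the hunk above does; returns the
/// number of bytes actually read, which may be less than `buf.len` at EOF.
fn readChildStdout(child: *std.process.Child, buf: []u8) !usize {
    var stdout_reader = child.stdout.?.reader(&.{});
    return stdout_reader.interface.readSliceShort(buf);
}
```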
test/standalone/cmakedefine/check.zig
@@ -9,8 +9,8 @@ pub fn main() !void {
const actual_path = args[1];
const expected_path = args[2];
- const actual = try std.fs.cwd().readFileAlloc(arena, actual_path, 1024 * 1024);
- const expected = try std.fs.cwd().readFileAlloc(arena, expected_path, 1024 * 1024);
+ const actual = try std.fs.cwd().readFileAlloc(actual_path, arena, .limited(1024 * 1024));
+ const expected = try std.fs.cwd().readFileAlloc(expected_path, arena, .limited(1024 * 1024));
// The actual output starts with a comment which we should strip out before comparing.
const comment_str = "/* This file was generated by ConfigHeader using the Zig Build System. */\n";
test/standalone/entry_point/check_differ.zig
@@ -6,8 +6,8 @@ pub fn main() !void {
const args = try std.process.argsAlloc(arena);
if (args.len != 3) return error.BadUsage; // usage: 'check_differ <path a> <path b>'
- const contents_1 = try std.fs.cwd().readFileAlloc(arena, args[1], 1024 * 1024 * 64); // 64 MiB ought to be plenty
- const contents_2 = try std.fs.cwd().readFileAlloc(arena, args[2], 1024 * 1024 * 64); // 64 MiB ought to be plenty
+ const contents_1 = try std.fs.cwd().readFileAlloc(args[1], arena, .limited(1024 * 1024 * 64)); // 64 MiB ought to be plenty
+ const contents_2 = try std.fs.cwd().readFileAlloc(args[2], arena, .limited(1024 * 1024 * 64)); // 64 MiB ought to be plenty
if (std.mem.eql(u8, contents_1, contents_2)) {
return error.FilesMatch;
tools/docgen.zig
@@ -77,7 +77,8 @@ pub fn main() !void {
var code_dir = try fs.cwd().openDir(code_dir_path, .{});
defer code_dir.close();
- const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, max_doc_file_size);
+ var in_file_reader = in_file.reader(&.{});
+ const input_file_bytes = try in_file_reader.interface.allocRemaining(arena, .limited(max_doc_file_size));
var tokenizer = Tokenizer.init(input_path, input_file_bytes);
var toc = try genToc(arena, &tokenizer);
@@ -1039,10 +1040,8 @@ fn genHtml(
});
defer allocator.free(out_basename);
- const contents = code_dir.readFileAlloc(allocator, out_basename, std.math.maxInt(u32)) catch |err| {
- return parseError(tokenizer, code.token, "unable to open '{s}': {s}", .{
- out_basename, @errorName(err),
- });
+ const contents = code_dir.readFileAlloc(out_basename, allocator, .limited(std.math.maxInt(u32))) catch |err| {
+ return parseError(tokenizer, code.token, "unable to open '{s}': {t}", .{ out_basename, err });
};
defer allocator.free(contents);
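Beyond the argument reorder, this hunk also drops the `@errorName(err)` indirection: the `{t}` format specifier prints an error (or enum) value by its tag name directly. A small illustrative sketch:

```zig
const std = @import("std");

fn reportOpenError(name: []const u8, err: anyerror) void {
    // `{t}` formats the error by tag name, replacing "{s}" + @errorName(err).
    std.debug.print("unable to open '{s}': {t}\n", .{ name, err });
}
```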
tools/doctest.zig
@@ -70,7 +70,7 @@ pub fn main() !void {
const zig_path = opt_zig orelse fatal("missing zig compiler path (--zig)", .{});
const cache_root = opt_cache_root orelse fatal("missing cache root path (--cache-root)", .{});
- const source_bytes = try fs.cwd().readFileAlloc(arena, input_path, std.math.maxInt(u32));
+ const source_bytes = try fs.cwd().readFileAlloc(input_path, arena, .limited(std.math.maxInt(u32)));
const code = try parseManifest(arena, source_bytes);
const source = stripManifest(source_bytes);
tools/dump-cov.zig
@@ -38,10 +38,9 @@ pub fn main() !void {
defer debug_info.deinit(gpa);
const cov_bytes = cov_path.root_dir.handle.readFileAllocOptions(
- arena,
cov_path.sub_path,
- 1 << 30,
- null,
+ arena,
+ .limited(1 << 30),
.of(SeenPcsHeader),
null,
) catch |err| {
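`readFileAllocOptions` follows the same reorder: sub-path, then allocator, then a `std.Io.Limit`, then the required alignment and optional sentinel; the old size-hint argument is gone entirely. A sketch assuming a hypothetical header type:

```zig
const std = @import("std");

const Header = extern struct { magic: u32, len: u32 };

fn loadHeaderAligned(dir: std.fs.Dir, sub_path: []const u8, arena: std.mem.Allocator) ![]u8 {
    // New order: sub_path, allocator, limit, alignment, sentinel (no size hint).
    return dir.readFileAllocOptions(sub_path, arena, .limited(1 << 30), .of(Header), null);
}
```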
tools/fetch_them_macos_headers.zig
@@ -1,6 +1,5 @@
const std = @import("std");
const fs = std.fs;
-const io = std.io;
const mem = std.mem;
const process = std.process;
const assert = std.debug.assert;
@@ -93,7 +92,7 @@ pub fn main() anyerror!void {
var sdk_dir = try std.fs.cwd().openDir(sysroot_path, .{});
defer sdk_dir.close();
- const sdk_info = try sdk_dir.readFileAlloc(allocator, "SDKSettings.json", std.math.maxInt(u32));
+ const sdk_info = try sdk_dir.readFileAlloc("SDKSettings.json", allocator, .limited(std.math.maxInt(u32)));
const parsed_json = try std.json.parseFromSlice(struct {
DefaultProperties: struct { MACOSX_DEPLOYMENT_TARGET: []const u8 },
@@ -198,7 +197,8 @@ fn fetchTarget(
var dirs = std.StringHashMap(fs.Dir).init(arena);
try dirs.putNoClobber(".", dest_dir);
- const headers_list_str = try headers_list_file.deprecatedReader().readAllAlloc(arena, std.math.maxInt(usize));
+ var headers_list_file_reader = headers_list_file.reader(&.{});
+ const headers_list_str = try headers_list_file_reader.interface.allocRemaining(arena, .unlimited);
const prefix = "/usr/include";
var it = mem.splitScalar(u8, headers_list_str, '\n');
tools/gen_spirv_spec.zig
@@ -136,7 +136,7 @@ fn readExtRegistry(exts: *std.array_list.Managed(Extension), dir: std.fs.Dir, su
}
fn readRegistry(comptime RegistryType: type, dir: std.fs.Dir, path: []const u8) !RegistryType {
- const spec = try dir.readFileAlloc(allocator, path, std.math.maxInt(usize));
+ const spec = try dir.readFileAlloc(path, allocator, .unlimited);
// Required for json parsing.
// TODO: ALI
@setEvalBranchQuota(10000);
@@ -189,7 +189,7 @@ fn tagPriorityScore(tag: []const u8) usize {
}
fn render(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
registry: CoreRegistry,
extensions: []const Extension,
) !void {
@@ -214,7 +214,7 @@ fn render(
\\ none,
\\ _,
\\
- \\ pub fn format(self: Id, writer: *std.io.Writer) std.io.Writer.Error!void {
+ \\ pub fn format(self: Id, writer: *std.Io.Writer) std.Io.Writer.Error!void {
\\ switch (self) {
\\ .none => try writer.writeAll("(none)"),
\\ else => try writer.print("%{d}", .{@intFromEnum(self)}),
@@ -327,7 +327,7 @@ fn render(
}
fn renderInstructionSet(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
core: CoreRegistry,
extensions: []const Extension,
all_operand_kinds: OperandKindMap,
@@ -362,7 +362,7 @@ fn renderInstructionSet(
}
fn renderInstructionsCase(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
set_name: []const u8,
instructions: []const Instruction,
all_operand_kinds: OperandKindMap,
@@ -409,7 +409,7 @@ fn renderInstructionsCase(
);
}
-fn renderClass(writer: *std.io.Writer, instructions: []const Instruction) !void {
+fn renderClass(writer: *std.Io.Writer, instructions: []const Instruction) !void {
var class_map = std.StringArrayHashMap(void).init(allocator);
for (instructions) |inst| {
@@ -427,7 +427,7 @@ fn renderClass(writer: *std.io.Writer, instructions: []const Instruction) !void
const Formatter = struct {
data: []const u8,
- fn format(f: Formatter, writer: *std.Io.Writer) std.io.Writer.Error!void {
+ fn format(f: Formatter, writer: *std.Io.Writer) std.Io.Writer.Error!void {
var id_buf: [128]u8 = undefined;
var fw: std.Io.Writer = .fixed(&id_buf);
for (f.data, 0..) |c, i| {
@@ -457,7 +457,7 @@ fn formatId(identifier: []const u8) std.fmt.Alt(Formatter, Formatter.format) {
return .{ .data = .{ .data = identifier } };
}
-fn renderOperandKind(writer: *std.io.Writer, operands: []const OperandKind) !void {
+fn renderOperandKind(writer: *std.Io.Writer, operands: []const OperandKind) !void {
try writer.writeAll(
\\pub const OperandKind = enum {
\\ opcode,
@@ -513,7 +513,7 @@ fn renderOperandKind(writer: *std.io.Writer, operands: []const OperandKind) !voi
try writer.writeAll("};\n}\n};\n");
}
-fn renderEnumerant(writer: *std.io.Writer, enumerant: Enumerant) !void {
+fn renderEnumerant(writer: *std.Io.Writer, enumerant: Enumerant) !void {
try writer.print(".{{.name = \"{s}\", .value = ", .{enumerant.enumerant});
switch (enumerant.value) {
.bitflag => |flag| try writer.writeAll(flag),
@@ -530,7 +530,7 @@ fn renderEnumerant(writer: *std.io.Writer, enumerant: Enumerant) !void {
}
fn renderOpcodes(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
opcode_type_name: []const u8,
want_operands: bool,
instructions: []const Instruction,
@@ -629,7 +629,7 @@ fn renderOpcodes(
}
fn renderOperandKinds(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
kinds: []const OperandKind,
extended_structs: ExtendedStructSet,
) !void {
@@ -643,7 +643,7 @@ fn renderOperandKinds(
}
fn renderValueEnum(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
enumeration: OperandKind,
extended_structs: ExtendedStructSet,
) !void {
@@ -721,7 +721,7 @@ fn renderValueEnum(
}
fn renderBitEnum(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
enumeration: OperandKind,
extended_structs: ExtendedStructSet,
) !void {
@@ -804,7 +804,7 @@ fn renderBitEnum(
}
fn renderOperand(
- writer: *std.io.Writer,
+ writer: *std.Io.Writer,
kind: enum {
@"union",
instruction,
@@ -888,7 +888,7 @@ fn renderOperand(
try writer.writeAll(",\n");
}
-fn renderFieldName(writer: *std.io.Writer, operands: []const Operand, field_index: usize) !void {
+fn renderFieldName(writer: *std.Io.Writer, operands: []const Operand, field_index: usize) !void {
const operand = operands[field_index];
derive_from_kind: {
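The remaining churn in this tool is the namespace rename: the writer type now lives at `std.Io.Writer` (capital `Io`), and custom `format` methods take that pointer and return its error set. A sketch of the convention the generated code above settles on (the enum is lifted from the hunk, trimmed down):

```zig
const std = @import("std");

const Id = enum(u32) {
    none,
    _,

    // The new formatting convention: a bare `format(self, writer)` method over
    // `*std.Io.Writer`, returning `std.Io.Writer.Error!void`.
    pub fn format(self: Id, writer: *std.Io.Writer) std.Io.Writer.Error!void {
        switch (self) {
            .none => try writer.writeAll("(none)"),
            else => try writer.print("%{d}", .{@intFromEnum(self)}),
        }
    }
};
```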
tools/gen_stubs.zig
@@ -299,10 +299,9 @@ pub fn main() !void {
// Read the ELF header.
const elf_bytes = build_all_dir.readFileAllocOptions(
- arena,
libc_so_path,
- 100 * 1024 * 1024,
- 1 * 1024 * 1024,
+ arena,
+ .limited(100 * 1024 * 1024),
.of(elf.Elf64_Ehdr),
null,
) catch |err| {
tools/generate_JSONTestSuite.zig
@@ -32,7 +32,7 @@ pub fn main() !void {
}).lessThan);
for (names.items) |name| {
- const contents = try std.fs.cwd().readFileAlloc(allocator, name, 250001);
+ const contents = try std.fs.cwd().readFileAlloc(name, allocator, .limited(250001));
try output.writeAll("test ");
try writeString(output, name);
try output.writeAll(" {\n try ");
tools/generate_linux_syscalls.zig
@@ -248,7 +248,7 @@ pub fn main() !void {
try Io.Writer.flush(stdout);
}
-fn usage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn usage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
try w.print(
\\Usage: {s} /path/to/zig /path/to/linux
\\Alternative Usage: zig run /path/to/git/zig/tools/generate_linux_syscalls.zig -- /path/to/zig /path/to/linux
tools/incr-check.zig
@@ -52,7 +52,7 @@ pub fn main() !void {
const zig_exe = opt_zig_exe orelse fatal("missing path to zig\n{s}", .{usage});
const input_file_name = opt_input_file_name orelse fatal("missing input file\n{s}", .{usage});
- const input_file_bytes = try std.fs.cwd().readFileAlloc(arena, input_file_name, std.math.maxInt(u32));
+ const input_file_bytes = try std.fs.cwd().readFileAlloc(input_file_name, arena, .limited(std.math.maxInt(u32)));
const case = try Case.parse(arena, input_file_bytes);
// Check now: if there are any targets using the `cbe` backend, we need the lib dir.
@@ -226,7 +226,7 @@ const Eval = struct {
cc_child_args: *std.ArrayListUnmanaged([]const u8),
const StreamEnum = enum { stdout, stderr };
- const Poller = std.io.Poller(StreamEnum);
+ const Poller = std.Io.Poller(StreamEnum);
/// Currently this function assumes the previous updates have already been written.
fn write(eval: *Eval, update: Case.Update) void {
tools/migrate_langref.zig
@@ -1,6 +1,5 @@
const std = @import("std");
const builtin = @import("builtin");
-const io = std.io;
const fs = std.fs;
const print = std.debug.print;
const mem = std.mem;
@@ -29,7 +28,8 @@ pub fn main() !void {
var out_dir = try fs.cwd().openDir(fs.path.dirname(output_file).?, .{});
defer out_dir.close();
- const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, std.math.maxInt(u32));
+ var in_file_reader = in_file.reader(&.{});
+ const input_file_bytes = try in_file_reader.interface.allocRemaining(arena, .unlimited);
var tokenizer = Tokenizer.init(input_file, input_file_bytes);
tools/process_headers.zig
@@ -254,7 +254,7 @@ pub fn main() !void {
.file, .sym_link => {
const rel_path = try std.fs.path.relative(allocator, target_include_dir, full_path);
const max_size = 2 * 1024 * 1024 * 1024;
- const raw_bytes = try std.fs.cwd().readFileAlloc(allocator, full_path, max_size);
+ const raw_bytes = try std.fs.cwd().readFileAlloc(full_path, allocator, .limited(max_size));
const trimmed = std.mem.trim(u8, raw_bytes, " \r\n\t");
total_bytes += raw_bytes.len;
const hash = try allocator.alloc(u8, 32);
tools/update-linux-headers.zig
@@ -206,7 +206,7 @@ pub fn main() !void {
.file => {
const rel_path = try std.fs.path.relative(arena, target_include_dir, full_path);
const max_size = 2 * 1024 * 1024 * 1024;
- const raw_bytes = try std.fs.cwd().readFileAlloc(arena, full_path, max_size);
+ const raw_bytes = try std.fs.cwd().readFileAlloc(full_path, arena, .limited(max_size));
const trimmed = std.mem.trim(u8, raw_bytes, " \r\n\t");
total_bytes += raw_bytes.len;
const hash = try arena.alloc(u8, 32);
tools/update_clang_options.zig
@@ -965,7 +965,7 @@ fn printUsageAndExit(arg0: []const u8) noreturn {
std.process.exit(1);
}
-fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn printUsage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
try w.print(
\\Usage: {s} /path/to/llvm-tblgen /path/to/git/llvm/llvm-project
\\Alternative Usage: zig run /path/to/git/zig/tools/update_clang_options.zig -- /path/to/llvm-tblgen /path/to/git/llvm/llvm-project
tools/update_crc_catalog.zig
@@ -194,7 +194,7 @@ fn printUsageAndExit(arg0: []const u8) noreturn {
std.process.exit(1);
}
-fn printUsage(w: *std.io.Writer, arg0: []const u8) std.io.Writer.Error!void {
+fn printUsage(w: *std.Io.Writer, arg0: []const u8) std.Io.Writer.Error!void {
return w.print(
\\Usage: {s} /path/git/zig
\\
tools/update_glibc.zig
@@ -116,9 +116,9 @@ pub fn main() !void {
const max_file_size = 10 * 1024 * 1024;
const generic_glibc_contents = generic_glibc_dir.readFileAlloc(
- arena,
entry.path,
- max_file_size,
+ arena,
+ .limited(max_file_size),
) catch |err| switch (err) {
error.FileNotFound => continue,
else => |e| fatal("unable to load '{s}/include/{s}': {s}", .{
@@ -126,9 +126,9 @@ pub fn main() !void {
}),
};
const glibc_include_contents = include_dir.readFileAlloc(
- arena,
entry.path,
- max_file_size,
+ arena,
+ .limited(max_file_size),
) catch |err| {
fatal("unable to load '{s}/include/{s}': {s}", .{
dest_dir_path, entry.path, @errorName(err),