Commit 8c9dfcbd0f
Changed files (20)
lib
compiler
aro
backend
Object
resinator
docs
wasm
init
src
lib/compiler/aro/aro/Driver.zig
@@ -585,6 +585,8 @@ pub fn errorDescription(e: anyerror) []const u8 {
};
}
+var stdout_buffer: [4096]u8 = undefined;
+
/// The entry point of the Aro compiler.
/// **MAY call `exit` if `fast_exit` is set.**
pub fn main(d: *Driver, tc: *Toolchain, args: []const []const u8, comptime fast_exit: bool) !void {
@@ -688,13 +690,13 @@ fn processSource(
else
std.fs.File.stdout();
defer if (d.output_name != null) file.close();
+ var file_buffer: [1024]u8 = undefined;
+ var file_writer = file.writer(&file_buffer);
- var buf_w = std.io.bufferedWriter(file.deprecatedWriter());
-
- pp.prettyPrintTokens(buf_w.writer(), dump_mode) catch |er|
+ pp.prettyPrintTokens(&file_writer.interface, dump_mode) catch |er|
return d.fatal("unable to write result: {s}", .{errorDescription(er)});
- buf_w.flush() catch |er|
+ file_writer.interface.flush() catch |er|
return d.fatal("unable to write result: {s}", .{errorDescription(er)});
if (fast_exit) std.process.exit(0); // Not linking, no need for cleanup.
return;
@@ -704,10 +706,9 @@ fn processSource(
defer tree.deinit();
if (d.verbose_ast) {
- const stdout = std.fs.File.stdout();
- var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
- tree.dump(d.detectConfig(stdout), buf_writer.writer()) catch {};
- buf_writer.flush() catch {};
+ var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+ tree.dump(d.detectConfig(.stdout()), &stdout_writer.interface) catch {};
+ stdout_writer.interface.flush() catch {};
}
const prev_errors = d.comp.diagnostics.errors;
@@ -734,10 +735,9 @@ fn processSource(
defer ir.deinit(d.comp.gpa);
if (d.verbose_ir) {
- const stdout = std.fs.File.stdout();
- var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
- ir.dump(d.comp.gpa, d.detectConfig(stdout), buf_writer.writer()) catch {};
- buf_writer.flush() catch {};
+ var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+ ir.dump(d.comp.gpa, d.detectConfig(.stdout()), &stdout_writer.interface) catch {};
+ stdout_writer.interface.flush() catch {};
}
var render_errors: Ir.Renderer.ErrorList = .{};
lib/compiler/aro/aro/Preprocessor.zig
@@ -811,10 +811,9 @@ fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args:
const source = pp.comp.getSource(raw.source);
const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start });
- const stderr = std.fs.File.stderr().deprecatedWriter();
- var buf_writer = std.io.bufferedWriter(stderr);
- const writer = buf_writer.writer();
- defer buf_writer.flush() catch {};
+ var stderr_buffer: [64]u8 = undefined;
+ var writer = std.debug.lockStderrWriter(&stderr_buffer);
+ defer std.debug.unlockStderrWriter();
writer.print("{s}:{d}:{d}: ", .{ source.path, line_col.line_no, line_col.col }) catch return;
writer.print(fmt, args) catch return;
writer.writeByte('\n') catch return;
lib/compiler/aro/backend/Object/Elf.zig
@@ -171,8 +171,9 @@ pub fn addRelocation(elf: *Elf, name: []const u8, section_kind: Object.Section,
/// strtab
/// section headers
pub fn finish(elf: *Elf, file: std.fs.File) !void {
- var buf_writer = std.io.bufferedWriter(file.deprecatedWriter());
- const w = buf_writer.writer();
+ var file_buffer: [1024]u8 = undefined;
+ var file_writer = file.writer(&file_buffer);
+ const w = &file_writer.interface;
var num_sections: std.elf.Elf64_Half = additional_sections;
var relocations_len: std.elf.Elf64_Off = 0;
@@ -374,5 +375,5 @@ pub fn finish(elf: *Elf, file: std.fs.File) !void {
name_offset += @as(u32, @intCast(entry.key_ptr.len + ".\x00".len)) + rela_name_offset;
}
}
- try buf_writer.flush();
+ try w.flush();
}
lib/compiler/resinator/compile.zig
@@ -1268,12 +1268,8 @@ pub const Compiler = struct {
try header.write(writer, self.errContext(id_token));
}
- pub fn writeResourceDataNoPadding(writer: anytype, data_reader: *std.Io.Reader, data_size: u32) !void {
- var adapted = writer.adaptToNewApi();
- var buffer: [128]u8 = undefined;
- adapted.new_interface.buffer = &buffer;
- try data_reader.streamExact(&adapted.new_interface, data_size);
- try adapted.new_interface.flush();
+ pub fn writeResourceDataNoPadding(writer: *std.Io.Writer, data_reader: *std.Io.Reader, data_size: u32) !void {
+ try data_reader.streamExact(writer, data_size);
}
pub fn writeResourceData(writer: anytype, data_reader: *std.Io.Reader, data_size: u32) !void {
@@ -1281,8 +1277,8 @@ pub const Compiler = struct {
try writeDataPadding(writer, data_size);
}
- pub fn writeDataPadding(writer: anytype, data_size: u32) !void {
- try writer.writeByteNTimes(0, numPaddingBytesNeeded(data_size));
+ pub fn writeDataPadding(writer: *std.Io.Writer, data_size: u32) !void {
+ try writer.splatByteAll(0, numPaddingBytesNeeded(data_size));
}
pub fn numPaddingBytesNeeded(data_size: u32) u2 {
@@ -2100,8 +2096,10 @@ pub const Compiler = struct {
const resource = ResourceType.fromString(type_bytes);
std.debug.assert(resource == .menu or resource == .menuex);
- self.writeMenuData(node, data_writer, resource) catch |err| switch (err) {
- error.NoSpaceLeft => {
+ var adapted = data_writer.adaptToNewApi(&.{});
+
+ self.writeMenuData(node, &adapted.new_interface, resource) catch |err| switch (err) {
+ error.WriteFailed => {
return self.addErrorDetailsAndFail(.{
.err = .resource_data_size_exceeds_max,
.token = node.id,
@@ -2129,7 +2127,7 @@ pub const Compiler = struct {
    /// Expects `data_writer` to be limited to u32 (a LimitedWriter behind the new-API adapter),
    /// meaning all writes to the writer within this function could return error.WriteFailed
- pub fn writeMenuData(self: *Compiler, node: *Node.Menu, data_writer: anytype, resource: ResourceType) !void {
+ pub fn writeMenuData(self: *Compiler, node: *Node.Menu, data_writer: *std.Io.Writer, resource: ResourceType) !void {
// menu header
const version: u16 = if (resource == .menu) 0 else 1;
try data_writer.writeInt(u16, version, .little);
@@ -2156,7 +2154,7 @@ pub const Compiler = struct {
}
}
- pub fn writeMenuItem(self: *Compiler, node: *Node, writer: anytype, is_last_of_parent: bool) !void {
+ pub fn writeMenuItem(self: *Compiler, node: *Node, writer: *std.Io.Writer, is_last_of_parent: bool) !void {
switch (node.id) {
.menu_item_separator => {
            // This is the 'alternate compatibility form' of the separator, see
@@ -2356,8 +2354,9 @@ pub const Compiler = struct {
try fixed_file_info.write(data_writer);
for (node.block_statements) |statement| {
- self.writeVersionNode(statement, data_writer, &data_buffer) catch |err| switch (err) {
- error.NoSpaceLeft => {
+ var adapted = data_writer.adaptToNewApi(&.{});
+ self.writeVersionNode(statement, &adapted.new_interface, &data_buffer) catch |err| switch (err) {
+ error.WriteFailed => {
try self.addErrorDetails(.{
.err = .version_node_size_exceeds_max,
.token = node.id,
@@ -2395,7 +2394,7 @@ pub const Compiler = struct {
    /// Expects writer to be limited to u16 (a LimitedWriter behind the new-API adapter), meaning
    /// all writes to the writer within this function could return error.WriteFailed, and that
    /// buf.items.len will never be able to exceed maxInt(u16).
- pub fn writeVersionNode(self: *Compiler, node: *Node, writer: anytype, buf: *std.ArrayList(u8)) !void {
+ pub fn writeVersionNode(self: *Compiler, node: *Node, writer: *std.Io.Writer, buf: *std.ArrayList(u8)) !void {
// We can assume that buf.items.len will never be able to exceed the limits of a u16
try writeDataPadding(writer, @as(u16, @intCast(buf.items.len)));
@@ -2700,12 +2699,12 @@ pub const Compiler = struct {
return self.writeSizeInfo(writer, size_info);
}
- pub fn writeSizeInfo(self: ResourceHeader, writer: anytype, size_info: SizeInfo) !void {
+ pub fn writeSizeInfo(self: ResourceHeader, writer: *std.Io.Writer, size_info: SizeInfo) !void {
try writer.writeInt(DWORD, self.data_size, .little); // DataSize
try writer.writeInt(DWORD, size_info.bytes, .little); // HeaderSize
try self.type_value.write(writer); // TYPE
try self.name_value.write(writer); // NAME
- try writer.writeByteNTimes(0, size_info.padding_after_name);
+ try writer.splatByteAll(0, size_info.padding_after_name);
try writer.writeInt(DWORD, self.data_version, .little); // DataVersion
try writer.writeInt(WORD, self.memory_flags.value, .little); // MemoryFlags
@@ -3120,7 +3119,7 @@ pub const FontDir = struct {
// First, the ID is written, though
try writer.writeInt(u16, font.id, .little);
try writer.writeAll(&font.header_bytes);
- try writer.writeByteNTimes(0, 2);
+ try writer.splatByteAll(0, 2);
}
try Compiler.writeDataPadding(writer, data_size);
}
lib/compiler/resinator/cvtres.zig
@@ -188,7 +188,7 @@ pub const Diagnostics = union {
overflow_resource: usize,
};
-pub fn writeCoff(allocator: Allocator, writer: anytype, resources: []const Resource, options: CoffOptions, diagnostics: ?*Diagnostics) !void {
+pub fn writeCoff(allocator: Allocator, writer: *std.Io.Writer, resources: []const Resource, options: CoffOptions, diagnostics: ?*Diagnostics) !void {
var resource_tree = ResourceTree.init(allocator, options);
defer resource_tree.deinit();
@@ -232,7 +232,7 @@ pub fn writeCoff(allocator: Allocator, writer: anytype, resources: []const Resou
.flags = flags,
};
- try writer.writeStructEndian(coff_header, .little);
+ try writer.writeStruct(coff_header, .little);
const rsrc01_header = std.coff.SectionHeader{
.name = ".rsrc$01".*,
@@ -250,7 +250,7 @@ pub fn writeCoff(allocator: Allocator, writer: anytype, resources: []const Resou
.MEM_READ = 1,
},
};
- try writer.writeStructEndian(rsrc01_header, .little);
+ try writer.writeStruct(rsrc01_header, .little);
const rsrc02_header = std.coff.SectionHeader{
.name = ".rsrc$02".*,
@@ -268,7 +268,7 @@ pub fn writeCoff(allocator: Allocator, writer: anytype, resources: []const Resou
.MEM_READ = 1,
},
};
- try writer.writeStructEndian(rsrc02_header, .little);
+ try writer.writeStruct(rsrc02_header, .little);
// TODO: test surrogate pairs
try resource_tree.sort();
@@ -665,13 +665,13 @@ const ResourceTree = struct {
pub fn writeCoff(
self: *const ResourceTree,
allocator: Allocator,
- w: anytype,
+ w: *std.Io.Writer,
resources_in_data_order: []const Resource,
lengths: Lengths,
coff_string_table: *StringTable,
) ![]const std.coff.Symbol {
if (self.type_to_name_map.count() == 0) {
- try w.writeByteNTimes(0, 16);
+ try w.splatByteAll(0, 16);
return &.{};
}
@@ -710,7 +710,7 @@ const ResourceTree = struct {
.number_of_id_entries = counts.ids,
.number_of_name_entries = counts.names,
};
- try w.writeStructEndian(table, .little);
+ try w.writeStruct(table, .little);
var it = self.type_to_name_map.iterator();
while (it.next()) |entry| {
@@ -745,7 +745,7 @@ const ResourceTree = struct {
.number_of_id_entries = counts.ids,
.number_of_name_entries = counts.names,
};
- try w.writeStructEndian(table, .little);
+ try w.writeStruct(table, .little);
var it = name_to_lang_map.iterator();
while (it.next()) |entry| {
@@ -786,7 +786,7 @@ const ResourceTree = struct {
.number_of_id_entries = counts.ids,
.number_of_name_entries = counts.names,
};
- try w.writeStructEndian(table, .little);
+ try w.writeStruct(table, .little);
var it = lang_to_resources_map.iterator();
while (it.next()) |entry| {
@@ -819,7 +819,7 @@ const ResourceTree = struct {
.size = @intCast(orig_resource.data.len),
.codepage = 0,
};
- try w.writeStructEndian(data_entry, .little);
+ try w.writeStruct(data_entry, .little);
}
for (self.rsrc_string_table.keys()) |v| {
@@ -828,7 +828,7 @@ const ResourceTree = struct {
try w.writeAll(std.mem.sliceAsBytes(str));
}
- try w.writeByteNTimes(0, lengths.padding);
+ try w.splatByteAll(0, lengths.padding);
for (relocations.list.items) |relocation| {
try writeRelocation(w, std.coff.Relocation{
@@ -842,13 +842,13 @@ const ResourceTree = struct {
for (self.deduplicated_data.keys()) |data| {
const padding_bytes: u4 = @intCast((8 -% data.len) % 8);
try w.writeAll(data);
- try w.writeByteNTimes(0, padding_bytes);
+ try w.splatByteAll(0, padding_bytes);
}
} else {
for (resources_in_data_order) |resource| {
const padding_bytes: u4 = @intCast((8 -% resource.data.len) % 8);
try w.writeAll(resource.data);
- try w.writeByteNTimes(0, padding_bytes);
+ try w.splatByteAll(0, padding_bytes);
}
}
lib/compiler/resinator/main.zig
@@ -248,10 +248,11 @@ pub fn main() !void {
var diagnostics = Diagnostics.init(allocator);
defer diagnostics.deinit();
- const res_stream_writer = res_stream.source.writer(allocator);
- var output_buffered_stream = std.io.bufferedWriter(res_stream_writer);
+ var output_buffer: [4096]u8 = undefined;
+ var res_stream_writer = res_stream.source.writer(allocator).adaptToNewApi(&output_buffer);
+ const output_buffered_stream = &res_stream_writer.new_interface;
- compile(allocator, final_input, output_buffered_stream.writer(), .{
+ compile(allocator, final_input, output_buffered_stream, .{
.cwd = std.fs.cwd(),
.diagnostics = &diagnostics,
.source_mappings = &mapping_results.mappings,
@@ -340,10 +341,11 @@ pub fn main() !void {
};
defer coff_stream.deinit(allocator);
- var coff_output_buffered_stream = std.io.bufferedWriter(coff_stream.source.writer(allocator));
+ var coff_output_buffer: [4096]u8 = undefined;
+ var coff_output_buffered_stream = coff_stream.source.writer(allocator).adaptToNewApi(&coff_output_buffer);
var cvtres_diagnostics: cvtres.Diagnostics = .{ .none = {} };
- cvtres.writeCoff(allocator, coff_output_buffered_stream.writer(), resources.list.items, options.coff_options, &cvtres_diagnostics) catch |err| {
+ cvtres.writeCoff(allocator, &coff_output_buffered_stream.new_interface, resources.list.items, options.coff_options, &cvtres_diagnostics) catch |err| {
switch (err) {
error.DuplicateResource => {
const duplicate_resource = resources.list.items[cvtres_diagnostics.duplicate_resource];
@@ -380,7 +382,7 @@ pub fn main() !void {
std.process.exit(1);
};
- try coff_output_buffered_stream.flush();
+ try coff_output_buffered_stream.new_interface.flush();
}
const IoStream = struct {
lib/compiler/libc.zig
@@ -22,6 +22,8 @@ const usage_libc =
\\
;
+var stdout_buffer: [4096]u8 = undefined;
+
pub fn main() !void {
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_instance.deinit();
@@ -34,14 +36,16 @@ pub fn main() !void {
var input_file: ?[]const u8 = null;
var target_arch_os_abi: []const u8 = "native";
var print_includes: bool = false;
+ var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+ const stdout = &stdout_writer.interface;
{
var i: usize = 2;
while (i < args.len) : (i += 1) {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_libc);
+ try stdout.flush();
return std.process.cleanExit();
} else if (mem.eql(u8, arg, "-target")) {
if (i + 1 >= args.len) fatal("expected parameter after {s}", .{arg});
@@ -97,13 +101,11 @@ pub fn main() !void {
fatal("no include dirs detected for target {s}", .{zig_target});
}
- var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
- var writer = bw.writer();
for (libc_dirs.libc_include_dir_list) |include_dir| {
- try writer.writeAll(include_dir);
- try writer.writeByte('\n');
+ try stdout.writeAll(include_dir);
+ try stdout.writeByte('\n');
}
- try bw.flush();
+ try stdout.flush();
return std.process.cleanExit();
}
@@ -125,9 +127,8 @@ pub fn main() !void {
};
defer libc.deinit(gpa);
- var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
- try libc.render(bw.writer());
- try bw.flush();
+ try libc.render(stdout);
+ try stdout.flush();
}
}
lib/docs/wasm/markdown.zig
@@ -159,9 +159,10 @@ fn mainImpl() !void {
var doc = try parser.endInput();
defer doc.deinit(gpa);
- var stdout_buf = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
- try doc.render(stdout_buf.writer());
- try stdout_buf.flush();
+ var stdout_buffer: [1024]u8 = undefined;
+ var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+ try doc.render(&stdout_writer.interface);
+ try stdout_writer.interface.flush();
}
test "empty document" {
lib/init/src/root.zig
@@ -5,14 +5,13 @@ pub fn bufferedPrint() !void {
// Stdout is for the actual output of your application, for example if you
// are implementing gzip, then only the compressed bytes should be sent to
// stdout, not any debugging messages.
- const stdout_file = std.fs.File.stdout().deprecatedWriter();
- // Buffering can improve performance significantly in print-heavy programs.
- var bw = std.io.bufferedWriter(stdout_file);
- const stdout = bw.writer();
+ var stdout_buffer: [1024]u8 = undefined;
+ var stdout_writer = std.fs.File.stdout().writer(&stdout_buffer);
+ const stdout = &stdout_writer.interface;
try stdout.print("Run `zig build test` to run the tests.\n", .{});
- try bw.flush(); // Don't forget to flush!
+ try stdout.flush(); // Don't forget to flush!
}
pub fn add(a: i32, b: i32) i32 {
lib/std/Io/buffered_writer.zig
@@ -1,43 +0,0 @@
-const std = @import("../std.zig");
-
-const io = std.io;
-const mem = std.mem;
-
-pub fn BufferedWriter(comptime buffer_size: usize, comptime WriterType: type) type {
- return struct {
- unbuffered_writer: WriterType,
- buf: [buffer_size]u8 = undefined,
- end: usize = 0,
-
- pub const Error = WriterType.Error;
- pub const Writer = io.GenericWriter(*Self, Error, write);
-
- const Self = @This();
-
- pub fn flush(self: *Self) !void {
- try self.unbuffered_writer.writeAll(self.buf[0..self.end]);
- self.end = 0;
- }
-
- pub fn writer(self: *Self) Writer {
- return .{ .context = self };
- }
-
- pub fn write(self: *Self, bytes: []const u8) Error!usize {
- if (self.end + bytes.len > self.buf.len) {
- try self.flush();
- if (bytes.len > self.buf.len)
- return self.unbuffered_writer.write(bytes);
- }
-
- const new_end = self.end + bytes.len;
- @memcpy(self.buf[self.end..new_end], bytes);
- self.end = new_end;
- return bytes.len;
- }
- };
-}
-
-pub fn bufferedWriter(underlying_stream: anytype) BufferedWriter(4096, @TypeOf(underlying_stream)) {
- return .{ .unbuffered_writer = underlying_stream };
-}
lib/std/Io/test.zig
@@ -24,12 +24,12 @@ test "write a file, read it, then delete it" {
var file = try tmp.dir.createFile(tmp_file_name, .{});
defer file.close();
- var buf_stream = io.bufferedWriter(file.deprecatedWriter());
- const st = buf_stream.writer();
+ var file_writer = file.writer(&.{});
+ const st = &file_writer.interface;
try st.print("begin", .{});
- try st.writeAll(data[0..]);
+ try st.writeAll(&data);
try st.print("end", .{});
- try buf_stream.flush();
+ try st.flush();
}
{
lib/std/zig/LibCInstallation.zig
@@ -109,7 +109,7 @@ pub fn parse(
return self;
}
-pub fn render(self: LibCInstallation, out: anytype) !void {
+pub fn render(self: LibCInstallation, out: *std.Io.Writer) !void {
@setEvalBranchQuota(4000);
const include_dir = self.include_dir orelse "";
const sys_include_dir = self.sys_include_dir orelse "";
lib/std/Io.zig
@@ -423,10 +423,6 @@ pub fn GenericWriter(
pub const AnyReader = @import("Io/DeprecatedReader.zig");
/// Deprecated in favor of `Writer`.
pub const AnyWriter = @import("Io/DeprecatedWriter.zig");
-/// Deprecated in favor of `Writer`.
-pub const BufferedWriter = @import("Io/buffered_writer.zig").BufferedWriter;
-/// Deprecated in favor of `Writer`.
-pub const bufferedWriter = @import("Io/buffered_writer.zig").bufferedWriter;
/// Deprecated in favor of `Reader`.
pub const FixedBufferStream = @import("Io/fixed_buffer_stream.zig").FixedBufferStream;
/// Deprecated in favor of `Reader`.
@@ -912,7 +908,6 @@ pub fn PollFiles(comptime StreamEnum: type) type {
test {
_ = Reader;
_ = Writer;
- _ = BufferedWriter;
_ = CountingReader;
_ = FixedBufferStream;
_ = tty;
src/link/Lld.zig
@@ -1662,8 +1662,9 @@ fn spawnLld(
log.warn("failed to delete response file {s}: {s}", .{ rsp_path, @errorName(err) });
{
defer rsp_file.close();
- var rsp_buf = std.io.bufferedWriter(rsp_file.deprecatedWriter());
- const rsp_writer = rsp_buf.writer();
+ var rsp_file_buffer: [1024]u8 = undefined;
+ var rsp_file_writer = rsp_file.writer(&rsp_file_buffer);
+ const rsp_writer = &rsp_file_writer.interface;
for (argv[2..]) |arg| {
try rsp_writer.writeByte('"');
for (arg) |c| {
@@ -1676,7 +1677,7 @@ fn spawnLld(
try rsp_writer.writeByte('"');
try rsp_writer.writeByte('\n');
}
- try rsp_buf.flush();
+ try rsp_writer.flush();
}
var rsp_child = std.process.Child.init(&.{ argv[0], argv[1], try std.fmt.allocPrint(
src/Package/Fetch.zig
@@ -1631,19 +1631,13 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute
}
fn dumpHashInfo(all_files: []const *const HashedFile) !void {
- const stdout: std.fs.File = .stdout();
- var bw = std.io.bufferedWriter(stdout.deprecatedWriter());
- const w = bw.writer();
-
+ var stdout_buffer: [1024]u8 = undefined;
+ var stdout_writer: fs.File.Writer = .initMode(.stdout(), &stdout_buffer, .streaming);
+ const w = &stdout_writer.interface;
for (all_files) |hashed_file| {
- try w.print("{s}: {x}: {s}\n", .{
- @tagName(hashed_file.kind),
- &hashed_file.hash,
- hashed_file.normalized_path,
- });
+ try w.print("{t}: {x}: {s}\n", .{ hashed_file.kind, &hashed_file.hash, hashed_file.normalized_path });
}
-
- try bw.flush();
+ try w.flush();
}
fn workerHashFile(dir: fs.Dir, hashed_file: *HashedFile) void {
tools/docgen.zig
@@ -70,19 +70,19 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(output_path, .{});
defer out_file.close();
+ var out_file_buffer: [4096]u8 = undefined;
+ var out_file_writer = out_file.writer(&out_file_buffer);
var code_dir = try fs.cwd().openDir(code_dir_path, .{});
defer code_dir.close();
const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, max_doc_file_size);
- var buffered_writer = io.bufferedWriter(out_file.deprecatedWriter());
-
var tokenizer = Tokenizer.init(input_path, input_file_bytes);
var toc = try genToc(arena, &tokenizer);
- try genHtml(arena, &tokenizer, &toc, code_dir, buffered_writer.writer());
- try buffered_writer.flush();
+ try genHtml(arena, &tokenizer, &toc, code_dir, &out_file_writer.interface);
+ try out_file_writer.end();
}
const Token = struct {
tools/doctest.zig
@@ -84,9 +84,10 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(output_path, .{});
defer out_file.close();
+ var out_file_buffer: [4096]u8 = undefined;
+ var out_file_writer = out_file.writer(&out_file_buffer);
- var bw = std.io.bufferedWriter(out_file.deprecatedWriter());
- const out = bw.writer();
+ const out = &out_file_writer.interface;
try printSourceBlock(arena, out, source, fs.path.basename(input_path));
try printOutput(
@@ -102,7 +103,7 @@ pub fn main() !void {
null,
);
- try bw.flush();
+ try out_file_writer.end();
}
fn printOutput(
tools/migrate_langref.zig
@@ -23,19 +23,19 @@ pub fn main() !void {
var out_file = try fs.cwd().createFile(output_file, .{});
defer out_file.close();
+ var out_file_buffer: [4096]u8 = undefined;
+ var out_file_writer = out_file.writer(&out_file_buffer);
var out_dir = try fs.cwd().openDir(fs.path.dirname(output_file).?, .{});
defer out_dir.close();
const input_file_bytes = try in_file.deprecatedReader().readAllAlloc(arena, std.math.maxInt(u32));
- var buffered_writer = io.bufferedWriter(out_file.deprecatedWriter());
-
var tokenizer = Tokenizer.init(input_file, input_file_bytes);
- try walk(arena, &tokenizer, out_dir, buffered_writer.writer());
+ try walk(arena, &tokenizer, out_dir, &out_file_writer.interface);
- try buffered_writer.flush();
+ try out_file_writer.end();
}
const Token = struct {
tools/update_cpu_features.zig
@@ -1906,8 +1906,9 @@ fn processOneTarget(job: Job) void {
var zig_code_file = try target_dir.createFile(zig_code_basename, .{});
defer zig_code_file.close();
- var bw = std.io.bufferedWriter(zig_code_file.deprecatedWriter());
- const w = bw.writer();
+ var zig_code_file_buffer: [4096]u8 = undefined;
+ var zig_code_file_writer = zig_code_file.writer(&zig_code_file_buffer);
+ const w = &zig_code_file_writer.interface;
try w.writeAll(
\\//! This file is auto-generated by tools/update_cpu_features.zig.
@@ -2076,7 +2077,7 @@ fn processOneTarget(job: Job) void {
\\};
\\
);
- try bw.flush();
+ try w.flush();
render_progress.end();
}
tools/update_crc_catalog.zig
@@ -29,10 +29,9 @@ pub fn main() anyerror!void {
var zig_code_file = try hash_target_dir.createFile("crc.zig", .{});
defer zig_code_file.close();
-
- var cbw = std.io.bufferedWriter(zig_code_file.deprecatedWriter());
- defer cbw.flush() catch unreachable;
- const code_writer = cbw.writer();
+ var zig_code_file_buffer: [4096]u8 = undefined;
+ var zig_code_file_writer = zig_code_file.writer(&zig_code_file_buffer);
+ const code_writer = &zig_code_file_writer.interface;
try code_writer.writeAll(
\\//! This file is auto-generated by tools/update_crc_catalog.zig.
@@ -54,10 +53,9 @@ pub fn main() anyerror!void {
var zig_test_file = try crc_target_dir.createFile("test.zig", .{});
defer zig_test_file.close();
-
- var tbw = std.io.bufferedWriter(zig_test_file.deprecatedWriter());
- defer tbw.flush() catch unreachable;
- const test_writer = tbw.writer();
+ var zig_test_file_buffer: [4096]u8 = undefined;
+ var zig_test_file_writer = zig_test_file.writer(&zig_test_file_buffer);
+ const test_writer = &zig_test_file_writer.interface;
try test_writer.writeAll(
\\//! This file is auto-generated by tools/update_crc_catalog.zig.
@@ -187,6 +185,9 @@ pub fn main() anyerror!void {
\\
, .{ name, camelcase, width, check }));
}
+
+ try code_writer.flush();
+ try test_writer.flush();
}
fn printUsageAndExit(arg0: []const u8) noreturn {