Commit 0e37ff0d59
Changed files (161)
  lib/
    compiler
    docs/wasm/markdown
    init/src
    std/  (Build, crypto, fmt, hash, math, net, os, posix, process, Random, Target, unicode, zig, zip)
  src/  (Air, arch/x86_64, codegen, link, Package, Sema)
  test/  (behavior, link, standalone)
lib/compiler/aro/aro/Compilation.zig
@@ -1432,7 +1432,7 @@ fn getFileContents(comp: *Compilation, path: []const u8, limit: ?u32) ![]const u
defer buf.deinit();
const max = limit orelse std.math.maxInt(u32);
- file.reader().readAllArrayList(&buf, max) catch |e| switch (e) {
+ file.deprecatedReader().readAllArrayList(&buf, max) catch |e| switch (e) {
error.StreamTooLong => if (limit == null) return e,
else => return e,
};
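Pattern note: throughout this commit, `std.fs.File.reader()` becomes `deprecatedReader()` (the old name is being freed up for the new `std.io.Reader` interface). A minimal sketch of a migrated call site, mirroring the hunk above; the path handling and size limit are illustrative, not from the commit:

```zig
const std = @import("std");

pub fn readWholeFile(gpa: std.mem.Allocator, path: []const u8) ![]u8 {
    var file = try std.fs.cwd().openFile(path, .{});
    defer file.close();

    var buf = std.ArrayList(u8).init(gpa);
    errdefer buf.deinit();

    // Old: file.reader().readAllArrayList(&buf, limit)
    try file.deprecatedReader().readAllArrayList(&buf, std.math.maxInt(u32));
    return buf.toOwnedSlice();
}
```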
lib/compiler/aro/aro/Diagnostics.zig
@@ -1,4 +1,5 @@
const std = @import("std");
+const assert = std.debug.assert;
const Allocator = mem.Allocator;
const mem = std.mem;
const Source = @import("Source.zig");
@@ -443,18 +444,13 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void {
printRt(m, prop.msg, .{"{s}"}, .{&str});
} else {
var buf: [3]u8 = undefined;
- const str = std.fmt.bufPrint(&buf, "x{x}", .{std.fmt.fmtSliceHexLower(&.{msg.extra.invalid_escape.char})}) catch unreachable;
+ const str = std.fmt.bufPrint(&buf, "x{x}", .{&.{msg.extra.invalid_escape.char}}) catch unreachable;
printRt(m, prop.msg, .{"{s}"}, .{str});
}
},
.normalized => {
const f = struct {
- pub fn f(
- bytes: []const u8,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
+ pub fn f(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
var it: std.unicode.Utf8Iterator = .{
.bytes = bytes,
.i = 0,
@@ -464,22 +460,16 @@ pub fn renderMessage(comp: *Compilation, m: anytype, msg: Message) void {
try writer.writeByte(@intCast(codepoint));
} else if (codepoint < 0xFFFF) {
try writer.writeAll("\\u");
- try std.fmt.formatInt(codepoint, 16, .upper, .{
- .fill = '0',
- .width = 4,
- }, writer);
+ try writer.printIntOptions(codepoint, 16, .upper, .{ .fill = '0', .width = 4 });
} else {
try writer.writeAll("\\U");
- try std.fmt.formatInt(codepoint, 16, .upper, .{
- .fill = '0',
- .width = 8,
- }, writer);
+ try writer.printIntOptions(codepoint, 16, .upper, .{ .fill = '0', .width = 8 });
}
}
}
}.f;
- printRt(m, prop.msg, .{"{s}"}, .{
- std.fmt.Formatter(f){ .data = msg.extra.normalized },
+ printRt(m, prop.msg, .{"{f}"}, .{
+ std.fmt.Formatter([]const u8, f){ .data = msg.extra.normalized },
});
},
.none, .offset => m.write(prop.msg),
@@ -541,7 +531,7 @@ const MsgWriter = struct {
fn init(config: std.io.tty.Config) MsgWriter {
std.debug.lockStdErr();
return .{
- .w = std.io.bufferedWriter(std.fs.File.stderr().writer()),
+ .w = std.io.bufferedWriter(std.fs.File.stderr().deprecatedWriter()),
.config = config,
};
}
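Pattern note: custom format functions for `std.fmt.Formatter` now take the data plus a concrete `*std.io.Writer` (no comptime format string or `FormatOptions`), `Formatter` gains the data type as a first parameter, and such values are printed with `{f}`. A small sketch of that shape under those assumptions; `fmtUpper` is a made-up example, not part of the commit:

```zig
const std = @import("std");

fn formatUpper(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
    for (bytes) |b| try writer.writeByte(std.ascii.toUpper(b));
}

fn fmtUpper(bytes: []const u8) std.fmt.Formatter([]const u8, formatUpper) {
    return .{ .data = bytes };
}

test "Formatter values are printed with {f}" {
    var buf: [8]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf, "{f}", .{fmtUpper("abc")});
    try std.testing.expectEqualStrings("ABC", s);
}
```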
lib/compiler/aro/aro/Driver.zig
@@ -591,7 +591,7 @@ pub fn main(d: *Driver, tc: *Toolchain, args: []const []const u8, comptime fast_
var macro_buf = std.ArrayList(u8).init(d.comp.gpa);
defer macro_buf.deinit();
- const std_out = std.fs.File.stdout().writer();
+ const std_out = std.fs.File.stdout().deprecatedWriter();
if (try parseArgs(d, std_out, macro_buf.writer(), args)) return;
const linking = !(d.only_preprocess or d.only_syntax or d.only_compile or d.only_preprocess_and_compile);
@@ -689,7 +689,7 @@ fn processSource(
std.fs.File.stdout();
defer if (d.output_name != null) file.close();
- var buf_w = std.io.bufferedWriter(file.writer());
+ var buf_w = std.io.bufferedWriter(file.deprecatedWriter());
pp.prettyPrintTokens(buf_w.writer(), dump_mode) catch |er|
return d.fatal("unable to write result: {s}", .{errorDescription(er)});
@@ -705,7 +705,7 @@ fn processSource(
if (d.verbose_ast) {
const stdout = std.fs.File.stdout();
- var buf_writer = std.io.bufferedWriter(stdout.writer());
+ var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
tree.dump(d.detectConfig(stdout), buf_writer.writer()) catch {};
buf_writer.flush() catch {};
}
@@ -735,7 +735,7 @@ fn processSource(
if (d.verbose_ir) {
const stdout = std.fs.File.stdout();
- var buf_writer = std.io.bufferedWriter(stdout.writer());
+ var buf_writer = std.io.bufferedWriter(stdout.deprecatedWriter());
ir.dump(d.comp.gpa, d.detectConfig(stdout), buf_writer.writer()) catch {};
buf_writer.flush() catch {};
}
@@ -806,10 +806,10 @@ fn processSource(
}
fn dumpLinkerArgs(items: []const []const u8) !void {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
for (items, 0..) |item, i| {
if (i > 0) try stdout.writeByte(' ');
- try stdout.print("\"{}\"", .{std.zig.fmtEscapes(item)});
+ try stdout.print("\"{f}\"", .{std.zig.fmtString(item)});
}
try stdout.writeByte('\n');
}
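Pattern note: `std.zig.fmtEscapes` is renamed to `std.zig.fmtString`, and since the returned value is a `Formatter` it is now printed with `{f}` instead of `{}`. A sketch of the behavior, assuming the escaping itself is unchanged (special characters escaped as in a double-quoted Zig string literal):

```zig
const std = @import("std");

test "fmtString escapes for a double-quoted Zig string literal" {
    var buf: [32]u8 = undefined;
    // Old: std.fmt.bufPrint(&buf, "\"{}\"", .{std.zig.fmtEscapes(arg)})
    const s = try std.fmt.bufPrint(&buf, "\"{f}\"", .{std.zig.fmtString("path with \"quotes\"")});
    try std.testing.expectEqualStrings("\"path with \\\"quotes\\\"\"", s);
}
```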
lib/compiler/aro/aro/Parser.zig
@@ -500,8 +500,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const w = p.strings.writer();
const msg_str = p.comp.interner.get(@"error".msg.ref()).bytes;
- try w.print("call to '{s}' declared with attribute error: {}", .{
- p.tokSlice(@"error".__name_tok), std.zig.fmtEscapes(msg_str),
+ try w.print("call to '{s}' declared with attribute error: {f}", .{
+ p.tokSlice(@"error".__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.error_attribute, usage_tok, str);
@@ -512,8 +512,8 @@ fn checkDeprecatedUnavailable(p: *Parser, ty: Type, usage_tok: TokenIndex, decl_
const w = p.strings.writer();
const msg_str = p.comp.interner.get(warning.msg.ref()).bytes;
- try w.print("call to '{s}' declared with attribute warning: {}", .{
- p.tokSlice(warning.__name_tok), std.zig.fmtEscapes(msg_str),
+ try w.print("call to '{s}' declared with attribute warning: {f}", .{
+ p.tokSlice(warning.__name_tok), std.zig.fmtString(msg_str),
});
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
try p.errStr(.warning_attribute, usage_tok, str);
@@ -542,7 +542,7 @@ fn errDeprecated(p: *Parser, tag: Diagnostics.Tag, tok_i: TokenIndex, msg: ?Valu
try w.writeAll(reason);
if (msg) |m| {
const str = p.comp.interner.get(m.ref()).bytes;
- try w.print(": {}", .{std.zig.fmtEscapes(str)});
+ try w.print(": {f}", .{std.zig.fmtString(str)});
}
const str = try p.comp.diagnostics.arena.allocator().dupe(u8, p.strings.items[strings_top..]);
return p.errStr(tag, tok_i, str);
lib/compiler/aro/aro/Preprocessor.zig
@@ -811,7 +811,7 @@ fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args:
const source = pp.comp.getSource(raw.source);
const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start });
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
var buf_writer = std.io.bufferedWriter(stderr);
const writer = buf_writer.writer();
defer buf_writer.flush() catch {};
@@ -3262,7 +3262,8 @@ fn printLinemarker(
// containing the same bytes as the input regardless of encoding.
else => {
try w.writeAll("\\x");
- try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, w);
+ // TODO try w.printIntOptions(byte, 16, .lower, .{ .width = 2, .fill = '0' });
+ try w.print("{x:0>2}", .{byte});
},
};
try w.writeByte('"');
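Pattern note: with `std.fmt.formatInt` going away, fixed-width padded integers are written via a format spec instead; `{x:0>2}` means lowercase hex, zero-filled to width 2 (the TODO above notes the eventual `printIntOptions` equivalent). For instance:

```zig
const std = @import("std");

test "zero-padded hex via format spec" {
    var buf: [8]u8 = undefined;
    // Equivalent of the old formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, w).
    try std.testing.expectEqualStrings("0a", try std.fmt.bufPrint(&buf, "{x:0>2}", .{@as(u8, 0x0a)}));
}
```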
lib/compiler/aro/aro/Value.zig
@@ -982,7 +982,7 @@ pub fn printString(bytes: []const u8, ty: Type, comp: *const Compilation, w: any
const without_null = bytes[0 .. bytes.len - @intFromEnum(size)];
try w.writeByte('"');
switch (size) {
- .@"1" => try w.print("{}", .{std.zig.fmtEscapes(without_null)}),
+ .@"1" => try w.print("{f}", .{std.zig.fmtString(without_null)}),
.@"2" => {
var items: [2]u16 = undefined;
var i: usize = 0;
lib/compiler/aro/backend/Object/Elf.zig
@@ -171,7 +171,7 @@ pub fn addRelocation(elf: *Elf, name: []const u8, section_kind: Object.Section,
/// strtab
/// section headers
pub fn finish(elf: *Elf, file: std.fs.File) !void {
- var buf_writer = std.io.bufferedWriter(file.writer());
+ var buf_writer = std.io.bufferedWriter(file.deprecatedWriter());
const w = buf_writer.writer();
var num_sections: std.elf.Elf64_Half = additional_sections;
lib/compiler/aro_translate_c/ast.zig
@@ -849,7 +849,7 @@ const Context = struct {
fn addIdentifier(c: *Context, bytes: []const u8) Allocator.Error!TokenIndex {
if (std.zig.primitives.isPrimitive(bytes))
return c.addTokenFmt(.identifier, "@\"{s}\"", .{bytes});
- return c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(bytes)});
+ return c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(bytes, .{ .allow_primitive = true })});
}
fn listToSpan(c: *Context, list: []const NodeIndex) Allocator.Error!NodeSubRange {
@@ -1201,7 +1201,7 @@ fn renderNode(c: *Context, node: Node) Allocator.Error!NodeIndex {
const compile_error_tok = try c.addToken(.builtin, "@compileError");
_ = try c.addToken(.l_paren, "(");
- const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(payload.mangled)});
+ const err_msg_tok = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(payload.mangled)});
const err_msg = try c.addNode(.{
.tag = .string_literal,
.main_token = err_msg_tok,
@@ -2116,7 +2116,7 @@ fn renderRecord(c: *Context, node: Node) !NodeIndex {
defer c.gpa.free(members);
for (payload.fields, 0..) |field, i| {
- const name_tok = try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field.name)});
+ const name_tok = try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true })});
_ = try c.addToken(.colon, ":");
const type_expr = try renderNode(c, field.type);
@@ -2205,7 +2205,7 @@ fn renderFieldAccess(c: *Context, lhs: NodeIndex, field_name: []const u8) !NodeI
.main_token = try c.addToken(.period, "."),
.data = .{ .node_and_token = .{
lhs,
- try c.addTokenFmt(.identifier, "{p}", .{std.zig.fmtId(field_name)}),
+ try c.addTokenFmt(.identifier, "{f}", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })}),
} },
});
}
@@ -2681,7 +2681,7 @@ fn renderVar(c: *Context, node: Node) !NodeIndex {
_ = try c.addToken(.l_paren, "(");
const res = try c.addNode(.{
.tag = .string_literal,
- .main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}),
+ .main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}),
.data = undefined,
});
_ = try c.addToken(.r_paren, ")");
@@ -2765,7 +2765,7 @@ fn renderFunc(c: *Context, node: Node) !NodeIndex {
_ = try c.addToken(.l_paren, "(");
const res = try c.addNode(.{
.tag = .string_literal,
- .main_token = try c.addTokenFmt(.string_literal, "\"{}\"", .{std.zig.fmtEscapes(some)}),
+ .main_token = try c.addTokenFmt(.string_literal, "\"{f}\"", .{std.zig.fmtString(some)}),
.data = undefined,
});
_ = try c.addToken(.r_paren, ")");
lib/compiler/resinator/cli.zig
@@ -127,7 +127,7 @@ pub const Diagnostics = struct {
pub fn renderToStdErr(self: *Diagnostics, args: []const []const u8, config: std.io.tty.Config) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
self.renderToWriter(args, stderr, config) catch return;
}
lib/compiler/resinator/compile.zig
@@ -570,7 +570,7 @@ pub const Compiler = struct {
switch (predefined_type) {
.GROUP_ICON, .GROUP_CURSOR => {
// Check for animated icon first
- if (ani.isAnimatedIcon(file.reader())) {
+ if (ani.isAnimatedIcon(file.deprecatedReader())) {
// Animated icons are just put into the resource unmodified,
// and the resource type changes to ANIICON/ANICURSOR
@@ -586,14 +586,14 @@ pub const Compiler = struct {
try header.write(writer, self.errContext(node.id));
try file.seekTo(0);
- try writeResourceData(writer, file.reader(), header.data_size);
+ try writeResourceData(writer, file.deprecatedReader(), header.data_size);
return;
}
// isAnimatedIcon moved the file cursor so reset to the start
try file.seekTo(0);
- const icon_dir = ico.read(self.allocator, file.reader(), try file.getEndPos()) catch |err| switch (err) {
+ const icon_dir = ico.read(self.allocator, file.deprecatedReader(), try file.getEndPos()) catch |err| switch (err) {
error.OutOfMemory => |e| return e,
else => |e| {
return self.iconReadError(
@@ -672,7 +672,7 @@ pub const Compiler = struct {
}
try file.seekTo(entry.data_offset_from_start_of_file);
- var header_bytes = file.reader().readBytesNoEof(16) catch {
+ var header_bytes = file.deprecatedReader().readBytesNoEof(16) catch {
return self.iconReadError(
error.UnexpectedEOF,
filename_utf8,
@@ -803,7 +803,7 @@ pub const Compiler = struct {
}
try file.seekTo(entry.data_offset_from_start_of_file);
- try writeResourceDataNoPadding(writer, file.reader(), entry.data_size_in_bytes);
+ try writeResourceDataNoPadding(writer, file.deprecatedReader(), entry.data_size_in_bytes);
try writeDataPadding(writer, full_data_size);
if (self.state.icon_id == std.math.maxInt(u16)) {
@@ -859,7 +859,7 @@ pub const Compiler = struct {
header.applyMemoryFlags(node.common_resource_attributes, self.source);
const file_size = try file.getEndPos();
- const bitmap_info = bmp.read(file.reader(), file_size) catch |err| {
+ const bitmap_info = bmp.read(file.deprecatedReader(), file_size) catch |err| {
const filename_string_index = try self.diagnostics.putString(filename_utf8);
return self.addErrorDetailsAndFail(.{
.err = .bmp_read_error,
@@ -922,7 +922,7 @@ pub const Compiler = struct {
header.data_size = bmp_bytes_to_write;
try header.write(writer, self.errContext(node.id));
try file.seekTo(bmp.file_header_len);
- const file_reader = file.reader();
+ const file_reader = file.deprecatedReader();
try writeResourceDataNoPadding(writer, file_reader, bitmap_info.dib_header_size);
if (bitmap_info.getBitmasksByteLen() > 0) {
try writeResourceDataNoPadding(writer, file_reader, bitmap_info.getBitmasksByteLen());
@@ -968,7 +968,7 @@ pub const Compiler = struct {
header.data_size = @intCast(file_size);
try header.write(writer, self.errContext(node.id));
- var header_slurping_reader = headerSlurpingReader(148, file.reader());
+ var header_slurping_reader = headerSlurpingReader(148, file.deprecatedReader());
try writeResourceData(writer, header_slurping_reader.reader(), header.data_size);
try self.state.font_dir.add(self.arena, FontDir.Font{
@@ -1002,7 +1002,7 @@ pub const Compiler = struct {
// We now know that the data size will fit in a u32
header.data_size = @intCast(data_size);
try header.write(writer, self.errContext(node.id));
- try writeResourceData(writer, file.reader(), header.data_size);
+ try writeResourceData(writer, file.deprecatedReader(), header.data_size);
}
fn iconReadError(
lib/compiler/resinator/errors.zig
@@ -1,4 +1,5 @@
const std = @import("std");
+const assert = std.debug.assert;
const Token = @import("lex.zig").Token;
const SourceMappings = @import("source_mapping.zig").SourceMappings;
const utils = @import("utils.zig");
@@ -63,7 +64,7 @@ pub const Diagnostics = struct {
pub fn renderToStdErr(self: *Diagnostics, cwd: std.fs.Dir, source: []const u8, tty_config: std.io.tty.Config, source_mappings: ?SourceMappings) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
for (self.errors.items) |err_details| {
renderErrorMessage(stderr, tty_config, cwd, err_details, source, self.strings.items, source_mappings) catch return;
}
@@ -409,15 +410,7 @@ pub const ErrorDetails = struct {
failed_to_open_cwd,
};
- fn formatToken(
- ctx: TokenFormatContext,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt;
- _ = options;
-
+ fn formatToken(ctx: TokenFormatContext, writer: *std.io.Writer) std.io.Writer.Error!void {
switch (ctx.token.id) {
.eof => return writer.writeAll(ctx.token.id.nameForErrorDisplay()),
else => {},
@@ -441,7 +434,7 @@ pub const ErrorDetails = struct {
code_page: SupportedCodePage,
};
- fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(formatToken) {
+ fn fmtToken(self: ErrorDetails, source: []const u8) std.fmt.Formatter(TokenFormatContext, formatToken) {
return .{ .data = .{
.token = self.token,
.code_page = self.code_page,
@@ -466,10 +459,14 @@ pub const ErrorDetails = struct {
.hint => return,
},
.illegal_byte => {
- return writer.print("character '{s}' is not allowed", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))});
+ return writer.print("character '{f}' is not allowed", .{
+ std.ascii.hexEscape(self.token.slice(source), .upper),
+ });
},
.illegal_byte_outside_string_literals => {
- return writer.print("character '{s}' is not allowed outside of string literals", .{std.fmt.fmtSliceEscapeUpper(self.token.slice(source))});
+ return writer.print("character '{f}' is not allowed outside of string literals", .{
+ std.ascii.hexEscape(self.token.slice(source), .upper),
+ });
},
.illegal_codepoint_outside_string_literals => {
// This is somewhat hacky, but we know that:
@@ -1106,7 +1103,7 @@ const CorrespondingLines = struct {
.code_page = err_details.code_page,
};
corresponding_lines.buffered_reader = BufferedReaderType{
- .unbuffered_reader = corresponding_lines.file.reader(),
+ .unbuffered_reader = corresponding_lines.file.deprecatedReader(),
};
errdefer corresponding_lines.deinit();
lib/compiler/resinator/lex.zig
@@ -237,7 +237,9 @@ pub const Lexer = struct {
}
pub fn dump(self: *Self, token: *const Token) void {
- std.debug.print("{s}:{d}: {s}\n", .{ @tagName(token.id), token.line_number, std.fmt.fmtSliceEscapeLower(token.slice(self.buffer)) });
+ std.debug.print("{s}:{d}: {f}\n", .{
+ @tagName(token.id), token.line_number, std.ascii.hexEscape(token.slice(self.buffer), .lower),
+ });
}
pub const LexMethod = enum {
lib/compiler/resinator/main.zig
@@ -29,7 +29,7 @@ pub fn main() !void {
defer std.process.argsFree(allocator, args);
if (args.len < 2) {
- try renderErrorMessage(stderr.writer(), stderr_config, .err, "expected zig lib dir as first argument", .{});
+ try renderErrorMessage(stderr.deprecatedWriter(), stderr_config, .err, "expected zig lib dir as first argument", .{});
std.process.exit(1);
}
const zig_lib_dir = args[1];
@@ -82,14 +82,14 @@ pub fn main() !void {
if (options.print_help_and_exit) {
const stdout = std.fs.File.stdout();
- try cli.writeUsage(stdout.writer(), "zig rc");
+ try cli.writeUsage(stdout.deprecatedWriter(), "zig rc");
return;
}
// Don't allow verbose when integrating with Zig via stdout
options.verbose = false;
- const stdout_writer = std.fs.File.stdout().writer();
+ const stdout_writer = std.fs.File.stdout().deprecatedWriter();
if (options.verbose) {
try options.dumpVerbose(stdout_writer);
try stdout_writer.writeByte('\n');
@@ -290,7 +290,7 @@ pub fn main() !void {
};
defer depfile.close();
- const depfile_writer = depfile.writer();
+ const depfile_writer = depfile.deprecatedWriter();
var depfile_buffered_writer = std.io.bufferedWriter(depfile_writer);
switch (options.depfile_fmt) {
.json => {
@@ -645,7 +645,7 @@ const ErrorHandler = union(enum) {
},
.tty => {
// extra newline to separate this line from the aro errors
- try renderErrorMessage(std.fs.File.stderr().writer(), self.tty, .err, "{s}\n", .{fail_msg});
+ try renderErrorMessage(std.fs.File.stderr().deprecatedWriter(), self.tty, .err, "{s}\n", .{fail_msg});
aro.Diagnostics.render(comp, self.tty);
},
}
@@ -690,7 +690,7 @@ const ErrorHandler = union(enum) {
try server.serveErrorBundle(error_bundle);
},
.tty => {
- try renderErrorMessage(std.fs.File.stderr().writer(), self.tty, msg_type, format, args);
+ try renderErrorMessage(std.fs.File.stderr().deprecatedWriter(), self.tty, msg_type, format, args);
},
}
}
lib/compiler/resinator/res.zig
@@ -1,4 +1,5 @@
const std = @import("std");
+const assert = std.debug.assert;
const rc = @import("rc.zig");
const ResourceType = rc.ResourceType;
const CommonResourceAttributes = rc.CommonResourceAttributes;
@@ -163,14 +164,8 @@ pub const Language = packed struct(u16) {
return @bitCast(self);
}
- pub fn format(
- language: Language,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- _ = fmt;
- _ = options;
+ pub fn format(language: Language, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
const language_id = language.asInt();
const language_name = language_name: {
if (std.enums.fromInt(lang.LanguageId, language_id)) |lang_enum_val| {
@@ -181,7 +176,7 @@ pub const Language = packed struct(u16) {
}
break :language_name "<UNKNOWN>";
};
- try out_stream.print("{s} (0x{X})", .{ language_name, language_id });
+ try w.print("{s} (0x{X})", .{ language_name, language_id });
}
};
@@ -445,47 +440,34 @@ pub const NameOrOrdinal = union(enum) {
}
}
- pub fn format(
- self: NameOrOrdinal,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- _ = fmt;
- _ = options;
+ pub fn format(self: NameOrOrdinal, w: *std.io.Writer, comptime fmt: []const u8) !void {
+ comptime assert(fmt.len == 0);
switch (self) {
.name => |name| {
- try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)});
+ try w.print("{s}", .{std.unicode.fmtUtf16Le(name)});
},
.ordinal => |ordinal| {
- try out_stream.print("{d}", .{ordinal});
+ try w.print("{d}", .{ordinal});
},
}
}
- fn formatResourceType(
- self: NameOrOrdinal,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- _ = fmt;
- _ = options;
+ fn formatResourceType(self: NameOrOrdinal, w: *std.io.Writer) std.io.Writer.Error!void {
switch (self) {
.name => |name| {
- try out_stream.print("{s}", .{std.unicode.fmtUtf16Le(name)});
+ try w.print("{s}", .{std.unicode.fmtUtf16Le(name)});
},
.ordinal => |ordinal| {
if (std.enums.tagName(RT, @enumFromInt(ordinal))) |predefined_type_name| {
- try out_stream.print("{s}", .{predefined_type_name});
+ try w.print("{s}", .{predefined_type_name});
} else {
- try out_stream.print("{d}", .{ordinal});
+ try w.print("{d}", .{ordinal});
}
},
}
}
- pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(formatResourceType) {
+ pub fn fmtResourceType(type_value: NameOrOrdinal) std.fmt.Formatter(NameOrOrdinal, formatResourceType) {
return .{ .data = type_value };
}
};
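Pattern note: `format` methods now take a concrete `*std.io.Writer`, with the (unused) format string as a trailing comptime parameter asserted empty, and are invoked through `{f}`. A sketch of that method shape; `Version` is a hypothetical type, not from the commit:

```zig
const std = @import("std");
const assert = std.debug.assert;

const Version = struct {
    major: u16,
    minor: u16,

    pub fn format(v: Version, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
        comptime assert(fmt.len == 0);
        try w.print("{d}.{d}", .{ v.major, v.minor });
    }
};

test "format method invoked via {f}" {
    var buf: [16]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf, "{f}", .{Version{ .major = 1, .minor = 14 }});
    try std.testing.expectEqualStrings("1.14", s);
}
```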
lib/compiler/build_runner.zig
@@ -365,7 +365,7 @@ pub fn main() !void {
.data = buffer.items,
.flags = .{ .exclusive = true },
}) catch |err| {
- fatal("unable to write configuration results to '{}{s}': {s}", .{
+ fatal("unable to write configuration results to '{f}{s}': {s}", .{
local_cache_directory, tmp_sub_path, @errorName(err),
});
};
@@ -378,7 +378,7 @@ pub fn main() !void {
validateSystemLibraryOptions(builder);
- const stdout_writer = std.fs.File.stdout().writer();
+ const stdout_writer = std.fs.File.stdout().deprecatedWriter();
if (help_menu)
return usage(builder, stdout_writer);
@@ -704,14 +704,14 @@ fn runStepNames(
ttyconf.setColor(stderr, .cyan) catch {};
stderr.writeAll("Build Summary:") catch {};
ttyconf.setColor(stderr, .reset) catch {};
- stderr.writer().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {};
- if (skipped_count > 0) stderr.writer().print("; {d} skipped", .{skipped_count}) catch {};
- if (failure_count > 0) stderr.writer().print("; {d} failed", .{failure_count}) catch {};
+ stderr.deprecatedWriter().print(" {d}/{d} steps succeeded", .{ success_count, total_count }) catch {};
+ if (skipped_count > 0) stderr.deprecatedWriter().print("; {d} skipped", .{skipped_count}) catch {};
+ if (failure_count > 0) stderr.deprecatedWriter().print("; {d} failed", .{failure_count}) catch {};
- if (test_count > 0) stderr.writer().print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {};
- if (test_skip_count > 0) stderr.writer().print("; {d} skipped", .{test_skip_count}) catch {};
- if (test_fail_count > 0) stderr.writer().print("; {d} failed", .{test_fail_count}) catch {};
- if (test_leak_count > 0) stderr.writer().print("; {d} leaked", .{test_leak_count}) catch {};
+ if (test_count > 0) stderr.deprecatedWriter().print("; {d}/{d} tests passed", .{ test_pass_count, test_count }) catch {};
+ if (test_skip_count > 0) stderr.deprecatedWriter().print("; {d} skipped", .{test_skip_count}) catch {};
+ if (test_fail_count > 0) stderr.deprecatedWriter().print("; {d} failed", .{test_fail_count}) catch {};
+ if (test_leak_count > 0) stderr.deprecatedWriter().print("; {d} leaked", .{test_leak_count}) catch {};
stderr.writeAll("\n") catch {};
@@ -820,10 +820,10 @@ fn printStepStatus(
try stderr.writeAll(" cached");
} else if (s.test_results.test_count > 0) {
const pass_count = s.test_results.passCount();
- try stderr.writer().print(" {d} passed", .{pass_count});
+ try stderr.deprecatedWriter().print(" {d} passed", .{pass_count});
if (s.test_results.skip_count > 0) {
try ttyconf.setColor(stderr, .yellow);
- try stderr.writer().print(" {d} skipped", .{s.test_results.skip_count});
+ try stderr.deprecatedWriter().print(" {d} skipped", .{s.test_results.skip_count});
}
} else {
try stderr.writeAll(" success");
@@ -832,15 +832,15 @@ fn printStepStatus(
if (s.result_duration_ns) |ns| {
try ttyconf.setColor(stderr, .dim);
if (ns >= std.time.ns_per_min) {
- try stderr.writer().print(" {d}m", .{ns / std.time.ns_per_min});
+ try stderr.deprecatedWriter().print(" {d}m", .{ns / std.time.ns_per_min});
} else if (ns >= std.time.ns_per_s) {
- try stderr.writer().print(" {d}s", .{ns / std.time.ns_per_s});
+ try stderr.deprecatedWriter().print(" {d}s", .{ns / std.time.ns_per_s});
} else if (ns >= std.time.ns_per_ms) {
- try stderr.writer().print(" {d}ms", .{ns / std.time.ns_per_ms});
+ try stderr.deprecatedWriter().print(" {d}ms", .{ns / std.time.ns_per_ms});
} else if (ns >= std.time.ns_per_us) {
- try stderr.writer().print(" {d}us", .{ns / std.time.ns_per_us});
+ try stderr.deprecatedWriter().print(" {d}us", .{ns / std.time.ns_per_us});
} else {
- try stderr.writer().print(" {d}ns", .{ns});
+ try stderr.deprecatedWriter().print(" {d}ns", .{ns});
}
try ttyconf.setColor(stderr, .reset);
}
@@ -848,13 +848,13 @@ fn printStepStatus(
const rss = s.result_peak_rss;
try ttyconf.setColor(stderr, .dim);
if (rss >= 1000_000_000) {
- try stderr.writer().print(" MaxRSS:{d}G", .{rss / 1000_000_000});
+ try stderr.deprecatedWriter().print(" MaxRSS:{d}G", .{rss / 1000_000_000});
} else if (rss >= 1000_000) {
- try stderr.writer().print(" MaxRSS:{d}M", .{rss / 1000_000});
+ try stderr.deprecatedWriter().print(" MaxRSS:{d}M", .{rss / 1000_000});
} else if (rss >= 1000) {
- try stderr.writer().print(" MaxRSS:{d}K", .{rss / 1000});
+ try stderr.deprecatedWriter().print(" MaxRSS:{d}K", .{rss / 1000});
} else {
- try stderr.writer().print(" MaxRSS:{d}B", .{rss});
+ try stderr.deprecatedWriter().print(" MaxRSS:{d}B", .{rss});
}
try ttyconf.setColor(stderr, .reset);
}
@@ -866,7 +866,7 @@ fn printStepStatus(
if (skip == .skipped_oom) {
try stderr.writeAll(" (not enough memory)");
try ttyconf.setColor(stderr, .dim);
- try stderr.writer().print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss });
+ try stderr.deprecatedWriter().print(" upper bound of {d} exceeded runner limit ({d})", .{ s.max_rss, run.max_rss });
try ttyconf.setColor(stderr, .yellow);
}
try stderr.writeAll("\n");
@@ -883,18 +883,18 @@ fn printStepFailure(
) !void {
if (s.result_error_bundle.errorMessageCount() > 0) {
try ttyconf.setColor(stderr, .red);
- try stderr.writer().print(" {d} errors\n", .{
+ try stderr.deprecatedWriter().print(" {d} errors\n", .{
s.result_error_bundle.errorMessageCount(),
});
try ttyconf.setColor(stderr, .reset);
} else if (!s.test_results.isSuccess()) {
- try stderr.writer().print(" {d}/{d} passed", .{
+ try stderr.deprecatedWriter().print(" {d}/{d} passed", .{
s.test_results.passCount(), s.test_results.test_count,
});
if (s.test_results.fail_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .red);
- try stderr.writer().print("{d} failed", .{
+ try stderr.deprecatedWriter().print("{d} failed", .{
s.test_results.fail_count,
});
try ttyconf.setColor(stderr, .reset);
@@ -902,7 +902,7 @@ fn printStepFailure(
if (s.test_results.skip_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .yellow);
- try stderr.writer().print("{d} skipped", .{
+ try stderr.deprecatedWriter().print("{d} skipped", .{
s.test_results.skip_count,
});
try ttyconf.setColor(stderr, .reset);
@@ -910,7 +910,7 @@ fn printStepFailure(
if (s.test_results.leak_count > 0) {
try stderr.writeAll(", ");
try ttyconf.setColor(stderr, .red);
- try stderr.writer().print("{d} leaked", .{
+ try stderr.deprecatedWriter().print("{d} leaked", .{
s.test_results.leak_count,
});
try ttyconf.setColor(stderr, .reset);
@@ -992,7 +992,7 @@ fn printTreeStep(
if (s.dependencies.items.len == 0) {
try stderr.writeAll(" (reused)\n");
} else {
- try stderr.writer().print(" (+{d} more reused dependencies)\n", .{
+ try stderr.deprecatedWriter().print(" (+{d} more reused dependencies)\n", .{
s.dependencies.items.len,
});
}
@@ -1209,7 +1209,7 @@ pub fn printErrorMessages(
var indent: usize = 0;
while (step_stack.pop()) |s| : (indent += 1) {
if (indent > 0) {
- try stderr.writer().writeByteNTimes(' ', (indent - 1) * 3);
+ try stderr.deprecatedWriter().writeByteNTimes(' ', (indent - 1) * 3);
try printChildNodePrefix(stderr, ttyconf);
}
@@ -1231,7 +1231,7 @@ pub fn printErrorMessages(
}
if (!prominent_compile_errors and failing_step.result_error_bundle.errorMessageCount() > 0) {
- try failing_step.result_error_bundle.renderToWriter(options, stderr.writer());
+ try failing_step.result_error_bundle.renderToWriter(options, stderr.deprecatedWriter());
}
for (failing_step.result_error_msgs.items) |msg| {
lib/compiler/libc.zig
@@ -40,7 +40,7 @@ pub fn main() !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_libc);
return std.process.cleanExit();
} else if (mem.eql(u8, arg, "-target")) {
@@ -97,7 +97,7 @@ pub fn main() !void {
fatal("no include dirs detected for target {s}", .{zig_target});
}
- var bw = std.io.bufferedWriter(std.fs.File.stdout().writer());
+ var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
var writer = bw.writer();
for (libc_dirs.libc_include_dir_list) |include_dir| {
try writer.writeAll(include_dir);
@@ -125,7 +125,7 @@ pub fn main() !void {
};
defer libc.deinit(gpa);
- var bw = std.io.bufferedWriter(std.fs.File.stdout().writer());
+ var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
try libc.render(bw.writer());
try bw.flush();
}
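Pattern note: the same rename applies on the write side: `File.writer()` becomes `deprecatedWriter()`, and it still composes with `std.io.bufferedWriter` as before. A minimal sketch of the buffered-stdout pattern used above:

```zig
const std = @import("std");

pub fn main() !void {
    // Old: std.io.bufferedWriter(std.fs.File.stdout().writer())
    var bw = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
    const w = bw.writer();
    try w.writeAll("hello\n");
    try bw.flush();
}
```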
lib/compiler/objcopy.zig
@@ -635,11 +635,11 @@ const HexWriter = struct {
const payload_bytes = self.getPayloadBytes();
assert(payload_bytes.len <= MAX_PAYLOAD_LEN);
- const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3s}{4X:0>2}" ++ linesep, .{
+ const line = try std.fmt.bufPrint(&outbuf, ":{0X:0>2}{1X:0>4}{2X:0>2}{3X}{4X:0>2}" ++ linesep, .{
@as(u8, @intCast(payload_bytes.len)),
self.address,
@intFromEnum(self.payload),
- std.fmt.fmtSliceHexUpper(payload_bytes),
+ payload_bytes,
self.checksum(),
});
try file.writeAll(line);
@@ -1495,7 +1495,7 @@ const ElfFileHelper = struct {
if (size < prefix.len) return null;
try in_file.seekTo(offset);
- var section_reader = std.io.limitedReader(in_file.reader(), size);
+ var section_reader = std.io.limitedReader(in_file.deprecatedReader(), size);
// allocate as large as decompressed data. if the compression doesn't fit, keep the data uncompressed.
const compressed_data = try allocator.alignedAlloc(u8, .@"8", @intCast(size));
lib/compiler/reduce.zig
@@ -68,7 +68,7 @@ pub fn main() !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage);
return std.process.cleanExit();
} else if (mem.eql(u8, arg, "--")) {
lib/compiler/test_runner.zig
@@ -328,7 +328,7 @@ pub fn mainSimple() anyerror!void {
passed += 1;
}
if (enable_print and print_summary) {
- stderr.writer().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {};
+ stderr.deprecatedWriter().print("{} passed, {} skipped, {} failed\n", .{ passed, skipped, failed }) catch {};
}
if (failed != 0) std.process.exit(1);
}
lib/docs/wasm/markdown/renderer.zig
@@ -1,6 +1,7 @@
const std = @import("std");
const Document = @import("Document.zig");
const Node = Document.Node;
+const assert = std.debug.assert;
/// A Markdown document renderer.
///
@@ -229,18 +230,11 @@ pub fn renderInlineNodeText(
}
}
-pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter(formatHtml) {
+pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter([]const u8, formatHtml) {
return .{ .data = bytes };
}
-fn formatHtml(
- bytes: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = fmt;
- _ = options;
+fn formatHtml(bytes: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
for (bytes) |b| {
switch (b) {
            '<' => try writer.writeAll("&lt;"),
lib/docs/wasm/markdown.zig
@@ -160,7 +160,7 @@ fn mainImpl() !void {
var doc = try parser.endInput();
defer doc.deinit(gpa);
- var stdout_buf = std.io.bufferedWriter(std.fs.File.stdout().writer());
+ var stdout_buf = std.io.bufferedWriter(std.fs.File.stdout().deprecatedWriter());
try doc.render(stdout_buf.writer());
try stdout_buf.flush();
}
lib/init/src/root.zig
@@ -5,7 +5,7 @@ pub fn bufferedPrint() !void {
// Stdout is for the actual output of your application, for example if you
// are implementing gzip, then only the compressed bytes should be sent to
// stdout, not any debugging messages.
- const stdout_file = std.fs.File.stdout().writer();
+ const stdout_file = std.fs.File.stdout().deprecatedWriter();
// Buffering can improve performance significantly in print-heavy programs.
var bw = std.io.bufferedWriter(stdout_file);
const stdout = bw.writer();
lib/std/Build/Cache/DepTokenizer.zig
@@ -7,6 +7,7 @@ state: State = .lhs,
const std = @import("std");
const testing = std.testing;
const assert = std.debug.assert;
+const Allocator = std.mem.Allocator;
pub fn next(self: *Tokenizer) ?Token {
var start = self.index;
@@ -362,7 +363,7 @@ pub const Token = union(enum) {
};
/// Resolve escapes in target or prereq. Only valid with .target_must_resolve or .prereq_must_resolve.
- pub fn resolve(self: Token, writer: anytype) @TypeOf(writer).Error!void {
+ pub fn resolve(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void {
switch (self) {
.target_must_resolve => |bytes| {
var state: enum { start, escape, dollar } = .start;
@@ -372,27 +373,27 @@ pub const Token = union(enum) {
switch (c) {
'\\' => state = .escape,
'$' => state = .dollar,
- else => try writer.writeByte(c),
+ else => try list.append(gpa, c),
}
},
.escape => {
switch (c) {
' ', '#', '\\' => {},
'$' => {
- try writer.writeByte('\\');
+ try list.append(gpa, '\\');
state = .dollar;
continue;
},
- else => try writer.writeByte('\\'),
+ else => try list.append(gpa, '\\'),
}
- try writer.writeByte(c);
+ try list.append(gpa, c);
state = .start;
},
.dollar => {
- try writer.writeByte('$');
+ try list.append(gpa, '$');
switch (c) {
'$' => {},
- else => try writer.writeByte(c),
+ else => try list.append(gpa, c),
}
state = .start;
},
@@ -406,19 +407,19 @@ pub const Token = union(enum) {
.start => {
switch (c) {
'\\' => state = .escape,
- else => try writer.writeByte(c),
+ else => try list.append(gpa, c),
}
},
.escape => {
switch (c) {
' ' => {},
'\\' => {
- try writer.writeByte(c);
+ try list.append(gpa, c);
continue;
},
- else => try writer.writeByte('\\'),
+ else => try list.append(gpa, '\\'),
}
- try writer.writeByte(c);
+ try list.append(gpa, c);
state = .start;
},
}
@@ -428,20 +429,20 @@ pub const Token = union(enum) {
}
}
- pub fn printError(self: Token, writer: anytype) @TypeOf(writer).Error!void {
+ pub fn printError(self: Token, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) error{OutOfMemory}!void {
switch (self) {
.target, .target_must_resolve, .prereq, .prereq_must_resolve => unreachable, // not an error
.incomplete_quoted_prerequisite,
.incomplete_target,
=> |index_and_bytes| {
- try writer.print("{s} '", .{self.errStr()});
+ try list.print(gpa, "{s} '", .{self.errStr()});
if (self == .incomplete_target) {
const tmp = Token{ .target_must_resolve = index_and_bytes.bytes };
- try tmp.resolve(writer);
+ try tmp.resolve(gpa, list);
} else {
- try printCharValues(writer, index_and_bytes.bytes);
+ try printCharValues(gpa, list, index_and_bytes.bytes);
}
- try writer.print("' at position {d}", .{index_and_bytes.index});
+ try list.print(gpa, "' at position {d}", .{index_and_bytes.index});
},
.invalid_target,
.bad_target_escape,
@@ -450,9 +451,9 @@ pub const Token = union(enum) {
.incomplete_escape,
.expected_colon,
=> |index_and_char| {
- try writer.writeAll("illegal char ");
- try printUnderstandableChar(writer, index_and_char.char);
- try writer.print(" at position {d}: {s}", .{ index_and_char.index, self.errStr() });
+ try list.appendSlice(gpa, "illegal char ");
+ try printUnderstandableChar(gpa, list, index_and_char.char);
+ try list.print(gpa, " at position {d}: {s}", .{ index_and_char.index, self.errStr() });
},
}
}
@@ -1026,41 +1027,41 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void {
defer arena_allocator.deinit();
var it: Tokenizer = .{ .bytes = input };
- var buffer = std.ArrayList(u8).init(arena);
- var resolve_buf = std.ArrayList(u8).init(arena);
+ var buffer: std.ArrayListUnmanaged(u8) = .empty;
+ var resolve_buf: std.ArrayListUnmanaged(u8) = .empty;
var i: usize = 0;
while (it.next()) |token| {
- if (i != 0) try buffer.appendSlice("\n");
+ if (i != 0) try buffer.appendSlice(arena, "\n");
switch (token) {
.target, .prereq => |bytes| {
- try buffer.appendSlice(@tagName(token));
- try buffer.appendSlice(" = {");
+ try buffer.appendSlice(arena, @tagName(token));
+ try buffer.appendSlice(arena, " = {");
for (bytes) |b| {
- try buffer.append(printable_char_tab[b]);
+ try buffer.append(arena, printable_char_tab[b]);
}
- try buffer.appendSlice("}");
+ try buffer.appendSlice(arena, "}");
},
.target_must_resolve => {
- try buffer.appendSlice("target = {");
- try token.resolve(resolve_buf.writer());
+ try buffer.appendSlice(arena, "target = {");
+ try token.resolve(arena, &resolve_buf);
for (resolve_buf.items) |b| {
- try buffer.append(printable_char_tab[b]);
+ try buffer.append(arena, printable_char_tab[b]);
}
resolve_buf.items.len = 0;
- try buffer.appendSlice("}");
+ try buffer.appendSlice(arena, "}");
},
.prereq_must_resolve => {
- try buffer.appendSlice("prereq = {");
- try token.resolve(resolve_buf.writer());
+ try buffer.appendSlice(arena, "prereq = {");
+ try token.resolve(arena, &resolve_buf);
for (resolve_buf.items) |b| {
- try buffer.append(printable_char_tab[b]);
+ try buffer.append(arena, printable_char_tab[b]);
}
resolve_buf.items.len = 0;
- try buffer.appendSlice("}");
+ try buffer.appendSlice(arena, "}");
},
else => {
- try buffer.appendSlice("ERROR: ");
- try token.printError(buffer.writer());
+ try buffer.appendSlice(arena, "ERROR: ");
+ try token.printError(arena, &buffer);
break;
},
}
@@ -1072,134 +1073,18 @@ fn depTokenizer(input: []const u8, expect: []const u8) !void {
return;
}
- const out = std.fs.File.stderr().writer();
-
- try out.writeAll("\n");
- try printSection(out, "<<<< input", input);
- try printSection(out, "==== expect", expect);
- try printSection(out, ">>>> got", buffer.items);
- try printRuler(out);
-
- try testing.expect(false);
-}
-
-fn printSection(out: anytype, label: []const u8, bytes: []const u8) !void {
- try printLabel(out, label, bytes);
- try hexDump(out, bytes);
- try printRuler(out);
- try out.writeAll(bytes);
- try out.writeAll("\n");
-}
-
-fn printLabel(out: anytype, label: []const u8, bytes: []const u8) !void {
- var buf: [80]u8 = undefined;
- const text = try std.fmt.bufPrint(buf[0..], "{s} {d} bytes ", .{ label, bytes.len });
- try out.writeAll(text);
- var i: usize = text.len;
- const end = 79;
- while (i < end) : (i += 1) {
- try out.writeAll(&[_]u8{label[0]});
- }
- try out.writeAll("\n");
-}
-
-fn printRuler(out: anytype) !void {
- var i: usize = 0;
- const end = 79;
- while (i < end) : (i += 1) {
- try out.writeAll("-");
- }
- try out.writeAll("\n");
-}
-
-fn hexDump(out: anytype, bytes: []const u8) !void {
- const n16 = bytes.len >> 4;
- var line: usize = 0;
- var offset: usize = 0;
- while (line < n16) : (line += 1) {
- try hexDump16(out, offset, bytes[offset..][0..16]);
- offset += 16;
- }
-
- const n = bytes.len & 0x0f;
- if (n > 0) {
- try printDecValue(out, offset, 8);
- try out.writeAll(":");
- try out.writeAll(" ");
- const end1 = @min(offset + n, offset + 8);
- for (bytes[offset..end1]) |b| {
- try out.writeAll(" ");
- try printHexValue(out, b, 2);
- }
- const end2 = offset + n;
- if (end2 > end1) {
- try out.writeAll(" ");
- for (bytes[end1..end2]) |b| {
- try out.writeAll(" ");
- try printHexValue(out, b, 2);
- }
- }
- const short = 16 - n;
- var i: usize = 0;
- while (i < short) : (i += 1) {
- try out.writeAll(" ");
- }
- if (end2 > end1) {
- try out.writeAll(" |");
- } else {
- try out.writeAll(" |");
- }
- try printCharValues(out, bytes[offset..end2]);
- try out.writeAll("|\n");
- offset += n;
- }
-
- try printDecValue(out, offset, 8);
- try out.writeAll(":");
- try out.writeAll("\n");
+ try testing.expectEqualStrings(expect, buffer.items);
}
-fn hexDump16(out: anytype, offset: usize, bytes: []const u8) !void {
- try printDecValue(out, offset, 8);
- try out.writeAll(":");
- try out.writeAll(" ");
- for (bytes[0..8]) |b| {
- try out.writeAll(" ");
- try printHexValue(out, b, 2);
- }
- try out.writeAll(" ");
- for (bytes[8..16]) |b| {
- try out.writeAll(" ");
- try printHexValue(out, b, 2);
- }
- try out.writeAll(" |");
- try printCharValues(out, bytes);
- try out.writeAll("|\n");
-}
-
-fn printDecValue(out: anytype, value: u64, width: u8) !void {
- var buffer: [20]u8 = undefined;
- const len = std.fmt.formatIntBuf(buffer[0..], value, 10, .lower, .{ .width = width, .fill = '0' });
- try out.writeAll(buffer[0..len]);
-}
-
-fn printHexValue(out: anytype, value: u64, width: u8) !void {
- var buffer: [16]u8 = undefined;
- const len = std.fmt.formatIntBuf(buffer[0..], value, 16, .lower, .{ .width = width, .fill = '0' });
- try out.writeAll(buffer[0..len]);
-}
-
-fn printCharValues(out: anytype, bytes: []const u8) !void {
- for (bytes) |b| {
- try out.writeAll(&[_]u8{printable_char_tab[b]});
- }
+fn printCharValues(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), bytes: []const u8) !void {
+ for (bytes) |b| try list.append(gpa, printable_char_tab[b]);
}
-fn printUnderstandableChar(out: anytype, char: u8) !void {
+fn printUnderstandableChar(gpa: Allocator, list: *std.ArrayListUnmanaged(u8), char: u8) !void {
if (std.ascii.isPrint(char)) {
- try out.print("'{c}'", .{char});
+ try list.print(gpa, "'{c}'", .{char});
} else {
- try out.print("\\x{X:0>2}", .{char});
+ try list.print(gpa, "\\x{X:0>2}", .{char});
}
}
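Pattern note: the tokenizer's helpers switch from writing through an `anytype` writer to appending into an `std.ArrayListUnmanaged(u8)` with an explicitly passed allocator (`append`, `appendSlice`, and `print` all take `gpa`). A small sketch of that calling convention; `appendEscapedByte` is a hypothetical helper mirroring `printUnderstandableChar` above:

```zig
const std = @import("std");

fn appendEscapedByte(gpa: std.mem.Allocator, list: *std.ArrayListUnmanaged(u8), byte: u8) error{OutOfMemory}!void {
    if (std.ascii.isPrint(byte)) {
        try list.print(gpa, "'{c}'", .{byte});
    } else {
        try list.print(gpa, "\\x{X:0>2}", .{byte});
    }
}

test appendEscapedByte {
    const gpa = std.testing.allocator;
    var list: std.ArrayListUnmanaged(u8) = .empty;
    defer list.deinit(gpa);
    try appendEscapedByte(gpa, &list, 0x07);
    try std.testing.expectEqualStrings("\\x07", list.items);
}
```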
lib/std/Build/Cache/Directory.zig
@@ -1,5 +1,6 @@
const Directory = @This();
const std = @import("../../std.zig");
+const assert = std.debug.assert;
const fs = std.fs;
const fmt = std.fmt;
const Allocator = std.mem.Allocator;
@@ -55,14 +56,8 @@ pub fn closeAndFree(self: *Directory, gpa: Allocator) void {
self.* = undefined;
}
-pub fn format(
- self: Directory,
- comptime fmt_string: []const u8,
- options: fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = options;
- if (fmt_string.len != 0) fmt.invalidFmtError(fmt_string, self);
+pub fn format(self: Directory, writer: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ comptime assert(f.len == 0);
if (self.path) |p| {
try writer.writeAll(p);
try writer.writeAll(fs.path.sep_str);
lib/std/Build/Cache/Path.zig
@@ -1,3 +1,10 @@
+const Path = @This();
+const std = @import("../../std.zig");
+const assert = std.debug.assert;
+const fs = std.fs;
+const Allocator = std.mem.Allocator;
+const Cache = std.Build.Cache;
+
root_dir: Cache.Directory,
/// The path, relative to the root dir, that this `Path` represents.
/// Empty string means the root_dir is the path.
@@ -133,38 +140,32 @@ pub fn makePath(p: Path, sub_path: []const u8) !void {
}
pub fn toString(p: Path, allocator: Allocator) Allocator.Error![]u8 {
- return std.fmt.allocPrint(allocator, "{}", .{p});
+ return std.fmt.allocPrint(allocator, "{f}", .{p});
}
pub fn toStringZ(p: Path, allocator: Allocator) Allocator.Error![:0]u8 {
- return std.fmt.allocPrintZ(allocator, "{}", .{p});
+ return std.fmt.allocPrintSentinel(allocator, "{f}", .{p}, 0);
}
-pub fn format(
- self: Path,
- comptime fmt_string: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- if (fmt_string.len == 1) {
+pub fn format(self: Path, writer: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ if (f.len == 1) {
// Quote-escape the string.
- const stringEscape = std.zig.stringEscape;
- const f = switch (fmt_string[0]) {
- 'q' => "",
- '\'' => "\'",
- else => @compileError("unsupported format string: " ++ fmt_string),
+ const zigEscape = switch (f[0]) {
+ 'q' => std.zig.stringEscape,
+ '\'' => std.zig.charEscape,
+ else => @compileError("unsupported format string: " ++ f),
};
if (self.root_dir.path) |p| {
- try stringEscape(p, f, options, writer);
- if (self.sub_path.len > 0) try stringEscape(fs.path.sep_str, f, options, writer);
+ try zigEscape(p, writer);
+ if (self.sub_path.len > 0) try zigEscape(fs.path.sep_str, writer);
}
if (self.sub_path.len > 0) {
- try stringEscape(self.sub_path, f, options, writer);
+ try zigEscape(self.sub_path, writer);
}
return;
}
- if (fmt_string.len > 0)
- std.fmt.invalidFmtError(fmt_string, self);
+ if (f.len > 0)
+ std.fmt.invalidFmtError(f, self);
if (std.fs.path.isAbsolute(self.sub_path)) {
try writer.writeAll(self.sub_path);
return;
@@ -223,9 +224,3 @@ pub const TableAdapter = struct {
return a.eql(b);
}
};
-
-const Path = @This();
-const std = @import("../../std.zig");
-const fs = std.fs;
-const Allocator = std.mem.Allocator;
-const Cache = std.Build.Cache;
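Pattern note: `std.fmt.allocPrintZ` is replaced by `std.fmt.allocPrintSentinel`, which takes the sentinel as an explicit final argument; `{f}` is used for values with a `format` method, as in `toString`/`toStringZ` above. A minimal sketch of the sentinel variant with plain string arguments:

```zig
const std = @import("std");

test "allocPrintSentinel takes the sentinel explicitly" {
    const gpa = std.testing.allocator;
    // Old: std.fmt.allocPrintZ(gpa, "{s}/{s}", .{ "a", "b" })
    const z = try std.fmt.allocPrintSentinel(gpa, "{s}/{s}", .{ "a", "b" }, 0);
    defer gpa.free(z);
    try std.testing.expectEqualStrings("a/b", z);
}
```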
lib/std/Build/Fuzz/WebServer.zig
@@ -170,7 +170,7 @@ fn serveFile(
// We load the file with every request so that the user can make changes to the file
// and refresh the HTML page without restarting this server.
const file_contents = ws.zig_lib_directory.handle.readFileAlloc(gpa, name, 10 * 1024 * 1024) catch |err| {
- log.err("failed to read '{}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) });
+ log.err("failed to read '{f}{s}': {s}", .{ ws.zig_lib_directory, name, @errorName(err) });
return error.AlreadyReported;
};
defer gpa.free(file_contents);
@@ -251,10 +251,10 @@ fn buildWasmBinary(
"-fsingle-threaded", //
"--dep", "Walk", //
"--dep", "html_render", //
- try std.fmt.allocPrint(arena, "-Mroot={}", .{main_src_path}), //
- try std.fmt.allocPrint(arena, "-MWalk={}", .{walk_src_path}), //
+ try std.fmt.allocPrint(arena, "-Mroot={f}", .{main_src_path}), //
+ try std.fmt.allocPrint(arena, "-MWalk={f}", .{walk_src_path}), //
"--dep", "Walk", //
- try std.fmt.allocPrint(arena, "-Mhtml_render={}", .{html_render_src_path}), //
+ try std.fmt.allocPrint(arena, "-Mhtml_render={f}", .{html_render_src_path}), //
"--listen=-",
});
@@ -526,7 +526,7 @@ fn serveSourcesTar(ws: *WebServer, request: *std.http.Server.Request) !void {
for (deduped_paths) |joined_path| {
var file = joined_path.root_dir.handle.openFile(joined_path.sub_path, .{}) catch |err| {
- log.err("failed to open {}: {s}", .{ joined_path, @errorName(err) });
+ log.err("failed to open {f}: {s}", .{ joined_path, @errorName(err) });
continue;
};
defer file.close();
@@ -604,7 +604,7 @@ fn prepareTables(
const rebuilt_exe_path = run_step.rebuilt_executable.?;
var debug_info = std.debug.Info.load(gpa, rebuilt_exe_path, &gop.value_ptr.coverage) catch |err| {
- log.err("step '{s}': failed to load debug information for '{}': {s}", .{
+ log.err("step '{s}': failed to load debug information for '{f}': {s}", .{
run_step.step.name, rebuilt_exe_path, @errorName(err),
});
return error.AlreadyReported;
@@ -616,7 +616,7 @@ fn prepareTables(
.sub_path = "v/" ++ std.fmt.hex(coverage_id),
};
var coverage_file = coverage_file_path.root_dir.handle.openFile(coverage_file_path.sub_path, .{}) catch |err| {
- log.err("step '{s}': failed to load coverage file '{}': {s}", .{
+ log.err("step '{s}': failed to load coverage file '{f}': {s}", .{
run_step.step.name, coverage_file_path, @errorName(err),
});
return error.AlreadyReported;
@@ -624,7 +624,7 @@ fn prepareTables(
defer coverage_file.close();
const file_size = coverage_file.getEndPos() catch |err| {
- log.err("unable to check len of coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) });
+ log.err("unable to check len of coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) });
return error.AlreadyReported;
};
@@ -636,7 +636,7 @@ fn prepareTables(
coverage_file.handle,
0,
) catch |err| {
- log.err("failed to map coverage file '{}': {s}", .{ coverage_file_path, @errorName(err) });
+ log.err("failed to map coverage file '{f}': {s}", .{ coverage_file_path, @errorName(err) });
return error.AlreadyReported;
};
gop.value_ptr.mapped_memory = mapped_memory;
lib/std/Build/Step/CheckObject.zig
@@ -6,6 +6,7 @@ const macho = std.macho;
const math = std.math;
const mem = std.mem;
const testing = std.testing;
+const Writer = std.io.Writer;
const CheckObject = @This();
@@ -28,14 +29,14 @@ pub fn create(
const gpa = owner.allocator;
const check_object = gpa.create(CheckObject) catch @panic("OOM");
check_object.* = .{
- .step = Step.init(.{
+ .step = .init(.{
.id = base_id,
.name = "CheckObject",
.owner = owner,
.makeFn = make,
}),
.source = source.dupe(owner),
- .checks = std.ArrayList(Check).init(gpa),
+ .checks = .init(gpa),
.obj_format = obj_format,
};
check_object.source.addStepDependencies(&check_object.step);
@@ -74,13 +75,13 @@ const Action = struct {
b: *std.Build,
step: *Step,
haystack: []const u8,
- global_vars: anytype,
+ global_vars: *std.StringHashMap(u64),
) !bool {
assert(act.tag == .extract);
const hay = mem.trim(u8, haystack, " ");
const phrase = mem.trim(u8, act.phrase.resolve(b, step), " ");
- var candidate_vars = std.ArrayList(struct { name: []const u8, value: u64 }).init(b.allocator);
+ var candidate_vars: std.ArrayList(struct { name: []const u8, value: u64 }) = .init(b.allocator);
var hay_it = mem.tokenizeScalar(u8, hay, ' ');
var needle_it = mem.tokenizeScalar(u8, phrase, ' ');
@@ -153,11 +154,11 @@ const Action = struct {
/// Will return true if the `phrase` is correctly parsed into an RPN program and
/// its reduced, computed value compares using `op` with the expected value, either
/// a literal or another extracted variable.
- fn computeCmp(act: Action, b: *std.Build, step: *Step, global_vars: anytype) !bool {
+ fn computeCmp(act: Action, b: *std.Build, step: *Step, global_vars: std.StringHashMap(u64)) !bool {
const gpa = step.owner.allocator;
const phrase = act.phrase.resolve(b, step);
- var op_stack = std.ArrayList(enum { add, sub, mod, mul }).init(gpa);
- var values = std.ArrayList(u64).init(gpa);
+ var op_stack: std.ArrayList(enum { add, sub, mod, mul }) = .init(gpa);
+ var values: std.ArrayList(u64) = .init(gpa);
var it = mem.tokenizeScalar(u8, phrase, ' ');
while (it.next()) |next| {
@@ -230,17 +231,15 @@ const ComputeCompareExpected = struct {
},
pub fn format(
- value: @This(),
+ value: ComputeCompareExpected,
+ bw: *Writer,
comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
) !void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, value);
- _ = options;
- try writer.print("{s} ", .{@tagName(value.op)});
+ try bw.print("{s} ", .{@tagName(value.op)});
switch (value.value) {
- .variable => |name| try writer.writeAll(name),
- .literal => |x| try writer.print("{x}", .{x}),
+ .variable => |name| try bw.writeAll(name),
+ .literal => |x| try bw.print("{x}", .{x}),
}
}
};
@@ -248,56 +247,63 @@ const ComputeCompareExpected = struct {
const Check = struct {
kind: Kind,
payload: Payload,
- data: std.ArrayList(u8),
- actions: std.ArrayList(Action),
+ allocator: Allocator,
+ data: std.ArrayListUnmanaged(u8),
+ actions: std.ArrayListUnmanaged(Action),
fn create(allocator: Allocator, kind: Kind) Check {
return .{
.kind = kind,
.payload = .{ .none = {} },
- .data = std.ArrayList(u8).init(allocator),
- .actions = std.ArrayList(Action).init(allocator),
+ .allocator = allocator,
+ .data = .empty,
+ .actions = .empty,
};
}
- fn dumpSection(allocator: Allocator, name: [:0]const u8) Check {
- var check = Check.create(allocator, .dump_section);
+ fn dumpSection(gpa: Allocator, name: [:0]const u8) Check {
+ var check = Check.create(gpa, .dump_section);
const off: u32 = @intCast(check.data.items.len);
- check.data.writer().print("{s}\x00", .{name}) catch @panic("OOM");
+ check.data.print(gpa, "{s}\x00", .{name}) catch @panic("OOM");
check.payload = .{ .dump_section = off };
return check;
}
fn extract(check: *Check, phrase: SearchPhrase) void {
- check.actions.append(.{
+ const gpa = check.allocator;
+ check.actions.append(gpa, .{
.tag = .extract,
.phrase = phrase,
}) catch @panic("OOM");
}
fn exact(check: *Check, phrase: SearchPhrase) void {
- check.actions.append(.{
+ const gpa = check.allocator;
+ check.actions.append(gpa, .{
.tag = .exact,
.phrase = phrase,
}) catch @panic("OOM");
}
fn contains(check: *Check, phrase: SearchPhrase) void {
- check.actions.append(.{
+ const gpa = check.allocator;
+ check.actions.append(gpa, .{
.tag = .contains,
.phrase = phrase,
}) catch @panic("OOM");
}
fn notPresent(check: *Check, phrase: SearchPhrase) void {
- check.actions.append(.{
+ const gpa = check.allocator;
+ check.actions.append(gpa, .{
.tag = .not_present,
.phrase = phrase,
}) catch @panic("OOM");
}
fn computeCmp(check: *Check, phrase: SearchPhrase, expected: ComputeCompareExpected) void {
- check.actions.append(.{
+ const gpa = check.allocator;
+ check.actions.append(gpa, .{
.tag = .compute_cmp,
.phrase = phrase,
.expected = expected,
@@ -565,9 +571,9 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
null,
.of(u64),
null,
- ) catch |err| return step.fail("unable to read '{'}': {s}", .{ src_path, @errorName(err) });
+ ) catch |err| return step.fail("unable to read '{f'}': {s}", .{ src_path, @errorName(err) });
- var vars = std.StringHashMap(u64).init(gpa);
+ var vars: std.StringHashMap(u64) = .init(gpa);
for (check_object.checks.items) |chk| {
if (chk.kind == .compute_compare) {
assert(chk.actions.items.len == 1);
@@ -581,7 +587,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= comparison failed for action: ===========
- \\{s} {}
+ \\{s} {f}
\\===================================================
, .{ act.phrase.resolve(b, step), act.expected.? });
}
@@ -600,7 +606,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
// we either format message string with escaped codes, or not to aid debugging
// the failed test.
const fmtMessageString = struct {
- fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(formatMessageString) {
+ fn fmtMessageString(kind: Check.Kind, msg: []const u8) std.fmt.Formatter(Ctx, formatMessageString) {
return .{ .data = .{
.kind = kind,
.msg = msg,
@@ -612,17 +618,10 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
msg: []const u8,
};
- fn formatMessageString(
- ctx: Ctx,
- comptime unused_fmt_string: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = unused_fmt_string;
- _ = options;
+ fn formatMessageString(ctx: Ctx, w: *Writer) !void {
switch (ctx.kind) {
- .dump_section => try writer.print("{s}", .{std.fmt.fmtSliceEscapeLower(ctx.msg)}),
- else => try writer.writeAll(ctx.msg),
+ .dump_section => try w.print("{f}", .{std.ascii.hexEscape(ctx.msg, .lower)}),
+ else => try w.writeAll(ctx.msg),
}
}
}.fmtMessageString;
@@ -637,11 +636,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find: ==========================
- \\{s}
+ \\{f}
\\========= but parsed file does not contain it: =======
- \\{s}
+ \\{f}
\\========= file path: =================================
- \\{}
+ \\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@@ -657,11 +656,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find: ==========================
- \\*{s}*
+ \\*{f}*
\\========= but parsed file does not contain it: =======
- \\{s}
+ \\{f}
\\========= file path: =================================
- \\{}
+ \\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@@ -676,11 +675,11 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected not to find: ===================
- \\{s}
+ \\{f}
\\========= but parsed file does contain it: ========
- \\{s}
+ \\{f}
\\========= file path: ==============================
- \\{}
+ \\{f}
, .{
fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
@@ -696,13 +695,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return step.fail(
\\
\\========= expected to find and extract: ==============
- \\{s}
+ \\{f}
\\========= but parsed file does not contain it: =======
- \\{s}
+ \\{f}
\\========= file path: ==============================
- \\{}
+ \\{f}
, .{
- act.phrase.resolve(b, step),
+ fmtMessageString(chk.kind, act.phrase.resolve(b, step)),
fmtMessageString(chk.kind, output),
src_path,
});
@@ -755,14 +754,14 @@ const MachODumper = struct {
},
.SYMTAB => {
const lc = cmd.cast(macho.symtab_command).?;
- const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(ctx.data.ptr + lc.symoff))[0..lc.nsyms];
+ const symtab = @as([*]align(1) const macho.nlist_64, @ptrCast(ctx.data[lc.symoff..].ptr))[0..lc.nsyms];
const strtab = ctx.data[lc.stroff..][0..lc.strsize];
try ctx.symtab.appendUnalignedSlice(ctx.gpa, symtab);
try ctx.strtab.appendSlice(ctx.gpa, strtab);
},
.DYSYMTAB => {
const lc = cmd.cast(macho.dysymtab_command).?;
- const indexes = @as([*]align(1) const u32, @ptrCast(ctx.data.ptr + lc.indirectsymoff))[0..lc.nindirectsyms];
+ const indexes = @as([*]align(1) const u32, @ptrCast(ctx.data[lc.indirectsymoff..].ptr))[0..lc.nindirectsyms];
try ctx.indsymtab.appendUnalignedSlice(ctx.gpa, indexes);
},
.LOAD_DYLIB,
@@ -780,7 +779,7 @@ const MachODumper = struct {
fn getString(ctx: ObjectContext, off: u32) [:0]const u8 {
assert(off < ctx.strtab.items.len);
- return mem.sliceTo(@as([*:0]const u8, @ptrCast(ctx.strtab.items.ptr + off)), 0);
+ return mem.sliceTo(@as([*:0]const u8, @ptrCast(ctx.strtab.items[off..].ptr)), 0);
}
fn getLoadCommandIterator(ctx: ObjectContext) macho.LoadCommandIterator {
@@ -810,7 +809,7 @@ const MachODumper = struct {
return null;
}
- fn dumpHeader(hdr: macho.mach_header_64, writer: anytype) !void {
+ fn dumpHeader(hdr: macho.mach_header_64, bw: *Writer) !void {
const cputype = switch (hdr.cputype) {
macho.CPU_TYPE_ARM64 => "ARM64",
macho.CPU_TYPE_X86_64 => "X86_64",
@@ -831,7 +830,7 @@ const MachODumper = struct {
else => "Unknown",
};
- try writer.print(
+ try bw.print(
\\header
\\cputype {s}
\\filetype {s}
@@ -846,41 +845,41 @@ const MachODumper = struct {
});
if (hdr.flags > 0) {
- if (hdr.flags & macho.MH_NOUNDEFS != 0) try writer.writeAll(" NOUNDEFS");
- if (hdr.flags & macho.MH_INCRLINK != 0) try writer.writeAll(" INCRLINK");
- if (hdr.flags & macho.MH_DYLDLINK != 0) try writer.writeAll(" DYLDLINK");
- if (hdr.flags & macho.MH_BINDATLOAD != 0) try writer.writeAll(" BINDATLOAD");
- if (hdr.flags & macho.MH_PREBOUND != 0) try writer.writeAll(" PREBOUND");
- if (hdr.flags & macho.MH_SPLIT_SEGS != 0) try writer.writeAll(" SPLIT_SEGS");
- if (hdr.flags & macho.MH_LAZY_INIT != 0) try writer.writeAll(" LAZY_INIT");
- if (hdr.flags & macho.MH_TWOLEVEL != 0) try writer.writeAll(" TWOLEVEL");
- if (hdr.flags & macho.MH_FORCE_FLAT != 0) try writer.writeAll(" FORCE_FLAT");
- if (hdr.flags & macho.MH_NOMULTIDEFS != 0) try writer.writeAll(" NOMULTIDEFS");
- if (hdr.flags & macho.MH_NOFIXPREBINDING != 0) try writer.writeAll(" NOFIXPREBINDING");
- if (hdr.flags & macho.MH_PREBINDABLE != 0) try writer.writeAll(" PREBINDABLE");
- if (hdr.flags & macho.MH_ALLMODSBOUND != 0) try writer.writeAll(" ALLMODSBOUND");
- if (hdr.flags & macho.MH_SUBSECTIONS_VIA_SYMBOLS != 0) try writer.writeAll(" SUBSECTIONS_VIA_SYMBOLS");
- if (hdr.flags & macho.MH_CANONICAL != 0) try writer.writeAll(" CANONICAL");
- if (hdr.flags & macho.MH_WEAK_DEFINES != 0) try writer.writeAll(" WEAK_DEFINES");
- if (hdr.flags & macho.MH_BINDS_TO_WEAK != 0) try writer.writeAll(" BINDS_TO_WEAK");
- if (hdr.flags & macho.MH_ALLOW_STACK_EXECUTION != 0) try writer.writeAll(" ALLOW_STACK_EXECUTION");
- if (hdr.flags & macho.MH_ROOT_SAFE != 0) try writer.writeAll(" ROOT_SAFE");
- if (hdr.flags & macho.MH_SETUID_SAFE != 0) try writer.writeAll(" SETUID_SAFE");
- if (hdr.flags & macho.MH_NO_REEXPORTED_DYLIBS != 0) try writer.writeAll(" NO_REEXPORTED_DYLIBS");
- if (hdr.flags & macho.MH_PIE != 0) try writer.writeAll(" PIE");
- if (hdr.flags & macho.MH_DEAD_STRIPPABLE_DYLIB != 0) try writer.writeAll(" DEAD_STRIPPABLE_DYLIB");
- if (hdr.flags & macho.MH_HAS_TLV_DESCRIPTORS != 0) try writer.writeAll(" HAS_TLV_DESCRIPTORS");
- if (hdr.flags & macho.MH_NO_HEAP_EXECUTION != 0) try writer.writeAll(" NO_HEAP_EXECUTION");
- if (hdr.flags & macho.MH_APP_EXTENSION_SAFE != 0) try writer.writeAll(" APP_EXTENSION_SAFE");
- if (hdr.flags & macho.MH_NLIST_OUTOFSYNC_WITH_DYLDINFO != 0) try writer.writeAll(" NLIST_OUTOFSYNC_WITH_DYLDINFO");
+ if (hdr.flags & macho.MH_NOUNDEFS != 0) try bw.writeAll(" NOUNDEFS");
+ if (hdr.flags & macho.MH_INCRLINK != 0) try bw.writeAll(" INCRLINK");
+ if (hdr.flags & macho.MH_DYLDLINK != 0) try bw.writeAll(" DYLDLINK");
+ if (hdr.flags & macho.MH_BINDATLOAD != 0) try bw.writeAll(" BINDATLOAD");
+ if (hdr.flags & macho.MH_PREBOUND != 0) try bw.writeAll(" PREBOUND");
+ if (hdr.flags & macho.MH_SPLIT_SEGS != 0) try bw.writeAll(" SPLIT_SEGS");
+ if (hdr.flags & macho.MH_LAZY_INIT != 0) try bw.writeAll(" LAZY_INIT");
+ if (hdr.flags & macho.MH_TWOLEVEL != 0) try bw.writeAll(" TWOLEVEL");
+ if (hdr.flags & macho.MH_FORCE_FLAT != 0) try bw.writeAll(" FORCE_FLAT");
+ if (hdr.flags & macho.MH_NOMULTIDEFS != 0) try bw.writeAll(" NOMULTIDEFS");
+ if (hdr.flags & macho.MH_NOFIXPREBINDING != 0) try bw.writeAll(" NOFIXPREBINDING");
+ if (hdr.flags & macho.MH_PREBINDABLE != 0) try bw.writeAll(" PREBINDABLE");
+ if (hdr.flags & macho.MH_ALLMODSBOUND != 0) try bw.writeAll(" ALLMODSBOUND");
+ if (hdr.flags & macho.MH_SUBSECTIONS_VIA_SYMBOLS != 0) try bw.writeAll(" SUBSECTIONS_VIA_SYMBOLS");
+ if (hdr.flags & macho.MH_CANONICAL != 0) try bw.writeAll(" CANONICAL");
+ if (hdr.flags & macho.MH_WEAK_DEFINES != 0) try bw.writeAll(" WEAK_DEFINES");
+ if (hdr.flags & macho.MH_BINDS_TO_WEAK != 0) try bw.writeAll(" BINDS_TO_WEAK");
+ if (hdr.flags & macho.MH_ALLOW_STACK_EXECUTION != 0) try bw.writeAll(" ALLOW_STACK_EXECUTION");
+ if (hdr.flags & macho.MH_ROOT_SAFE != 0) try bw.writeAll(" ROOT_SAFE");
+ if (hdr.flags & macho.MH_SETUID_SAFE != 0) try bw.writeAll(" SETUID_SAFE");
+ if (hdr.flags & macho.MH_NO_REEXPORTED_DYLIBS != 0) try bw.writeAll(" NO_REEXPORTED_DYLIBS");
+ if (hdr.flags & macho.MH_PIE != 0) try bw.writeAll(" PIE");
+ if (hdr.flags & macho.MH_DEAD_STRIPPABLE_DYLIB != 0) try bw.writeAll(" DEAD_STRIPPABLE_DYLIB");
+ if (hdr.flags & macho.MH_HAS_TLV_DESCRIPTORS != 0) try bw.writeAll(" HAS_TLV_DESCRIPTORS");
+ if (hdr.flags & macho.MH_NO_HEAP_EXECUTION != 0) try bw.writeAll(" NO_HEAP_EXECUTION");
+ if (hdr.flags & macho.MH_APP_EXTENSION_SAFE != 0) try bw.writeAll(" APP_EXTENSION_SAFE");
+ if (hdr.flags & macho.MH_NLIST_OUTOFSYNC_WITH_DYLDINFO != 0) try bw.writeAll(" NLIST_OUTOFSYNC_WITH_DYLDINFO");
}
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
}
- fn dumpLoadCommand(lc: macho.LoadCommandIterator.LoadCommand, index: usize, writer: anytype) !void {
+ fn dumpLoadCommand(lc: macho.LoadCommandIterator.LoadCommand, index: usize, bw: *Writer) !void {
// print header first
- try writer.print(
+ try bw.print(
\\LC {d}
\\cmd {s}
\\cmdsize {d}
@@ -889,8 +888,8 @@ const MachODumper = struct {
switch (lc.cmd()) {
.SEGMENT_64 => {
const seg = lc.cast(macho.segment_command_64).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\segname {s}
\\vmaddr {x}
\\vmsize {x}
@@ -905,8 +904,8 @@ const MachODumper = struct {
});
for (lc.getSections()) |sect| {
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\sectname {s}
\\addr {x}
\\size {x}
@@ -928,8 +927,8 @@ const MachODumper = struct {
.REEXPORT_DYLIB,
=> {
const dylib = lc.cast(macho.dylib_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\name {s}
\\timestamp {d}
\\current version {x}
@@ -944,16 +943,16 @@ const MachODumper = struct {
.MAIN => {
const main = lc.cast(macho.entry_point_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\entryoff {x}
\\stacksize {x}
, .{ main.entryoff, main.stacksize });
},
.RPATH => {
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\path {s}
, .{
lc.getRpathPathName(),
@@ -962,8 +961,8 @@ const MachODumper = struct {
.UUID => {
const uuid = lc.cast(macho.uuid_command).?;
- try writer.writeByte('\n');
- try writer.print("uuid {x}", .{std.fmt.fmtSliceHexLower(&uuid.uuid)});
+ try bw.writeByte('\n');
+ try bw.print("uuid {x}", .{&uuid.uuid});
},
.DATA_IN_CODE,
@@ -971,8 +970,8 @@ const MachODumper = struct {
.CODE_SIGNATURE,
=> {
const llc = lc.cast(macho.linkedit_data_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\dataoff {x}
\\datasize {x}
, .{ llc.dataoff, llc.datasize });
@@ -980,8 +979,8 @@ const MachODumper = struct {
.DYLD_INFO_ONLY => {
const dlc = lc.cast(macho.dyld_info_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\rebaseoff {x}
\\rebasesize {x}
\\bindoff {x}
@@ -1008,8 +1007,8 @@ const MachODumper = struct {
.SYMTAB => {
const slc = lc.cast(macho.symtab_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\symoff {x}
\\nsyms {x}
\\stroff {x}
@@ -1024,8 +1023,8 @@ const MachODumper = struct {
.DYSYMTAB => {
const dlc = lc.cast(macho.dysymtab_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\ilocalsym {x}
\\nlocalsym {x}
\\iextdefsym {x}
@@ -1048,8 +1047,8 @@ const MachODumper = struct {
.BUILD_VERSION => {
const blc = lc.cast(macho.build_version_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\platform {s}
\\minos {d}.{d}.{d}
\\sdk {d}.{d}.{d}
@@ -1065,12 +1064,12 @@ const MachODumper = struct {
blc.ntools,
});
for (lc.getBuildVersionTools()) |tool| {
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
switch (tool.tool) {
- .CLANG, .SWIFT, .LD, .LLD, .ZIG => try writer.print("tool {s}\n", .{@tagName(tool.tool)}),
- else => |x| try writer.print("tool {d}\n", .{@intFromEnum(x)}),
+ .CLANG, .SWIFT, .LD, .LLD, .ZIG => try bw.print("tool {s}\n", .{@tagName(tool.tool)}),
+ else => |x| try bw.print("tool {d}\n", .{@intFromEnum(x)}),
}
- try writer.print(
+ try bw.print(
\\version {d}.{d}.{d}
, .{
tool.version >> 16,
@@ -1086,8 +1085,8 @@ const MachODumper = struct {
.VERSION_MIN_TVOS,
=> {
const vlc = lc.cast(macho.version_min_command).?;
- try writer.writeByte('\n');
- try writer.print(
+ try bw.writeByte('\n');
+ try bw.print(
\\version {d}.{d}.{d}
\\sdk {d}.{d}.{d}
, .{
@@ -1104,8 +1103,8 @@ const MachODumper = struct {
}
}
- fn dumpSymtab(ctx: ObjectContext, writer: anytype) !void {
- try writer.writeAll(symtab_label ++ "\n");
+ fn dumpSymtab(ctx: ObjectContext, bw: *Writer) !void {
+ try bw.writeAll(symtab_label ++ "\n");
for (ctx.symtab.items) |sym| {
const sym_name = ctx.getString(sym.n_strx);
@@ -1120,32 +1119,32 @@ const MachODumper = struct {
macho.N_STSYM => "STSYM",
else => "UNKNOWN STAB",
};
- try writer.print("{x}", .{sym.n_value});
+ try bw.print("{x}", .{sym.n_value});
if (sym.n_sect > 0) {
const sect = ctx.sections.items[sym.n_sect - 1];
- try writer.print(" ({s},{s})", .{ sect.segName(), sect.sectName() });
+ try bw.print(" ({s},{s})", .{ sect.segName(), sect.sectName() });
}
- try writer.print(" {s} (stab) {s}\n", .{ tt, sym_name });
+ try bw.print(" {s} (stab) {s}\n", .{ tt, sym_name });
} else if (sym.sect()) {
const sect = ctx.sections.items[sym.n_sect - 1];
- try writer.print("{x} ({s},{s})", .{
+ try bw.print("{x} ({s},{s})", .{
sym.n_value,
sect.segName(),
sect.sectName(),
});
- if (sym.n_desc & macho.REFERENCED_DYNAMICALLY != 0) try writer.writeAll(" [referenced dynamically]");
- if (sym.weakDef()) try writer.writeAll(" weak");
- if (sym.weakRef()) try writer.writeAll(" weakref");
+ if (sym.n_desc & macho.REFERENCED_DYNAMICALLY != 0) try bw.writeAll(" [referenced dynamically]");
+ if (sym.weakDef()) try bw.writeAll(" weak");
+ if (sym.weakRef()) try bw.writeAll(" weakref");
if (sym.ext()) {
- if (sym.pext()) try writer.writeAll(" private");
- try writer.writeAll(" external");
- } else if (sym.pext()) try writer.writeAll(" (was private external)");
- try writer.print(" {s}\n", .{sym_name});
+ if (sym.pext()) try bw.writeAll(" private");
+ try bw.writeAll(" external");
+ } else if (sym.pext()) try bw.writeAll(" (was private external)");
+ try bw.print(" {s}\n", .{sym_name});
} else if (sym.tentative()) {
const alignment = (sym.n_desc >> 8) & 0x0F;
- try writer.print(" 0x{x:0>16} (common) (alignment 2^{d})", .{ sym.n_value, alignment });
- if (sym.ext()) try writer.writeAll(" external");
- try writer.print(" {s}\n", .{sym_name});
+ try bw.print(" 0x{x:0>16} (common) (alignment 2^{d})", .{ sym.n_value, alignment });
+ if (sym.ext()) try bw.writeAll(" external");
+ try bw.print(" {s}\n", .{sym_name});
} else if (sym.undf()) {
const ordinal = @divFloor(@as(i16, @bitCast(sym.n_desc)), macho.N_SYMBOL_RESOLVER);
const import_name = blk: {
@@ -1164,10 +1163,10 @@ const MachODumper = struct {
const ext = mem.lastIndexOfScalar(u8, basename, '.') orelse basename.len;
break :blk basename[0..ext];
};
- try writer.writeAll("(undefined)");
- if (sym.weakRef()) try writer.writeAll(" weakref");
- if (sym.ext()) try writer.writeAll(" external");
- try writer.print(" {s} (from {s})\n", .{
+ try bw.writeAll("(undefined)");
+ if (sym.weakRef()) try bw.writeAll(" weakref");
+ if (sym.ext()) try bw.writeAll(" external");
+ try bw.print(" {s} (from {s})\n", .{
sym_name,
import_name,
});
@@ -1175,8 +1174,8 @@ const MachODumper = struct {
}
}
- fn dumpIndirectSymtab(ctx: ObjectContext, writer: anytype) !void {
- try writer.writeAll(indirect_symtab_label ++ "\n");
+ fn dumpIndirectSymtab(ctx: ObjectContext, bw: *Writer) !void {
+ try bw.writeAll(indirect_symtab_label ++ "\n");
var sects_buffer: [3]macho.section_64 = undefined;
const sects = blk: {
@@ -1214,35 +1213,33 @@ const MachODumper = struct {
break :blk @sizeOf(u64);
};
- try writer.print("{s},{s}\n", .{ sect.segName(), sect.sectName() });
- try writer.print("nentries {d}\n", .{end - start});
+ try bw.print("{s},{s}\n", .{ sect.segName(), sect.sectName() });
+ try bw.print("nentries {d}\n", .{end - start});
for (ctx.indsymtab.items[start..end], 0..) |index, j| {
const sym = ctx.symtab.items[index];
const addr = sect.addr + entry_size * j;
- try writer.print("0x{x} {d} {s}\n", .{ addr, index, ctx.getString(sym.n_strx) });
+ try bw.print("0x{x} {d} {s}\n", .{ addr, index, ctx.getString(sym.n_strx) });
}
}
}
- fn dumpRebaseInfo(ctx: ObjectContext, data: []const u8, writer: anytype) !void {
- var rebases = std.ArrayList(u64).init(ctx.gpa);
+ fn dumpRebaseInfo(ctx: ObjectContext, data: []const u8, bw: *Writer) !void {
+ var rebases: std.ArrayList(u64) = .init(ctx.gpa);
defer rebases.deinit();
try ctx.parseRebaseInfo(data, &rebases);
mem.sort(u64, rebases.items, {}, std.sort.asc(u64));
for (rebases.items) |addr| {
- try writer.print("0x{x}\n", .{addr});
+ try bw.print("0x{x}\n", .{addr});
}
}
fn parseRebaseInfo(ctx: ObjectContext, data: []const u8, rebases: *std.ArrayList(u64)) !void {
- var stream = std.io.fixedBufferStream(data);
- var creader = std.io.countingReader(stream.reader());
- const reader = creader.reader();
+ var br: std.io.Reader = .fixed(data);
var seg_id: ?u8 = null;
var offset: u64 = 0;
while (true) {
- const byte = reader.readByte() catch break;
+ const byte = br.takeByte() catch break;
const opc = byte & macho.REBASE_OPCODE_MASK;
const imm = byte & macho.REBASE_IMMEDIATE_MASK;
switch (opc) {
@@ -1250,17 +1247,17 @@ const MachODumper = struct {
macho.REBASE_OPCODE_SET_TYPE_IMM => {},
macho.REBASE_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB => {
seg_id = imm;
- offset = try std.leb.readUleb128(u64, reader);
+ offset = try br.takeLeb128(u64);
},
macho.REBASE_OPCODE_ADD_ADDR_IMM_SCALED => {
offset += imm * @sizeOf(u64);
},
macho.REBASE_OPCODE_ADD_ADDR_ULEB => {
- const addend = try std.leb.readUleb128(u64, reader);
+ const addend = try br.takeLeb128(u64);
offset += addend;
},
macho.REBASE_OPCODE_DO_REBASE_ADD_ADDR_ULEB => {
- const addend = try std.leb.readUleb128(u64, reader);
+ const addend = try br.takeLeb128(u64);
const seg = ctx.segments.items[seg_id.?];
const addr = seg.vmaddr + offset;
try rebases.append(addr);
@@ -1277,11 +1274,11 @@ const MachODumper = struct {
ntimes = imm;
},
macho.REBASE_OPCODE_DO_REBASE_ULEB_TIMES => {
- ntimes = try std.leb.readUleb128(u64, reader);
+ ntimes = try br.takeLeb128(u64);
},
macho.REBASE_OPCODE_DO_REBASE_ULEB_TIMES_SKIPPING_ULEB => {
- ntimes = try std.leb.readUleb128(u64, reader);
- skip = try std.leb.readUleb128(u64, reader);
+ ntimes = try br.takeLeb128(u64);
+ skip = try br.takeLeb128(u64);
},
else => unreachable,
}
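// Illustrative sketch (not part of this commit): the rebase parser above folds
// fixedBufferStream + countingReader + std.leb into one fixed std.io.Reader,
// using takeByte/takeLeb128 as shown in the hunk. The byte layout below is
// invented for the example.
const std = @import("std");

test "opcode byte followed by a ULEB128 operand" {
    const data = [_]u8{ 0x11, 0xe5, 0x8e, 0x26 }; // 0xe5 0x8e 0x26 encodes 624485
    var br: std.io.Reader = .fixed(&data);
    try std.testing.expectEqual(@as(u8, 0x11), try br.takeByte());
    try std.testing.expectEqual(@as(u64, 624_485), try br.takeLeb128(u64));
}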
@@ -1323,8 +1320,8 @@ const MachODumper = struct {
};
};
- fn dumpBindInfo(ctx: ObjectContext, data: []const u8, writer: anytype) !void {
- var bindings = std.ArrayList(Binding).init(ctx.gpa);
+ fn dumpBindInfo(ctx: ObjectContext, data: []const u8, bw: *Writer) !void {
+ var bindings: std.ArrayList(Binding) = .init(ctx.gpa);
defer {
for (bindings.items) |*b| {
b.deinit(ctx.gpa);
@@ -1334,22 +1331,20 @@ const MachODumper = struct {
try ctx.parseBindInfo(data, &bindings);
mem.sort(Binding, bindings.items, {}, Binding.lessThan);
for (bindings.items) |binding| {
- try writer.print("0x{x} [addend: {d}]", .{ binding.address, binding.addend });
- try writer.writeAll(" (");
+ try bw.print("0x{x} [addend: {d}]", .{ binding.address, binding.addend });
+ try bw.writeAll(" (");
switch (binding.tag) {
- .self => try writer.writeAll("self"),
- .exe => try writer.writeAll("main executable"),
- .flat => try writer.writeAll("flat lookup"),
- .ord => try writer.writeAll(std.fs.path.basename(ctx.imports.items[binding.ordinal - 1])),
+ .self => try bw.writeAll("self"),
+ .exe => try bw.writeAll("main executable"),
+ .flat => try bw.writeAll("flat lookup"),
+ .ord => try bw.writeAll(std.fs.path.basename(ctx.imports.items[binding.ordinal - 1])),
}
- try writer.print(") {s}\n", .{binding.name});
+ try bw.print(") {s}\n", .{binding.name});
}
}
fn parseBindInfo(ctx: ObjectContext, data: []const u8, bindings: *std.ArrayList(Binding)) !void {
- var stream = std.io.fixedBufferStream(data);
- var creader = std.io.countingReader(stream.reader());
- const reader = creader.reader();
+ var br: std.io.Reader = .fixed(data);
var seg_id: ?u8 = null;
var tag: Binding.Tag = .self;
@@ -1357,11 +1352,10 @@ const MachODumper = struct {
var offset: u64 = 0;
var addend: i64 = 0;
- var name_buf = std.ArrayList(u8).init(ctx.gpa);
+ var name_buf: std.ArrayList(u8) = .init(ctx.gpa);
defer name_buf.deinit();
- while (true) {
- const byte = reader.readByte() catch break;
+ while (br.takeByte()) |byte| {
const opc = byte & macho.BIND_OPCODE_MASK;
const imm = byte & macho.BIND_IMMEDIATE_MASK;
switch (opc) {
@@ -1382,18 +1376,19 @@ const MachODumper = struct {
},
macho.BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB => {
seg_id = imm;
- offset = try std.leb.readUleb128(u64, reader);
+ offset = try br.takeLeb128(u64);
},
macho.BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM => {
name_buf.clearRetainingCapacity();
- try reader.readUntilDelimiterArrayList(&name_buf, 0, std.math.maxInt(u32));
+ if (true) @panic("TODO fix this");
+ //try reader.readUntilDelimiterArrayList(&name_buf, 0, std.math.maxInt(u32));
try name_buf.append(0);
},
macho.BIND_OPCODE_SET_ADDEND_SLEB => {
- addend = try std.leb.readIleb128(i64, reader);
+ addend = try br.takeLeb128(i64);
},
macho.BIND_OPCODE_ADD_ADDR_ULEB => {
- const x = try std.leb.readUleb128(u64, reader);
+ const x = try br.takeLeb128(u64);
offset = @intCast(@as(i64, @intCast(offset)) + @as(i64, @bitCast(x)));
},
macho.BIND_OPCODE_DO_BIND,
@@ -1408,14 +1403,14 @@ const MachODumper = struct {
switch (opc) {
macho.BIND_OPCODE_DO_BIND => {},
macho.BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB => {
- add_addr = try std.leb.readUleb128(u64, reader);
+ add_addr = try br.takeLeb128(u64);
},
macho.BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED => {
add_addr = imm * @sizeOf(u64);
},
macho.BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB => {
- count = try std.leb.readUleb128(u64, reader);
- skip = try std.leb.readUleb128(u64, reader);
+ count = try br.takeLeb128(u64);
+ skip = try br.takeLeb128(u64);
},
else => unreachable,
}
@@ -1436,18 +1431,18 @@ const MachODumper = struct {
},
else => break,
}
- }
+ } else |_| {}
}
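// Illustrative sketch (not part of this commit): two details of parseBindInfo
// above. `while (br.takeByte()) |byte| ... else |_| {}` replaces the old
// `catch break`, and the commented-out readUntilDelimiterArrayList call could
// plausibly become takeSentinel(0), which this same diff uses in parseTrieNode.
const std = @import("std");

test "takeByte loop and sentinel-terminated names" {
    // Reading a 0-terminated name, as parseTrieNode does with takeSentinel.
    var names: std.io.Reader = .fixed("abc\x00");
    try std.testing.expectEqualStrings("abc", try names.takeSentinel(0));

    // Error-union while loop: the payload branch sees each byte, the else
    // branch swallows error.EndOfStream once the buffer is exhausted.
    var br: std.io.Reader = .fixed(&[_]u8{ 1, 2, 3 });
    var count: usize = 0;
    while (br.takeByte()) |_| {
        count += 1;
    } else |_| {}
    try std.testing.expectEqual(@as(usize, 3), count);
}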
- fn dumpExportsTrie(ctx: ObjectContext, data: []const u8, writer: anytype) !void {
+ fn dumpExportsTrie(ctx: ObjectContext, data: []const u8, bw: *Writer) !void {
const seg = ctx.getSegmentByName("__TEXT") orelse return;
var arena = std.heap.ArenaAllocator.init(ctx.gpa);
defer arena.deinit();
- var exports = std.ArrayList(Export).init(arena.allocator());
- var it = TrieIterator{ .data = data };
- try parseTrieNode(arena.allocator(), &it, "", &exports);
+ var exports: std.ArrayList(Export) = .init(arena.allocator());
+ var br: std.io.Reader = .fixed(data);
+ try parseTrieNode(arena.allocator(), &br, "", &exports);
mem.sort(Export, exports.items, {}, Export.lessThan);
@@ -1456,66 +1451,26 @@ const MachODumper = struct {
.@"export" => {
const info = exp.data.@"export";
if (info.kind != .regular or info.weak) {
- try writer.writeByte('[');
+ try bw.writeByte('[');
}
switch (info.kind) {
.regular => {},
- .absolute => try writer.writeAll("ABS, "),
- .tlv => try writer.writeAll("THREAD_LOCAL, "),
+ .absolute => try bw.writeAll("ABS, "),
+ .tlv => try bw.writeAll("THREAD_LOCAL, "),
}
- if (info.weak) try writer.writeAll("WEAK");
+ if (info.weak) try bw.writeAll("WEAK");
if (info.kind != .regular or info.weak) {
- try writer.writeAll("] ");
+ try bw.writeAll("] ");
}
- try writer.print("{x} ", .{seg.vmaddr + info.vmoffset});
+ try bw.print("{x} ", .{seg.vmaddr + info.vmoffset});
},
else => {},
}
- try writer.print("{s}\n", .{exp.name});
+ try bw.print("{s}\n", .{exp.name});
}
}
- const TrieIterator = struct {
- data: []const u8,
- pos: usize = 0,
-
- fn getStream(it: *TrieIterator) std.io.FixedBufferStream([]const u8) {
- return std.io.fixedBufferStream(it.data[it.pos..]);
- }
-
- fn readUleb128(it: *TrieIterator) !u64 {
- var stream = it.getStream();
- var creader = std.io.countingReader(stream.reader());
- const reader = creader.reader();
- const value = try std.leb.readUleb128(u64, reader);
- it.pos += creader.bytes_read;
- return value;
- }
-
- fn readString(it: *TrieIterator) ![:0]const u8 {
- var stream = it.getStream();
- const reader = stream.reader();
-
- var count: usize = 0;
- while (true) : (count += 1) {
- const byte = try reader.readByte();
- if (byte == 0) break;
- }
-
- const str = @as([*:0]const u8, @ptrCast(it.data.ptr + it.pos))[0..count :0];
- it.pos += count + 1;
- return str;
- }
-
- fn readByte(it: *TrieIterator) !u8 {
- var stream = it.getStream();
- const value = try stream.reader().readByte();
- it.pos += 1;
- return value;
- }
- };
-
const Export = struct {
name: []const u8,
tag: enum { @"export", reexport, stub_resolver },
@@ -1555,17 +1510,17 @@ const MachODumper = struct {
fn parseTrieNode(
arena: Allocator,
- it: *TrieIterator,
+ br: *std.io.Reader,
prefix: []const u8,
exports: *std.ArrayList(Export),
) !void {
- const size = try it.readUleb128();
+ const size = try br.takeLeb128(u64);
if (size > 0) {
- const flags = try it.readUleb128();
+ const flags = try br.takeLeb128(u8);
switch (flags) {
macho.EXPORT_SYMBOL_FLAGS_REEXPORT => {
- const ord = try it.readUleb128();
- const name = try arena.dupe(u8, try it.readString());
+ const ord = try br.takeLeb128(u64);
+ const name = try br.takeSentinel(0);
try exports.append(.{
.name = if (name.len > 0) name else prefix,
.tag = .reexport,
@@ -1573,8 +1528,8 @@ const MachODumper = struct {
});
},
macho.EXPORT_SYMBOL_FLAGS_STUB_AND_RESOLVER => {
- const stub_offset = try it.readUleb128();
- const resolver_offset = try it.readUleb128();
+ const stub_offset = try br.takeLeb128(u64);
+ const resolver_offset = try br.takeLeb128(u64);
try exports.append(.{
.name = prefix,
.tag = .stub_resolver,
@@ -1585,7 +1540,7 @@ const MachODumper = struct {
});
},
else => {
- const vmoff = try it.readUleb128();
+ const vmoff = try br.takeLeb128(u64);
try exports.append(.{
.name = prefix,
.tag = .@"export",
@@ -1604,21 +1559,21 @@ const MachODumper = struct {
}
}
- const nedges = try it.readByte();
+ const nedges = try br.takeByte();
for (0..nedges) |_| {
- const label = try it.readString();
- const off = try it.readUleb128();
+ const label = try br.takeSentinel(0);
+ const off = try br.takeLeb128(usize);
const prefix_label = try std.fmt.allocPrint(arena, "{s}{s}", .{ prefix, label });
- const curr = it.pos;
- it.pos = off;
- try parseTrieNode(arena, it, prefix_label, exports);
- it.pos = curr;
+ const seek = br.seek;
+ br.seek = off;
+ try parseTrieNode(arena, br, prefix_label, exports);
+ br.seek = seek;
}
}
- fn dumpSection(ctx: ObjectContext, sect: macho.section_64, writer: anytype) !void {
+ fn dumpSection(ctx: ObjectContext, sect: macho.section_64, bw: *Writer) !void {
const data = ctx.data[sect.offset..][0..sect.size];
- try writer.print("{s}", .{data});
+ try bw.print("{s}", .{data});
}
};
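// Illustrative sketch (not part of this commit): parseTrieNode above drops the
// hand-rolled TrieIterator and instead jumps around a fixed std.io.Reader by
// saving and restoring its public `seek` index. The data below is invented.
const std = @import("std");

test "absolute jumps within a fixed Reader via seek" {
    const data = [_]u8{ 10, 20, 30, 40, 50 };
    var br: std.io.Reader = .fixed(&data);
    _ = try br.takeByte(); // now positioned at index 1

    const saved = br.seek; // remember the current position
    br.seek = 3; // follow a child offset
    try std.testing.expectEqual(@as(u8, 40), try br.takeByte());
    br.seek = saved; // come back, as the recursion above does
    try std.testing.expectEqual(@as(u8, 20), try br.takeByte());
}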
@@ -1632,29 +1587,30 @@ const MachODumper = struct {
var ctx = ObjectContext{ .gpa = gpa, .data = bytes, .header = hdr };
try ctx.parse();
- var output = std.ArrayList(u8).init(gpa);
- const writer = output.writer();
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
switch (check.kind) {
.headers => {
- try ObjectContext.dumpHeader(ctx.header, writer);
+ try ObjectContext.dumpHeader(ctx.header, bw);
var it = ctx.getLoadCommandIterator();
var i: usize = 0;
while (it.next()) |cmd| {
- try ObjectContext.dumpLoadCommand(cmd, i, writer);
- try writer.writeByte('\n');
+ try ObjectContext.dumpLoadCommand(cmd, i, bw);
+ try bw.writeByte('\n');
i += 1;
}
},
.symtab => if (ctx.symtab.items.len > 0) {
- try ctx.dumpSymtab(writer);
+ try ctx.dumpSymtab(bw);
} else return step.fail("no symbol table found", .{}),
.indirect_symtab => if (ctx.symtab.items.len > 0 and ctx.indsymtab.items.len > 0) {
- try ctx.dumpIndirectSymtab(writer);
+ try ctx.dumpIndirectSymtab(bw);
} else return step.fail("no indirect symbol table found", .{}),
.dyld_rebase,
@@ -1669,26 +1625,26 @@ const MachODumper = struct {
switch (check.kind) {
.dyld_rebase => if (lc.rebase_size > 0) {
const data = ctx.data[lc.rebase_off..][0..lc.rebase_size];
- try writer.writeAll(dyld_rebase_label ++ "\n");
- try ctx.dumpRebaseInfo(data, writer);
+ try bw.writeAll(dyld_rebase_label ++ "\n");
+ try ctx.dumpRebaseInfo(data, bw);
} else return step.fail("no rebase data found", .{}),
.dyld_bind => if (lc.bind_size > 0) {
const data = ctx.data[lc.bind_off..][0..lc.bind_size];
- try writer.writeAll(dyld_bind_label ++ "\n");
- try ctx.dumpBindInfo(data, writer);
+ try bw.writeAll(dyld_bind_label ++ "\n");
+ try ctx.dumpBindInfo(data, bw);
} else return step.fail("no bind data found", .{}),
.dyld_weak_bind => if (lc.weak_bind_size > 0) {
const data = ctx.data[lc.weak_bind_off..][0..lc.weak_bind_size];
- try writer.writeAll(dyld_weak_bind_label ++ "\n");
- try ctx.dumpBindInfo(data, writer);
+ try bw.writeAll(dyld_weak_bind_label ++ "\n");
+ try ctx.dumpBindInfo(data, bw);
} else return step.fail("no weak bind data found", .{}),
.dyld_lazy_bind => if (lc.lazy_bind_size > 0) {
const data = ctx.data[lc.lazy_bind_off..][0..lc.lazy_bind_size];
- try writer.writeAll(dyld_lazy_bind_label ++ "\n");
- try ctx.dumpBindInfo(data, writer);
+ try bw.writeAll(dyld_lazy_bind_label ++ "\n");
+ try ctx.dumpBindInfo(data, bw);
} else return step.fail("no lazy bind data found", .{}),
else => unreachable,
@@ -1700,8 +1656,8 @@ const MachODumper = struct {
const lc = cmd.cast(macho.dyld_info_command).?;
if (lc.export_size > 0) {
const data = ctx.data[lc.export_off..][0..lc.export_size];
- try writer.writeAll(exports_label ++ "\n");
- try ctx.dumpExportsTrie(data, writer);
+ try bw.writeAll(exports_label ++ "\n");
+ try ctx.dumpExportsTrie(data, bw);
break :blk;
}
}
@@ -1709,20 +1665,20 @@ const MachODumper = struct {
},
.dump_section => {
- const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(check.data.items.ptr + check.payload.dump_section)), 0);
+ const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(check.data.items[check.payload.dump_section..].ptr)), 0);
const sep_index = mem.indexOfScalar(u8, name, ',') orelse
return step.fail("invalid section name: {s}", .{name});
const segname = name[0..sep_index];
const sectname = name[sep_index + 1 ..];
const sect = ctx.getSectionByName(segname, sectname) orelse
return step.fail("section '{s}' not found", .{name});
- try ctx.dumpSection(sect, writer);
+ try ctx.dumpSection(sect, bw);
},
else => return step.fail("invalid check kind for MachO file format: {s}", .{@tagName(check.kind)}),
}
- return output.toOwnedSlice();
+ return aw.toOwnedSlice();
}
};
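// Illustrative sketch (not part of this commit): throughout these dumpers,
// std.ArrayList(u8) + .writer() becomes std.io.Writer.Allocating. The dump
// routines print into &aw.interface and the caller keeps the bytes with
// toOwnedSlice, exactly the shape used above; the strings here are invented.
const std = @import("std");

test "accumulating dump output with Writer.Allocating" {
    const gpa = std.testing.allocator;

    var aw: std.io.Writer.Allocating = .init(gpa);
    defer aw.deinit(); // safe to run after toOwnedSlice, as in the dumpers
    const bw = &aw.interface;

    try bw.writeAll("header\n");
    try bw.print("cputype {s}\n", .{"ARM64"});

    const out = try aw.toOwnedSlice();
    defer gpa.free(out);
    try std.testing.expectEqualStrings("header\ncputype ARM64\n", out);
}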
@@ -1741,161 +1697,138 @@ const ElfDumper = struct {
fn parseAndDumpArchive(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
const gpa = step.owner.allocator;
- var stream = std.io.fixedBufferStream(bytes);
- const reader = stream.reader();
+ var br: std.io.Reader = .fixed(bytes);
- const magic = try reader.readBytesNoEof(elf.ARMAG.len);
- if (!mem.eql(u8, &magic, elf.ARMAG)) {
- return error.InvalidArchiveMagicNumber;
- }
+ if (!mem.eql(u8, try br.takeArray(elf.ARMAG.len), elf.ARMAG)) return error.InvalidArchiveMagicNumber;
- var ctx = ArchiveContext{
+ var ctx: ArchiveContext = .{
.gpa = gpa,
.data = bytes,
- .strtab = &[0]u8{},
+ .symtab = &.{},
+ .strtab = &.{},
+ .objects = .empty,
};
- defer {
- for (ctx.objects.items) |*object| {
- gpa.free(object.name);
- }
- ctx.objects.deinit(gpa);
- }
+ defer ctx.deinit();
- while (true) {
- if (stream.pos >= ctx.data.len) break;
- if (!mem.isAligned(stream.pos, 2)) stream.pos += 1;
-
- const hdr = try reader.readStruct(elf.ar_hdr);
+ while (br.seek < bytes.len) {
+ const hdr_seek = std.mem.alignForward(usize, br.seek, 2);
+ br.seek = hdr_seek;
+ const hdr = try br.takeStruct(elf.ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) return error.InvalidArchiveHeaderMagicNumber;
- const size = try hdr.size();
- defer {
- _ = stream.seekBy(size) catch {};
- }
+ const data = try br.take(try hdr.size());
if (hdr.isSymtab()) {
- try ctx.parseSymtab(ctx.data[stream.pos..][0..size], .p32);
+ try ctx.parseSymtab(data, .p32);
continue;
}
if (hdr.isSymtab64()) {
- try ctx.parseSymtab(ctx.data[stream.pos..][0..size], .p64);
+ try ctx.parseSymtab(data, .p64);
continue;
}
if (hdr.isStrtab()) {
- ctx.strtab = ctx.data[stream.pos..][0..size];
+ ctx.strtab = data;
continue;
}
if (hdr.isSymdef() or hdr.isSymdefSorted()) continue;
- const name = if (hdr.name()) |name|
- try gpa.dupe(u8, name)
- else if (try hdr.nameOffset()) |off|
- try gpa.dupe(u8, ctx.getString(off))
- else
- unreachable;
-
- try ctx.objects.append(gpa, .{ .name = name, .off = stream.pos, .len = size });
+ const name = hdr.name() orelse ctx.getString((try hdr.nameOffset()).?);
+ try ctx.objects.putNoClobber(gpa, hdr_seek, .{
+ .name = name,
+ .data = data,
+ });
}
- var output = std.ArrayList(u8).init(gpa);
- const writer = output.writer();
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
switch (check.kind) {
- .archive_symtab => if (ctx.symtab.items.len > 0) {
- try ctx.dumpSymtab(writer);
+ .archive_symtab => if (ctx.symtab.len > 0) {
+ try ctx.dumpSymtab(bw);
} else return step.fail("no archive symbol table found", .{}),
- else => if (ctx.objects.items.len > 0) {
- try ctx.dumpObjects(step, check, writer);
+ else => if (ctx.objects.count() > 0) {
+ try ctx.dumpObjects(step, check, bw);
} else return step.fail("empty archive", .{}),
}
- return output.toOwnedSlice();
+ return aw.toOwnedSlice();
}
const ArchiveContext = struct {
gpa: Allocator,
data: []const u8,
- symtab: std.ArrayListUnmanaged(ArSymtabEntry) = .empty,
+ symtab: []ArSymtabEntry,
strtab: []const u8,
- objects: std.ArrayListUnmanaged(struct { name: []const u8, off: usize, len: usize }) = .empty,
+ objects: std.AutoArrayHashMapUnmanaged(usize, struct { name: []const u8, data: []const u8 }),
- fn parseSymtab(ctx: *ArchiveContext, raw: []const u8, ptr_width: enum { p32, p64 }) !void {
- var stream = std.io.fixedBufferStream(raw);
- const reader = stream.reader();
+ fn deinit(ctx: *ArchiveContext) void {
+ ctx.gpa.free(ctx.symtab);
+ ctx.objects.deinit(ctx.gpa);
+ }
+
+ fn parseSymtab(ctx: *ArchiveContext, data: []const u8, ptr_width: enum { p32, p64 }) !void {
+ var br: std.io.Reader = .fixed(data);
const num = switch (ptr_width) {
- .p32 => try reader.readInt(u32, .big),
- .p64 => try reader.readInt(u64, .big),
+ .p32 => try br.takeInt(u32, .big),
+ .p64 => try br.takeInt(u64, .big),
};
const ptr_size: usize = switch (ptr_width) {
.p32 => @sizeOf(u32),
.p64 => @sizeOf(u64),
};
- const strtab_off = (num + 1) * ptr_size;
- const strtab_len = raw.len - strtab_off;
- const strtab = raw[strtab_off..][0..strtab_len];
+ _ = try br.discard(.limited(num * ptr_size));
+ const strtab = br.buffered();
- try ctx.symtab.ensureTotalCapacityPrecise(ctx.gpa, num);
+ assert(ctx.symtab.len == 0);
+ ctx.symtab = try ctx.gpa.alloc(ArSymtabEntry, num);
var stroff: usize = 0;
- for (0..num) |_| {
+ for (ctx.symtab) |*entry| {
const off = switch (ptr_width) {
- .p32 => try reader.readInt(u32, .big),
- .p64 => try reader.readInt(u64, .big),
+ .p32 => try br.takeInt(u32, .big),
+ .p64 => try br.takeInt(u64, .big),
};
- const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab.ptr + stroff)), 0);
+ const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab[stroff..].ptr)), 0);
stroff += name.len + 1;
- ctx.symtab.appendAssumeCapacity(.{ .off = off, .name = name });
+ entry.* = .{ .off = off, .name = name };
}
}
- fn dumpSymtab(ctx: ArchiveContext, writer: anytype) !void {
- var files = std.AutoHashMap(usize, []const u8).init(ctx.gpa);
- defer files.deinit();
- try files.ensureUnusedCapacity(@intCast(ctx.objects.items.len));
-
- for (ctx.objects.items) |object| {
- files.putAssumeCapacityNoClobber(object.off - @sizeOf(elf.ar_hdr), object.name);
- }
-
- var symbols = std.AutoArrayHashMap(usize, std.ArrayList([]const u8)).init(ctx.gpa);
+ fn dumpSymtab(ctx: ArchiveContext, bw: *Writer) !void {
+ var symbols: std.AutoArrayHashMap(usize, std.ArrayList([]const u8)) = .init(ctx.gpa);
defer {
- for (symbols.values()) |*value| {
- value.deinit();
- }
+ for (symbols.values()) |*value| value.deinit();
symbols.deinit();
}
- for (ctx.symtab.items) |entry| {
+ for (ctx.symtab) |entry| {
const gop = try symbols.getOrPut(@intCast(entry.off));
- if (!gop.found_existing) {
- gop.value_ptr.* = std.ArrayList([]const u8).init(ctx.gpa);
- }
+ if (!gop.found_existing) gop.value_ptr.* = .init(ctx.gpa);
try gop.value_ptr.append(entry.name);
}
- try writer.print("{s}\n", .{archive_symtab_label});
+ try bw.print("{s}\n", .{archive_symtab_label});
for (symbols.keys(), symbols.values()) |off, values| {
- try writer.print("in object {s}\n", .{files.get(off).?});
- for (values.items) |value| {
- try writer.print("{s}\n", .{value});
- }
+ try bw.print("in object {s}\n", .{ctx.objects.get(off).?.name});
+ for (values.items) |value| try bw.print("{s}\n", .{value});
}
}
- fn dumpObjects(ctx: ArchiveContext, step: *Step, check: Check, writer: anytype) !void {
- for (ctx.objects.items) |object| {
- try writer.print("object {s}\n", .{object.name});
- const output = try parseAndDumpObject(step, check, ctx.data[object.off..][0..object.len]);
+ fn dumpObjects(ctx: ArchiveContext, step: *Step, check: Check, bw: *Writer) !void {
+ for (ctx.objects.values()) |object| {
+ try bw.print("object {s}\n", .{object.name});
+ const output = try parseAndDumpObject(step, check, object.data);
defer ctx.gpa.free(output);
- try writer.print("{s}\n", .{output});
+ try bw.print("{s}\n", .{output});
}
}
fn getString(ctx: ArchiveContext, off: u32) []const u8 {
assert(off < ctx.strtab.len);
- const name = mem.sliceTo(@as([*:'\n']const u8, @ptrCast(ctx.strtab.ptr + off)), 0);
+ const name = mem.sliceTo(@as([*:'\n']const u8, @ptrCast(ctx.strtab[off..].ptr)), 0);
return name[0 .. name.len - 1];
}
@@ -1907,24 +1840,23 @@ const ElfDumper = struct {
fn parseAndDumpObject(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
const gpa = step.owner.allocator;
- var stream = std.io.fixedBufferStream(bytes);
- const reader = stream.reader();
+ var br: std.io.Reader = .fixed(bytes);
- const hdr = try reader.readStruct(elf.Elf64_Ehdr);
- if (!mem.eql(u8, hdr.e_ident[0..4], "\x7fELF")) {
- return error.InvalidMagicNumber;
- }
+ const hdr = try br.takeStruct(elf.Elf64_Ehdr);
+ if (!mem.eql(u8, hdr.e_ident[0..4], "\x7fELF")) return error.InvalidMagicNumber;
- const shdrs = @as([*]align(1) const elf.Elf64_Shdr, @ptrCast(bytes.ptr + hdr.e_shoff))[0..hdr.e_shnum];
- const phdrs = @as([*]align(1) const elf.Elf64_Phdr, @ptrCast(bytes.ptr + hdr.e_phoff))[0..hdr.e_phnum];
+ const shdrs = @as([*]align(1) const elf.Elf64_Shdr, @ptrCast(bytes[hdr.e_shoff..].ptr))[0..hdr.e_shnum];
+ const phdrs = @as([*]align(1) const elf.Elf64_Phdr, @ptrCast(bytes[hdr.e_phoff..].ptr))[0..hdr.e_phnum];
- var ctx = ObjectContext{
+ var ctx: ObjectContext = .{
.gpa = gpa,
.data = bytes,
.hdr = hdr,
.shdrs = shdrs,
.phdrs = phdrs,
.shstrtab = undefined,
+ .symtab = .{},
+ .dysymtab = .{},
};
ctx.shstrtab = ctx.getSectionContents(ctx.hdr.e_shstrndx);
@@ -1955,120 +1887,121 @@ const ElfDumper = struct {
else => {},
};
- var output = std.ArrayList(u8).init(gpa);
- const writer = output.writer();
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
switch (check.kind) {
.headers => {
- try ctx.dumpHeader(writer);
- try ctx.dumpShdrs(writer);
- try ctx.dumpPhdrs(writer);
+ try ctx.dumpHeader(bw);
+ try ctx.dumpShdrs(bw);
+ try ctx.dumpPhdrs(bw);
},
.symtab => if (ctx.symtab.symbols.len > 0) {
- try ctx.dumpSymtab(.symtab, writer);
+ try ctx.dumpSymtab(.symtab, bw);
} else return step.fail("no symbol table found", .{}),
.dynamic_symtab => if (ctx.dysymtab.symbols.len > 0) {
- try ctx.dumpSymtab(.dysymtab, writer);
+ try ctx.dumpSymtab(.dysymtab, bw);
} else return step.fail("no dynamic symbol table found", .{}),
.dynamic_section => if (ctx.getSectionByName(".dynamic")) |shndx| {
- try ctx.dumpDynamicSection(shndx, writer);
+ try ctx.dumpDynamicSection(shndx, bw);
} else return step.fail("no .dynamic section found", .{}),
.dump_section => {
- const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(check.data.items.ptr + check.payload.dump_section)), 0);
+ const name = mem.sliceTo(@as([*:0]const u8, @ptrCast(check.data.items[check.payload.dump_section..].ptr)), 0);
const shndx = ctx.getSectionByName(name) orelse return step.fail("no '{s}' section found", .{name});
- try ctx.dumpSection(shndx, writer);
+ try ctx.dumpSection(shndx, bw);
},
else => return step.fail("invalid check kind for ELF file format: {s}", .{@tagName(check.kind)}),
}
- return output.toOwnedSlice();
+ return aw.toOwnedSlice();
}
const ObjectContext = struct {
gpa: Allocator,
data: []const u8,
- hdr: elf.Elf64_Ehdr,
+ hdr: *align(1) const elf.Elf64_Ehdr,
shdrs: []align(1) const elf.Elf64_Shdr,
phdrs: []align(1) const elf.Elf64_Phdr,
shstrtab: []const u8,
- symtab: Symtab = .{},
- dysymtab: Symtab = .{},
+ symtab: Symtab,
+ dysymtab: Symtab,
- fn dumpHeader(ctx: ObjectContext, writer: anytype) !void {
- try writer.writeAll("header\n");
- try writer.print("type {s}\n", .{@tagName(ctx.hdr.e_type)});
- try writer.print("entry {x}\n", .{ctx.hdr.e_entry});
+ fn dumpHeader(ctx: ObjectContext, bw: *Writer) !void {
+ try bw.writeAll("header\n");
+ try bw.print("type {s}\n", .{@tagName(ctx.hdr.e_type)});
+ try bw.print("entry {x}\n", .{ctx.hdr.e_entry});
}
- fn dumpPhdrs(ctx: ObjectContext, writer: anytype) !void {
+ fn dumpPhdrs(ctx: ObjectContext, bw: *Writer) !void {
if (ctx.phdrs.len == 0) return;
- try writer.writeAll("program headers\n");
+ try bw.writeAll("program headers\n");
for (ctx.phdrs, 0..) |phdr, phndx| {
- try writer.print("phdr {d}\n", .{phndx});
- try writer.print("type {s}\n", .{fmtPhType(phdr.p_type)});
- try writer.print("vaddr {x}\n", .{phdr.p_vaddr});
- try writer.print("paddr {x}\n", .{phdr.p_paddr});
- try writer.print("offset {x}\n", .{phdr.p_offset});
- try writer.print("memsz {x}\n", .{phdr.p_memsz});
- try writer.print("filesz {x}\n", .{phdr.p_filesz});
- try writer.print("align {x}\n", .{phdr.p_align});
+ try bw.print("phdr {d}\n", .{phndx});
+ try bw.print("type {f}\n", .{fmtPhType(phdr.p_type)});
+ try bw.print("vaddr {x}\n", .{phdr.p_vaddr});
+ try bw.print("paddr {x}\n", .{phdr.p_paddr});
+ try bw.print("offset {x}\n", .{phdr.p_offset});
+ try bw.print("memsz {x}\n", .{phdr.p_memsz});
+ try bw.print("filesz {x}\n", .{phdr.p_filesz});
+ try bw.print("align {x}\n", .{phdr.p_align});
{
const flags = phdr.p_flags;
- try writer.writeAll("flags");
- if (flags > 0) try writer.writeByte(' ');
+ try bw.writeAll("flags");
+ if (flags > 0) try bw.writeByte(' ');
if (flags & elf.PF_R != 0) {
- try writer.writeByte('R');
+ try bw.writeByte('R');
}
if (flags & elf.PF_W != 0) {
- try writer.writeByte('W');
+ try bw.writeByte('W');
}
if (flags & elf.PF_X != 0) {
- try writer.writeByte('E');
+ try bw.writeByte('E');
}
if (flags & elf.PF_MASKOS != 0) {
- try writer.writeAll("OS");
+ try bw.writeAll("OS");
}
if (flags & elf.PF_MASKPROC != 0) {
- try writer.writeAll("PROC");
+ try bw.writeAll("PROC");
}
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
}
}
}
- fn dumpShdrs(ctx: ObjectContext, writer: anytype) !void {
+ fn dumpShdrs(ctx: ObjectContext, bw: *Writer) !void {
if (ctx.shdrs.len == 0) return;
- try writer.writeAll("section headers\n");
+ try bw.writeAll("section headers\n");
for (ctx.shdrs, 0..) |shdr, shndx| {
- try writer.print("shdr {d}\n", .{shndx});
- try writer.print("name {s}\n", .{ctx.getSectionName(shndx)});
- try writer.print("type {s}\n", .{fmtShType(shdr.sh_type)});
- try writer.print("addr {x}\n", .{shdr.sh_addr});
- try writer.print("offset {x}\n", .{shdr.sh_offset});
- try writer.print("size {x}\n", .{shdr.sh_size});
- try writer.print("addralign {x}\n", .{shdr.sh_addralign});
+ try bw.print("shdr {d}\n", .{shndx});
+ try bw.print("name {s}\n", .{ctx.getSectionName(shndx)});
+ try bw.print("type {f}\n", .{fmtShType(shdr.sh_type)});
+ try bw.print("addr {x}\n", .{shdr.sh_addr});
+ try bw.print("offset {x}\n", .{shdr.sh_offset});
+ try bw.print("size {x}\n", .{shdr.sh_size});
+ try bw.print("addralign {x}\n", .{shdr.sh_addralign});
// TODO dump formatted sh_flags
}
}
- fn dumpDynamicSection(ctx: ObjectContext, shndx: usize, writer: anytype) !void {
+ fn dumpDynamicSection(ctx: ObjectContext, shndx: usize, bw: *Writer) !void {
const shdr = ctx.shdrs[shndx];
const strtab = ctx.getSectionContents(shdr.sh_link);
const data = ctx.getSectionContents(shndx);
const nentries = @divExact(data.len, @sizeOf(elf.Elf64_Dyn));
const entries = @as([*]align(1) const elf.Elf64_Dyn, @ptrCast(data.ptr))[0..nentries];
- try writer.writeAll(ElfDumper.dynamic_section_label ++ "\n");
+ try bw.writeAll(ElfDumper.dynamic_section_label ++ "\n");
for (entries) |entry| {
const key = @as(u64, @bitCast(entry.d_tag));
@@ -2109,7 +2042,7 @@ const ElfDumper = struct {
elf.DT_NULL => "NULL",
else => "UNKNOWN",
};
- try writer.print("{s}", .{key_str});
+ try bw.print("{s}", .{key_str});
switch (key) {
elf.DT_NEEDED,
@@ -2118,7 +2051,7 @@ const ElfDumper = struct {
elf.DT_RUNPATH,
=> {
const name = getString(strtab, @intCast(value));
- try writer.print(" {s}", .{name});
+ try bw.print(" {s}", .{name});
},
elf.DT_INIT_ARRAY,
@@ -2136,7 +2069,7 @@ const ElfDumper = struct {
elf.DT_INIT,
elf.DT_FINI,
elf.DT_NULL,
- => try writer.print(" {x}", .{value}),
+ => try bw.print(" {x}", .{value}),
elf.DT_INIT_ARRAYSZ,
elf.DT_FINI_ARRAYSZ,
@@ -2146,77 +2079,77 @@ const ElfDumper = struct {
elf.DT_RELASZ,
elf.DT_RELAENT,
elf.DT_RELACOUNT,
- => try writer.print(" {d}", .{value}),
+ => try bw.print(" {d}", .{value}),
- elf.DT_PLTREL => try writer.writeAll(switch (value) {
+ elf.DT_PLTREL => try bw.writeAll(switch (value) {
elf.DT_REL => " REL",
elf.DT_RELA => " RELA",
else => " UNKNOWN",
}),
elf.DT_FLAGS => if (value > 0) {
- if (value & elf.DF_ORIGIN != 0) try writer.writeAll(" ORIGIN");
- if (value & elf.DF_SYMBOLIC != 0) try writer.writeAll(" SYMBOLIC");
- if (value & elf.DF_TEXTREL != 0) try writer.writeAll(" TEXTREL");
- if (value & elf.DF_BIND_NOW != 0) try writer.writeAll(" BIND_NOW");
- if (value & elf.DF_STATIC_TLS != 0) try writer.writeAll(" STATIC_TLS");
+ if (value & elf.DF_ORIGIN != 0) try bw.writeAll(" ORIGIN");
+ if (value & elf.DF_SYMBOLIC != 0) try bw.writeAll(" SYMBOLIC");
+ if (value & elf.DF_TEXTREL != 0) try bw.writeAll(" TEXTREL");
+ if (value & elf.DF_BIND_NOW != 0) try bw.writeAll(" BIND_NOW");
+ if (value & elf.DF_STATIC_TLS != 0) try bw.writeAll(" STATIC_TLS");
},
elf.DT_FLAGS_1 => if (value > 0) {
- if (value & elf.DF_1_NOW != 0) try writer.writeAll(" NOW");
- if (value & elf.DF_1_GLOBAL != 0) try writer.writeAll(" GLOBAL");
- if (value & elf.DF_1_GROUP != 0) try writer.writeAll(" GROUP");
- if (value & elf.DF_1_NODELETE != 0) try writer.writeAll(" NODELETE");
- if (value & elf.DF_1_LOADFLTR != 0) try writer.writeAll(" LOADFLTR");
- if (value & elf.DF_1_INITFIRST != 0) try writer.writeAll(" INITFIRST");
- if (value & elf.DF_1_NOOPEN != 0) try writer.writeAll(" NOOPEN");
- if (value & elf.DF_1_ORIGIN != 0) try writer.writeAll(" ORIGIN");
- if (value & elf.DF_1_DIRECT != 0) try writer.writeAll(" DIRECT");
- if (value & elf.DF_1_TRANS != 0) try writer.writeAll(" TRANS");
- if (value & elf.DF_1_INTERPOSE != 0) try writer.writeAll(" INTERPOSE");
- if (value & elf.DF_1_NODEFLIB != 0) try writer.writeAll(" NODEFLIB");
- if (value & elf.DF_1_NODUMP != 0) try writer.writeAll(" NODUMP");
- if (value & elf.DF_1_CONFALT != 0) try writer.writeAll(" CONFALT");
- if (value & elf.DF_1_ENDFILTEE != 0) try writer.writeAll(" ENDFILTEE");
- if (value & elf.DF_1_DISPRELDNE != 0) try writer.writeAll(" DISPRELDNE");
- if (value & elf.DF_1_DISPRELPND != 0) try writer.writeAll(" DISPRELPND");
- if (value & elf.DF_1_NODIRECT != 0) try writer.writeAll(" NODIRECT");
- if (value & elf.DF_1_IGNMULDEF != 0) try writer.writeAll(" IGNMULDEF");
- if (value & elf.DF_1_NOKSYMS != 0) try writer.writeAll(" NOKSYMS");
- if (value & elf.DF_1_NOHDR != 0) try writer.writeAll(" NOHDR");
- if (value & elf.DF_1_EDITED != 0) try writer.writeAll(" EDITED");
- if (value & elf.DF_1_NORELOC != 0) try writer.writeAll(" NORELOC");
- if (value & elf.DF_1_SYMINTPOSE != 0) try writer.writeAll(" SYMINTPOSE");
- if (value & elf.DF_1_GLOBAUDIT != 0) try writer.writeAll(" GLOBAUDIT");
- if (value & elf.DF_1_SINGLETON != 0) try writer.writeAll(" SINGLETON");
- if (value & elf.DF_1_STUB != 0) try writer.writeAll(" STUB");
- if (value & elf.DF_1_PIE != 0) try writer.writeAll(" PIE");
+ if (value & elf.DF_1_NOW != 0) try bw.writeAll(" NOW");
+ if (value & elf.DF_1_GLOBAL != 0) try bw.writeAll(" GLOBAL");
+ if (value & elf.DF_1_GROUP != 0) try bw.writeAll(" GROUP");
+ if (value & elf.DF_1_NODELETE != 0) try bw.writeAll(" NODELETE");
+ if (value & elf.DF_1_LOADFLTR != 0) try bw.writeAll(" LOADFLTR");
+ if (value & elf.DF_1_INITFIRST != 0) try bw.writeAll(" INITFIRST");
+ if (value & elf.DF_1_NOOPEN != 0) try bw.writeAll(" NOOPEN");
+ if (value & elf.DF_1_ORIGIN != 0) try bw.writeAll(" ORIGIN");
+ if (value & elf.DF_1_DIRECT != 0) try bw.writeAll(" DIRECT");
+ if (value & elf.DF_1_TRANS != 0) try bw.writeAll(" TRANS");
+ if (value & elf.DF_1_INTERPOSE != 0) try bw.writeAll(" INTERPOSE");
+ if (value & elf.DF_1_NODEFLIB != 0) try bw.writeAll(" NODEFLIB");
+ if (value & elf.DF_1_NODUMP != 0) try bw.writeAll(" NODUMP");
+ if (value & elf.DF_1_CONFALT != 0) try bw.writeAll(" CONFALT");
+ if (value & elf.DF_1_ENDFILTEE != 0) try bw.writeAll(" ENDFILTEE");
+ if (value & elf.DF_1_DISPRELDNE != 0) try bw.writeAll(" DISPRELDNE");
+ if (value & elf.DF_1_DISPRELPND != 0) try bw.writeAll(" DISPRELPND");
+ if (value & elf.DF_1_NODIRECT != 0) try bw.writeAll(" NODIRECT");
+ if (value & elf.DF_1_IGNMULDEF != 0) try bw.writeAll(" IGNMULDEF");
+ if (value & elf.DF_1_NOKSYMS != 0) try bw.writeAll(" NOKSYMS");
+ if (value & elf.DF_1_NOHDR != 0) try bw.writeAll(" NOHDR");
+ if (value & elf.DF_1_EDITED != 0) try bw.writeAll(" EDITED");
+ if (value & elf.DF_1_NORELOC != 0) try bw.writeAll(" NORELOC");
+ if (value & elf.DF_1_SYMINTPOSE != 0) try bw.writeAll(" SYMINTPOSE");
+ if (value & elf.DF_1_GLOBAUDIT != 0) try bw.writeAll(" GLOBAUDIT");
+ if (value & elf.DF_1_SINGLETON != 0) try bw.writeAll(" SINGLETON");
+ if (value & elf.DF_1_STUB != 0) try bw.writeAll(" STUB");
+ if (value & elf.DF_1_PIE != 0) try bw.writeAll(" PIE");
},
- else => try writer.print(" {x}", .{value}),
+ else => try bw.print(" {x}", .{value}),
}
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
}
}
- fn dumpSymtab(ctx: ObjectContext, comptime @"type": enum { symtab, dysymtab }, writer: anytype) !void {
+ fn dumpSymtab(ctx: ObjectContext, comptime @"type": enum { symtab, dysymtab }, bw: *Writer) !void {
const symtab = switch (@"type") {
.symtab => ctx.symtab,
.dysymtab => ctx.dysymtab,
};
- try writer.writeAll(switch (@"type") {
+ try bw.writeAll(switch (@"type") {
.symtab => symtab_label,
.dysymtab => dynamic_symtab_label,
} ++ "\n");
for (symtab.symbols, 0..) |sym, index| {
- try writer.print("{x} {x}", .{ sym.st_value, sym.st_size });
+ try bw.print("{x} {x}", .{ sym.st_value, sym.st_size });
{
if (elf.SHN_LORESERVE <= sym.st_shndx and sym.st_shndx < elf.SHN_HIRESERVE) {
if (elf.SHN_LOPROC <= sym.st_shndx and sym.st_shndx < elf.SHN_HIPROC) {
- try writer.print(" LO+{d}", .{sym.st_shndx - elf.SHN_LOPROC});
+ try bw.print(" LO+{d}", .{sym.st_shndx - elf.SHN_LOPROC});
} else {
const sym_ndx = switch (sym.st_shndx) {
elf.SHN_ABS => "ABS",
@@ -2224,12 +2157,12 @@ const ElfDumper = struct {
elf.SHN_LIVEPATCH => "LIV",
else => "UNK",
};
- try writer.print(" {s}", .{sym_ndx});
+ try bw.print(" {s}", .{sym_ndx});
}
} else if (sym.st_shndx == elf.SHN_UNDEF) {
- try writer.writeAll(" UND");
+ try bw.writeAll(" UND");
} else {
- try writer.print(" {x}", .{sym.st_shndx});
+ try bw.print(" {x}", .{sym.st_shndx});
}
}
@@ -2246,12 +2179,12 @@ const ElfDumper = struct {
elf.STT_NUM => "NUM",
elf.STT_GNU_IFUNC => "IFUNC",
else => if (elf.STT_LOPROC <= tt and tt < elf.STT_HIPROC) {
- break :blk try writer.print(" LOPROC+{d}", .{tt - elf.STT_LOPROC});
+ break :blk try bw.print(" LOPROC+{d}", .{tt - elf.STT_LOPROC});
} else if (elf.STT_LOOS <= tt and tt < elf.STT_HIOS) {
- break :blk try writer.print(" LOOS+{d}", .{tt - elf.STT_LOOS});
+ break :blk try bw.print(" LOOS+{d}", .{tt - elf.STT_LOOS});
} else "UNK",
};
- try writer.print(" {s}", .{sym_type});
+ try bw.print(" {s}", .{sym_type});
}
blk: {
@@ -2262,28 +2195,28 @@ const ElfDumper = struct {
elf.STB_WEAK => "WEAK",
elf.STB_NUM => "NUM",
else => if (elf.STB_LOPROC <= bind and bind < elf.STB_HIPROC) {
- break :blk try writer.print(" LOPROC+{d}", .{bind - elf.STB_LOPROC});
+ break :blk try bw.print(" LOPROC+{d}", .{bind - elf.STB_LOPROC});
} else if (elf.STB_LOOS <= bind and bind < elf.STB_HIOS) {
- break :blk try writer.print(" LOOS+{d}", .{bind - elf.STB_LOOS});
+ break :blk try bw.print(" LOOS+{d}", .{bind - elf.STB_LOOS});
} else "UNKNOWN",
};
- try writer.print(" {s}", .{sym_bind});
+ try bw.print(" {s}", .{sym_bind});
}
const sym_vis = @as(elf.STV, @enumFromInt(@as(u2, @truncate(sym.st_other))));
- try writer.print(" {s}", .{@tagName(sym_vis)});
+ try bw.print(" {s}", .{@tagName(sym_vis)});
const sym_name = switch (sym.st_type()) {
elf.STT_SECTION => ctx.getSectionName(sym.st_shndx),
else => symtab.getName(index).?,
};
- try writer.print(" {s}\n", .{sym_name});
+ try bw.print(" {s}\n", .{sym_name});
}
}
- fn dumpSection(ctx: ObjectContext, shndx: usize, writer: anytype) !void {
+ fn dumpSection(ctx: ObjectContext, shndx: usize, bw: *Writer) !void {
const data = ctx.getSectionContents(shndx);
- try writer.print("{s}", .{data});
+ try bw.print("{s}", .{data});
}
inline fn getSectionName(ctx: ObjectContext, shndx: usize) []const u8 {
@@ -2321,22 +2254,15 @@ const ElfDumper = struct {
};
fn getString(strtab: []const u8, off: u32) []const u8 {
- assert(off < strtab.len);
- return mem.sliceTo(@as([*:0]const u8, @ptrCast(strtab.ptr + off)), 0);
+ const str = strtab[off..];
+ return str[0..std.mem.indexOfScalar(u8, str, 0).?];
}
- fn fmtShType(sh_type: u32) std.fmt.Formatter(formatShType) {
+ fn fmtShType(sh_type: u32) std.fmt.Formatter(u32, formatShType) {
return .{ .data = sh_type };
}
- fn formatShType(
- sh_type: u32,
- comptime unused_fmt_string: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = unused_fmt_string;
- _ = options;
+ fn formatShType(sh_type: u32, w: *Writer) Writer.Error!void {
const name = switch (sh_type) {
elf.SHT_NULL => "NULL",
elf.SHT_PROGBITS => "PROGBITS",
@@ -2362,28 +2288,21 @@ const ElfDumper = struct {
elf.SHT_GNU_VERNEED => "VERNEED",
elf.SHT_GNU_VERSYM => "VERSYM",
else => if (elf.SHT_LOOS <= sh_type and sh_type < elf.SHT_HIOS) {
- return try writer.print("LOOS+0x{x}", .{sh_type - elf.SHT_LOOS});
+ return try w.print("LOOS+0x{x}", .{sh_type - elf.SHT_LOOS});
} else if (elf.SHT_LOPROC <= sh_type and sh_type < elf.SHT_HIPROC) {
- return try writer.print("LOPROC+0x{x}", .{sh_type - elf.SHT_LOPROC});
+ return try w.print("LOPROC+0x{x}", .{sh_type - elf.SHT_LOPROC});
} else if (elf.SHT_LOUSER <= sh_type and sh_type < elf.SHT_HIUSER) {
- return try writer.print("LOUSER+0x{x}", .{sh_type - elf.SHT_LOUSER});
+ return try w.print("LOUSER+0x{x}", .{sh_type - elf.SHT_LOUSER});
} else "UNKNOWN",
};
- try writer.writeAll(name);
+ try w.writeAll(name);
}
- fn fmtPhType(ph_type: u32) std.fmt.Formatter(formatPhType) {
+ fn fmtPhType(ph_type: u32) std.fmt.Formatter(u32, formatPhType) {
return .{ .data = ph_type };
}
- fn formatPhType(
- ph_type: u32,
- comptime unused_fmt_string: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = unused_fmt_string;
- _ = options;
+ fn formatPhType(ph_type: u32, w: *Writer) Writer.Error!void {
const p_type = switch (ph_type) {
elf.PT_NULL => "NULL",
elf.PT_LOAD => "LOAD",
@@ -2398,12 +2317,12 @@ const ElfDumper = struct {
elf.PT_GNU_STACK => "GNU_STACK",
elf.PT_GNU_RELRO => "GNU_RELRO",
else => if (elf.PT_LOOS <= ph_type and ph_type < elf.PT_HIOS) {
- return try writer.print("LOOS+0x{x}", .{ph_type - elf.PT_LOOS});
+ return try w.print("LOOS+0x{x}", .{ph_type - elf.PT_LOOS});
} else if (elf.PT_LOPROC <= ph_type and ph_type < elf.PT_HIPROC) {
- return try writer.print("LOPROC+0x{x}", .{ph_type - elf.PT_LOPROC});
+ return try w.print("LOPROC+0x{x}", .{ph_type - elf.PT_LOPROC});
} else "UNKNOWN",
};
- try writer.writeAll(p_type);
+ try w.writeAll(p_type);
}
};
@@ -2412,49 +2331,39 @@ const WasmDumper = struct {
fn parseAndDump(step: *Step, check: Check, bytes: []const u8) ![]const u8 {
const gpa = step.owner.allocator;
- var fbs = std.io.fixedBufferStream(bytes);
- const reader = fbs.reader();
+ var br: std.io.Reader = .fixed(bytes);
- const buf = try reader.readBytesNoEof(8);
- if (!mem.eql(u8, buf[0..4], &std.wasm.magic)) {
- return error.InvalidMagicByte;
- }
- if (!mem.eql(u8, buf[4..], &std.wasm.version)) {
- return error.UnsupportedWasmVersion;
- }
+ const buf = try br.takeArray(8);
+ if (!mem.eql(u8, buf[0..4], &std.wasm.magic)) return error.InvalidMagicByte;
+ if (!mem.eql(u8, buf[4..8], &std.wasm.version)) return error.UnsupportedWasmVersion;
+
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
- var output = std.ArrayList(u8).init(gpa);
- defer output.deinit();
- parseAndDumpInner(step, check, bytes, &fbs, &output) catch |err| switch (err) {
- error.EndOfStream => try output.appendSlice("\n<UnexpectedEndOfStream>"),
+ parseAndDumpInner(step, check, &br, bw) catch |err| switch (err) {
+ error.EndOfStream => try bw.writeAll("\n<UnexpectedEndOfStream>"),
else => |e| return e,
};
- return output.toOwnedSlice();
+ return aw.toOwnedSlice();
}
fn parseAndDumpInner(
step: *Step,
check: Check,
- bytes: []const u8,
- fbs: *std.io.FixedBufferStream([]const u8),
- output: *std.ArrayList(u8),
+ br: *std.io.Reader,
+ bw: *Writer,
) !void {
- const reader = fbs.reader();
- const writer = output.writer();
-
+ var section_br: std.io.Reader = undefined;
switch (check.kind) {
- .headers => {
- while (reader.readByte()) |current_byte| {
- const section = std.enums.fromInt(std.wasm.Section, current_byte) orelse {
- return step.fail("Found invalid section id '{d}'", .{current_byte});
- };
-
- const section_length = try std.leb.readUleb128(u32, reader);
- try parseAndDumpSection(step, section, bytes[fbs.pos..][0..section_length], writer);
- fbs.pos += section_length;
- } else |_| {} // reached end of stream
+ .headers => while (br.takeEnum(std.wasm.Section, .little)) |section| {
+ section_br = .fixed(try br.take(try br.takeLeb128(u32)));
+                try parseAndDumpSection(step, section, &section_br, bw);
+ } else |err| switch (err) {
+ error.InvalidEnumTag => return step.fail("invalid section id", .{}),
+ error.EndOfStream => {},
+ else => |e| return e,
},
-
else => return step.fail("invalid check kind for Wasm file format: {s}", .{@tagName(check.kind)}),
}
}
@@ -2462,16 +2371,13 @@ const WasmDumper = struct {
fn parseAndDumpSection(
step: *Step,
section: std.wasm.Section,
- data: []const u8,
- writer: anytype,
+ br: *std.io.Reader,
+ bw: *Writer,
) !void {
- var fbs = std.io.fixedBufferStream(data);
- const reader = fbs.reader();
-
- try writer.print(
+ try bw.print(
\\Section {s}
\\size {d}
- , .{ @tagName(section), data.len });
+ , .{ @tagName(section), br.buffer.len });
switch (section) {
.type,
@@ -2485,96 +2391,83 @@ const WasmDumper = struct {
.code,
.data,
=> {
- const entries = try std.leb.readUleb128(u32, reader);
- try writer.print("\nentries {d}\n", .{entries});
- try parseSection(step, section, data[fbs.pos..], entries, writer);
+ const entries = try br.takeLeb128(u32);
+ try bw.print("\nentries {d}\n", .{entries});
+ try parseSection(step, section, br, entries, bw);
},
.custom => {
- const name_length = try std.leb.readUleb128(u32, reader);
- const name = data[fbs.pos..][0..name_length];
- fbs.pos += name_length;
- try writer.print("\nname {s}\n", .{name});
+ const name = try br.take(try br.takeLeb128(u32));
+ try bw.print("\nname {s}\n", .{name});
if (mem.eql(u8, name, "name")) {
- try parseDumpNames(step, reader, writer, data);
+ try parseDumpNames(step, br, bw);
} else if (mem.eql(u8, name, "producers")) {
- try parseDumpProducers(reader, writer, data);
+ try parseDumpProducers(br, bw);
} else if (mem.eql(u8, name, "target_features")) {
- try parseDumpFeatures(reader, writer, data);
+ try parseDumpFeatures(br, bw);
}
// TODO: Implement parsing and dumping other custom sections (such as relocations)
},
.start => {
- const start = try std.leb.readUleb128(u32, reader);
- try writer.print("\nstart {d}\n", .{start});
+ const start = try br.takeLeb128(u32);
+ try bw.print("\nstart {d}\n", .{start});
},
.data_count => {
- const count = try std.leb.readUleb128(u32, reader);
- try writer.print("\ncount {d}\n", .{count});
+ const count = try br.takeLeb128(u32);
+ try bw.print("\ncount {d}\n", .{count});
},
else => {}, // skip unknown sections
}
}
- fn parseSection(step: *Step, section: std.wasm.Section, data: []const u8, entries: u32, writer: anytype) !void {
- var fbs = std.io.fixedBufferStream(data);
- const reader = fbs.reader();
-
+ fn parseSection(step: *Step, section: std.wasm.Section, br: *std.io.Reader, entries: u32, bw: *Writer) !void {
switch (section) {
.type => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- const func_type = try reader.readByte();
+ const func_type = try br.takeByte();
if (func_type != std.wasm.function_type) {
return step.fail("expected function type, found byte '{d}'", .{func_type});
}
- const params = try std.leb.readUleb128(u32, reader);
- try writer.print("params {d}\n", .{params});
+ const params = try br.takeLeb128(u32);
+ try bw.print("params {d}\n", .{params});
var index: u32 = 0;
while (index < params) : (index += 1) {
- _ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
+ _ = try parseDumpType(step, std.wasm.Valtype, br, bw);
} else index = 0;
- const returns = try std.leb.readUleb128(u32, reader);
- try writer.print("returns {d}\n", .{returns});
+ const returns = try br.takeLeb128(u32);
+ try bw.print("returns {d}\n", .{returns});
while (index < returns) : (index += 1) {
- _ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
+ _ = try parseDumpType(step, std.wasm.Valtype, br, bw);
}
}
},
.import => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- const module_name_len = try std.leb.readUleb128(u32, reader);
- const module_name = data[fbs.pos..][0..module_name_len];
- fbs.pos += module_name_len;
- const name_len = try std.leb.readUleb128(u32, reader);
- const name = data[fbs.pos..][0..name_len];
- fbs.pos += name_len;
-
- const kind = std.enums.fromInt(std.wasm.ExternalKind, try reader.readByte()) orelse {
- return step.fail("invalid import kind", .{});
+ const module_name = try br.take(try br.takeLeb128(u32));
+ const name = try br.take(try br.takeLeb128(u32));
+ const kind = br.takeEnum(std.wasm.ExternalKind, .little) catch |err| switch (err) {
+ error.InvalidEnumTag => return step.fail("invalid import kind", .{}),
+ else => |e| return e,
};
- try writer.print(
+ try bw.print(
\\module {s}
\\name {s}
\\kind {s}
, .{ module_name, name, @tagName(kind) });
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
switch (kind) {
- .function => {
- try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
- },
- .memory => {
- try parseDumpLimits(reader, writer);
- },
+ .function => try bw.print("index {d}\n", .{try br.takeLeb128(u32)}),
+ .memory => try parseDumpLimits(br, bw),
.global => {
- _ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
- try writer.print("mutable {}\n", .{0x01 == try std.leb.readUleb128(u32, reader)});
+ _ = try parseDumpType(step, std.wasm.Valtype, br, bw);
+ try bw.print("mutable {}\n", .{0x01 == try br.takeLeb128(u32)});
},
.table => {
- _ = try parseDumpType(step, std.wasm.RefType, reader, writer);
- try parseDumpLimits(reader, writer);
+ _ = try parseDumpType(step, std.wasm.RefType, br, bw);
+ try parseDumpLimits(br, bw);
},
}
}
@@ -2582,60 +2475,58 @@ const WasmDumper = struct {
.function => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
+ try bw.print("index {d}\n", .{try br.takeLeb128(u32)});
}
},
.table => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- _ = try parseDumpType(step, std.wasm.RefType, reader, writer);
- try parseDumpLimits(reader, writer);
+ _ = try parseDumpType(step, std.wasm.RefType, br, bw);
+ try parseDumpLimits(br, bw);
}
},
.memory => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- try parseDumpLimits(reader, writer);
+ try parseDumpLimits(br, bw);
}
},
.global => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- _ = try parseDumpType(step, std.wasm.Valtype, reader, writer);
- try writer.print("mutable {}\n", .{0x01 == try std.leb.readUleb128(u1, reader)});
- try parseDumpInit(step, reader, writer);
+ _ = try parseDumpType(step, std.wasm.Valtype, br, bw);
+ try bw.print("mutable {}\n", .{0x01 == try br.takeLeb128(u1)});
+ try parseDumpInit(step, br, bw);
}
},
.@"export" => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- const name_len = try std.leb.readUleb128(u32, reader);
- const name = data[fbs.pos..][0..name_len];
- fbs.pos += name_len;
- const kind_byte = try std.leb.readUleb128(u8, reader);
- const kind = std.enums.fromInt(std.wasm.ExternalKind, kind_byte) orelse {
- return step.fail("invalid export kind value '{d}'", .{kind_byte});
+ const name = try br.take(try br.takeLeb128(u32));
+ const kind = br.takeEnum(std.wasm.ExternalKind, .little) catch |err| switch (err) {
+ error.InvalidEnumTag => return step.fail("invalid export kind value", .{}),
+ else => |e| return e,
};
- const index = try std.leb.readUleb128(u32, reader);
- try writer.print(
+ const index = try br.takeLeb128(u32);
+ try bw.print(
\\name {s}
\\kind {s}
\\index {d}
, .{ name, @tagName(kind), index });
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
}
},
.element => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- try writer.print("table index {d}\n", .{try std.leb.readUleb128(u32, reader)});
- try parseDumpInit(step, reader, writer);
+ try bw.print("table index {d}\n", .{try br.takeLeb128(u32)});
+ try parseDumpInit(step, br, bw);
- const function_indexes = try std.leb.readUleb128(u32, reader);
+ const function_indexes = try br.takeLeb128(u32);
var function_index: u32 = 0;
- try writer.print("indexes {d}\n", .{function_indexes});
+ try bw.print("indexes {d}\n", .{function_indexes});
while (function_index < function_indexes) : (function_index += 1) {
- try writer.print("index {d}\n", .{try std.leb.readUleb128(u32, reader)});
+ try bw.print("index {d}\n", .{try br.takeLeb128(u32)});
}
}
},
@@ -2643,101 +2534,95 @@ const WasmDumper = struct {
.data => {
var i: u32 = 0;
while (i < entries) : (i += 1) {
- const flags = try std.leb.readUleb128(u32, reader);
- const index = if (flags & 0x02 != 0)
- try std.leb.readUleb128(u32, reader)
- else
- 0;
- try writer.print("memory index 0x{x}\n", .{index});
- if (flags == 0) {
- try parseDumpInit(step, reader, writer);
- }
-
- const size = try std.leb.readUleb128(u32, reader);
- try writer.print("size {d}\n", .{size});
- try reader.skipBytes(size, .{}); // we do not care about the content of the segments
+ const flags: packed struct(u32) {
+ passive: bool,
+ memidx: bool,
+ unused: u30,
+ } = @bitCast(try br.takeLeb128(u32));
+ const index = if (flags.memidx) try br.takeLeb128(u32) else 0;
+ try bw.print("memory index 0x{x}\n", .{index});
+ if (!flags.passive) try parseDumpInit(step, br, bw);
+ const size = try br.takeLeb128(u32);
+ try bw.print("size {d}\n", .{size});
+ _ = try br.discard(.limited(size)); // we do not care about the content of the segments
}
},
else => unreachable,
}
}
- fn parseDumpType(step: *Step, comptime E: type, reader: anytype, writer: anytype) !E {
- const byte = try reader.readByte();
- const tag = std.enums.fromInt(E, byte) orelse {
- return step.fail("invalid wasm type value '{d}'", .{byte});
+ fn parseDumpType(step: *Step, comptime E: type, br: *std.io.Reader, bw: *Writer) !E {
+ const tag = br.takeEnum(E, .little) catch |err| switch (err) {
+ error.InvalidEnumTag => return step.fail("invalid wasm type value", .{}),
+ else => |e| return e,
};
- try writer.print("type {s}\n", .{@tagName(tag)});
+ try bw.print("type {s}\n", .{@tagName(tag)});
return tag;
}
- fn parseDumpLimits(reader: anytype, writer: anytype) !void {
- const flags = try std.leb.readUleb128(u8, reader);
- const min = try std.leb.readUleb128(u32, reader);
+ fn parseDumpLimits(br: *std.io.Reader, bw: *Writer) !void {
+ const flags = try br.takeLeb128(u8);
+ const min = try br.takeLeb128(u32);
- try writer.print("min {x}\n", .{min});
- if (flags != 0) {
- try writer.print("max {x}\n", .{try std.leb.readUleb128(u32, reader)});
- }
+ try bw.print("min {x}\n", .{min});
+ if (flags != 0) try bw.print("max {x}\n", .{try br.takeLeb128(u32)});
}
- fn parseDumpInit(step: *Step, reader: anytype, writer: anytype) !void {
- const byte = try reader.readByte();
- const opcode = std.enums.fromInt(std.wasm.Opcode, byte) orelse {
- return step.fail("invalid wasm opcode '{d}'", .{byte});
+ fn parseDumpInit(step: *Step, br: *std.io.Reader, bw: *Writer) !void {
+ const opcode = br.takeEnum(std.wasm.Opcode, .little) catch |err| switch (err) {
+ error.InvalidEnumTag => return step.fail("invalid wasm opcode", .{}),
+ else => |e| return e,
};
switch (opcode) {
- .i32_const => try writer.print("i32.const {x}\n", .{try std.leb.readIleb128(i32, reader)}),
- .i64_const => try writer.print("i64.const {x}\n", .{try std.leb.readIleb128(i64, reader)}),
- .f32_const => try writer.print("f32.const {x}\n", .{@as(f32, @bitCast(try reader.readInt(u32, .little)))}),
- .f64_const => try writer.print("f64.const {x}\n", .{@as(f64, @bitCast(try reader.readInt(u64, .little)))}),
- .global_get => try writer.print("global.get {x}\n", .{try std.leb.readUleb128(u32, reader)}),
+ .i32_const => try bw.print("i32.const {x}\n", .{try br.takeLeb128(i32)}),
+ .i64_const => try bw.print("i64.const {x}\n", .{try br.takeLeb128(i64)}),
+ .f32_const => try bw.print("f32.const {x}\n", .{@as(f32, @bitCast(try br.takeInt(u32, .little)))}),
+ .f64_const => try bw.print("f64.const {x}\n", .{@as(f64, @bitCast(try br.takeInt(u64, .little)))}),
+ .global_get => try bw.print("global.get {x}\n", .{try br.takeLeb128(u32)}),
else => unreachable,
}
- const end_opcode = try std.leb.readUleb128(u8, reader);
+ const end_opcode = try br.takeLeb128(u8);
if (end_opcode != @intFromEnum(std.wasm.Opcode.end)) {
return step.fail("expected 'end' opcode in init expression", .{});
}
}
/// https://webassembly.github.io/spec/core/appendix/custom.html
- fn parseDumpNames(step: *Step, reader: anytype, writer: anytype, data: []const u8) !void {
- while (reader.context.pos < data.len) {
- switch (try parseDumpType(step, std.wasm.NameSubsection, reader, writer)) {
+ fn parseDumpNames(step: *Step, br: *std.io.Reader, bw: *Writer) !void {
+ var subsection_br: std.io.Reader = undefined;
+ while (br.seek < br.buffer.len) {
+ switch (try parseDumpType(step, std.wasm.NameSubsection, br, bw)) {
// The module name subsection ... consists of a single name
// that is assigned to the module itself.
.module => {
- const size = try std.leb.readUleb128(u32, reader);
- const name_len = try std.leb.readUleb128(u32, reader);
- if (size != name_len + 1) return error.BadSubsectionSize;
- if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream;
- try writer.print("name {s}\n", .{data[reader.context.pos..][0..name_len]});
- reader.context.pos += name_len;
+ subsection_br = .fixed(try br.take(try br.takeLeb128(u32)));
+ const name = try subsection_br.take(try subsection_br.takeLeb128(u32));
+ try bw.print(
+ \\name {s}
+ \\
+ , .{name});
+ if (subsection_br.seek != subsection_br.buffer.len) return error.BadSubsectionSize;
},
// The function name subsection ... consists of a name map
// assigning function names to function indices.
.function, .global, .data_segment => {
- const size = try std.leb.readUleb128(u32, reader);
- const entries = try std.leb.readUleb128(u32, reader);
- try writer.print(
- \\size {d}
+ subsection_br = .fixed(try br.take(try br.takeLeb128(u32)));
+                    const entries = try subsection_br.takeLeb128(u32);
+ try bw.print(
\\names {d}
\\
- , .{ size, entries });
+ , .{entries});
for (0..entries) |_| {
- const index = try std.leb.readUleb128(u32, reader);
- const name_len = try std.leb.readUleb128(u32, reader);
- if (reader.context.pos + name_len > data.len) return error.UnexpectedEndOfStream;
- const name = data[reader.context.pos..][0..name_len];
- reader.context.pos += name.len;
-
- try writer.print(
+                        const index = try subsection_br.takeLeb128(u32);
+                        const name = try subsection_br.take(try subsection_br.takeLeb128(u32));
+ try bw.print(
\\index {d}
\\name {s}
\\
, .{ index, name });
}
+ if (subsection_br.seek != subsection_br.buffer.len) return error.BadSubsectionSize;
},
// The local name subsection ... consists of an indirect name
@@ -2752,52 +2637,49 @@ const WasmDumper = struct {
}
}
- fn parseDumpProducers(reader: anytype, writer: anytype, data: []const u8) !void {
- const field_count = try std.leb.readUleb128(u32, reader);
- try writer.print("fields {d}\n", .{field_count});
+ fn parseDumpProducers(br: *std.io.Reader, bw: *Writer) !void {
+ const field_count = try br.takeLeb128(u32);
+ try bw.print(
+ \\fields {d}
+ \\
+ , .{field_count});
var current_field: u32 = 0;
while (current_field < field_count) : (current_field += 1) {
- const field_name_length = try std.leb.readUleb128(u32, reader);
- const field_name = data[reader.context.pos..][0..field_name_length];
- reader.context.pos += field_name_length;
-
- const value_count = try std.leb.readUleb128(u32, reader);
- try writer.print(
+ const field_name = try br.take(try br.takeLeb128(u32));
+ const value_count = try br.takeLeb128(u32);
+ try bw.print(
\\field_name {s}
\\values {d}
+ \\
, .{ field_name, value_count });
- try writer.writeByte('\n');
var current_value: u32 = 0;
while (current_value < value_count) : (current_value += 1) {
- const value_length = try std.leb.readUleb128(u32, reader);
- const value = data[reader.context.pos..][0..value_length];
- reader.context.pos += value_length;
-
- const version_length = try std.leb.readUleb128(u32, reader);
- const version = data[reader.context.pos..][0..version_length];
- reader.context.pos += version_length;
-
- try writer.print(
+ const value = try br.take(try br.takeLeb128(u32));
+ const version = try br.take(try br.takeLeb128(u32));
+ try bw.print(
\\value_name {s}
\\version {s}
+ \\
, .{ value, version });
- try writer.writeByte('\n');
}
}
}
- fn parseDumpFeatures(reader: anytype, writer: anytype, data: []const u8) !void {
- const feature_count = try std.leb.readUleb128(u32, reader);
- try writer.print("features {d}\n", .{feature_count});
+ fn parseDumpFeatures(br: *std.io.Reader, bw: *Writer) !void {
+ const feature_count = try br.takeLeb128(u32);
+ try bw.print(
+ \\features {d}
+ \\
+ , .{feature_count});
var index: u32 = 0;
while (index < feature_count) : (index += 1) {
- const prefix_byte = try std.leb.readUleb128(u8, reader);
- const name_length = try std.leb.readUleb128(u32, reader);
- const feature_name = data[reader.context.pos..][0..name_length];
- reader.context.pos += name_length;
-
- try writer.print("{c} {s}\n", .{ prefix_byte, feature_name });
+ const prefix_byte = try br.takeLeb128(u8);
+ const feature_name = try br.take(try br.takeLeb128(u32));
+ try bw.print(
+ \\{c} {s}
+ \\
+ , .{ prefix_byte, feature_name });
}
}
};
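For illustration only (not part of this commit): a small sketch of the fixed-buffer std.io.Reader calls the WasmDumper now relies on, assuming current std semantics — .fixed wraps a byte slice, takeLeb128 decodes a LEB128 integer, and take returns a length-prefixed slice of the buffer. The byte values are made up.

const std = @import("std");

test "length-prefixed name via std.io.Reader" {
    // A LEB128 length (4), the name bytes, then another LEB128 value (42).
    const bytes = [_]u8{ 4, 'n', 'a', 'm', 'e', 0x2a };
    var r: std.io.Reader = .fixed(&bytes);
    const name = try r.take(try r.takeLeb128(u32));
    try std.testing.expectEqualStrings("name", name);
    try std.testing.expectEqual(@as(u32, 42), try r.takeLeb128(u32));
}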
lib/std/Build/Step/Compile.zig
@@ -1542,7 +1542,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
if (compile.kind == .lib and compile.linkage != null and compile.linkage.? == .dynamic) {
if (compile.version) |version| {
try zig_args.append("--version");
- try zig_args.append(b.fmt("{}", .{version}));
+ try zig_args.append(b.fmt("{f}", .{version}));
}
if (compile.rootModuleTarget().os.tag.isDarwin()) {
@@ -1696,9 +1696,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
if (compile.build_id orelse b.build_id) |build_id| {
try zig_args.append(switch (build_id) {
- .hexstring => |hs| b.fmt("--build-id=0x{s}", .{
- std.fmt.fmtSliceHexLower(hs.toSlice()),
- }),
+ .hexstring => |hs| b.fmt("--build-id=0x{x}", .{hs.toSlice()}),
.none, .fast, .uuid, .sha1, .md5 => b.fmt("--build-id={s}", .{@tagName(build_id)}),
});
}
@@ -1706,7 +1704,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
const opt_zig_lib_dir = if (compile.zig_lib_dir) |dir|
dir.getPath2(b, step)
else if (b.graph.zig_lib_directory.path) |_|
- b.fmt("{}", .{b.graph.zig_lib_directory})
+ b.fmt("{f}", .{b.graph.zig_lib_directory})
else
null;
@@ -1746,8 +1744,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
}
if (compile.error_limit) |err_limit| try zig_args.appendSlice(&.{
- "--error-limit",
- b.fmt("{}", .{err_limit}),
+ "--error-limit", b.fmt("{d}", .{err_limit}),
});
try addFlag(&zig_args, "incremental", b.graph.incremental);
@@ -1793,11 +1790,7 @@ fn getZigArgs(compile: *Compile, fuzz: bool) ![][]const u8 {
var args_hash: [Sha256.digest_length]u8 = undefined;
Sha256.hash(args, &args_hash, .{});
var args_hex_hash: [Sha256.digest_length * 2]u8 = undefined;
- _ = try std.fmt.bufPrint(
- &args_hex_hash,
- "{s}",
- .{std.fmt.fmtSliceHexLower(&args_hash)},
- );
+ _ = try std.fmt.bufPrint(&args_hex_hash, "{x}", .{&args_hash});
const args_file = "args" ++ fs.path.sep_str ++ args_hex_hash;
try b.cache_root.handle.writeFile(.{ .sub_path = args_file, .data = args });
@@ -1836,7 +1829,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
// Update generated files
if (maybe_output_dir) |output_dir| {
if (compile.emit_directory) |lp| {
- lp.path = b.fmt("{}", .{output_dir});
+ lp.path = b.fmt("{f}", .{output_dir});
}
// zig fmt: off
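For illustration only (not part of this commit): a sketch of the format-specifier changes leaned on above, assuming current std semantics — {x} hex-encodes a byte slice or array directly (replacing std.fmt.fmtSliceHexLower), and {f} invokes a type's format method, e.g. std.SemanticVersion's.

const std = @import("std");

test "{x} on bytes and {f} on SemanticVersion" {
    var buf: [64]u8 = undefined;
    const hex = try std.fmt.bufPrint(&buf, "{x}", .{&[_]u8{ 0xde, 0xad, 0xbe, 0xef }});
    try std.testing.expectEqualStrings("deadbeef", hex);

    const ver: std.SemanticVersion = .{ .major = 1, .minor = 2, .patch = 3 };
    var buf2: [32]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf2, "{f}", .{ver});
    try std.testing.expectEqualStrings("1.2.3", s);
}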
lib/std/Build/Step/ConfigHeader.zig
@@ -2,6 +2,7 @@ const std = @import("std");
const ConfigHeader = @This();
const Step = std.Build.Step;
const Allocator = std.mem.Allocator;
+const Writer = std.io.Writer;
pub const Style = union(enum) {
/// A configure format supported by autotools that uses `#undef foo` to
@@ -87,7 +88,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
owner.fmt("configure {s} header to {s}", .{ @tagName(options.style), include_path });
config_header.* = .{
- .step = Step.init(.{
+ .step = .init(.{
.id = base_id,
.name = name,
.owner = owner,
@@ -95,7 +96,7 @@ pub fn create(owner: *std.Build, options: Options) *ConfigHeader {
.first_ret_addr = options.first_ret_addr orelse @returnAddress(),
}),
.style = options.style,
- .values = std.StringArrayHashMap(Value).init(owner.allocator),
+ .values = .init(owner.allocator),
.max_bytes = options.max_bytes,
.include_path = include_path,
@@ -195,8 +196,9 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
man.hash.addBytes(config_header.include_path);
man.hash.addOptionalBytes(config_header.include_guard_override);
- var output = std.ArrayList(u8).init(gpa);
- defer output.deinit();
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
const header_text = "This file was generated by ConfigHeader using the Zig Build System.";
const c_generated_line = "/* " ++ header_text ++ " */\n";
@@ -204,7 +206,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (config_header.style) {
.autoconf_undef, .autoconf, .autoconf_at => |file_source| {
- try output.appendSlice(c_generated_line);
+ try bw.writeAll(c_generated_line);
const src_path = file_source.getPath2(b, step);
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read autoconf input file '{s}': {s}", .{
@@ -212,32 +214,33 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
});
};
switch (config_header.style) {
- .autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, &output, config_header.values, src_path),
- .autoconf_at => try render_autoconf_at(step, contents, &output, config_header.values, src_path),
+ .autoconf_undef, .autoconf => try render_autoconf_undef(step, contents, bw, config_header.values, src_path),
+ .autoconf_at => try render_autoconf_at(step, contents, &aw, config_header.values, src_path),
else => unreachable,
}
},
.cmake => |file_source| {
- try output.appendSlice(c_generated_line);
+ try bw.writeAll(c_generated_line);
const src_path = file_source.getPath2(b, step);
const contents = std.fs.cwd().readFileAlloc(arena, src_path, config_header.max_bytes) catch |err| {
return step.fail("unable to read cmake input file '{s}': {s}", .{
src_path, @errorName(err),
});
};
- try render_cmake(step, contents, &output, config_header.values, src_path);
+ try render_cmake(step, contents, bw, config_header.values, src_path);
},
.blank => {
- try output.appendSlice(c_generated_line);
- try render_blank(&output, config_header.values, config_header.include_path, config_header.include_guard_override);
+ try bw.writeAll(c_generated_line);
+ try render_blank(gpa, bw, config_header.values, config_header.include_path, config_header.include_guard_override);
},
.nasm => {
- try output.appendSlice(asm_generated_line);
- try render_nasm(&output, config_header.values);
+ try bw.writeAll(asm_generated_line);
+ try render_nasm(bw, config_header.values);
},
}
- man.hash.addBytes(output.items);
+ const output = aw.getWritten();
+ man.hash.addBytes(output);
if (try step.cacheHit(&man)) {
const digest = man.final();
@@ -256,13 +259,13 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const sub_path_dirname = std.fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_path_dirname, @errorName(err),
});
};
- b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output.items }) catch |err| {
- return step.fail("unable to write file '{}{s}': {s}", .{
+ b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = output }) catch |err| {
+ return step.fail("unable to write file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(err),
});
};
@@ -274,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
fn render_autoconf_undef(
step: *Step,
contents: []const u8,
- output: *std.ArrayList(u8),
+ bw: *Writer,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
@@ -289,15 +292,15 @@ fn render_autoconf_undef(
var line_it = std.mem.splitScalar(u8, contents, '\n');
while (line_it.next()) |line| : (line_index += 1) {
if (!std.mem.startsWith(u8, line, "#")) {
- try output.appendSlice(line);
- try output.appendSlice("\n");
+ try bw.writeAll(line);
+ try bw.writeByte('\n');
continue;
}
var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
const undef = it.next().?;
if (!std.mem.eql(u8, undef, "undef")) {
- try output.appendSlice(line);
- try output.appendSlice("\n");
+ try bw.writeAll(line);
+ try bw.writeByte('\n');
continue;
}
const name = it.next().?;
@@ -309,7 +312,7 @@ fn render_autoconf_undef(
continue;
};
is_used.set(index);
- try renderValueC(output, name, values.values()[index]);
+ try renderValueC(bw, name, values.values()[index]);
}
var unused_value_it = is_used.iterator(.{ .kind = .unset });
@@ -326,12 +329,13 @@ fn render_autoconf_undef(
fn render_autoconf_at(
step: *Step,
contents: []const u8,
- output: *std.ArrayList(u8),
+ aw: *std.io.Writer.Allocating,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
const build = step.owner;
const allocator = build.allocator;
+ const bw = &aw.interface;
const used = allocator.alloc(bool, values.count()) catch @panic("OOM");
for (used) |*u| u.* = false;
@@ -343,11 +347,11 @@ fn render_autoconf_at(
while (line_it.next()) |line| : (line_index += 1) {
const last_line = line_it.index == line_it.buffer.len;
- const old_len = output.items.len;
- expand_variables_autoconf_at(output, line, values, used) catch |err| switch (err) {
+ const old_len = aw.getWritten().len;
+ expand_variables_autoconf_at(bw, line, values, used) catch |err| switch (err) {
error.MissingValue => {
- const name = output.items[old_len..];
- defer output.shrinkRetainingCapacity(old_len);
+ const name = aw.getWritten()[old_len..];
+ defer aw.shrinkRetainingCapacity(old_len);
try step.addError("{s}:{d}: error: unspecified config header value: '{s}'", .{
src_path, line_index + 1, name,
});
@@ -362,9 +366,7 @@ fn render_autoconf_at(
continue;
},
};
- if (!last_line) {
- try output.append('\n');
- }
+ if (!last_line) try bw.writeByte('\n');
}
for (values.unmanaged.entries.slice().items(.key), used) |name, u| {
@@ -374,15 +376,13 @@ fn render_autoconf_at(
}
}
- if (any_errors) {
- return error.MakeFailed;
- }
+ if (any_errors) return error.MakeFailed;
}
fn render_cmake(
step: *Step,
contents: []const u8,
- output: *std.ArrayList(u8),
+ bw: *Writer,
values: std.StringArrayHashMap(Value),
src_path: []const u8,
) !void {
@@ -417,10 +417,8 @@ fn render_cmake(
defer allocator.free(line);
if (!std.mem.startsWith(u8, line, "#")) {
- try output.appendSlice(line);
- if (!last_line) {
- try output.appendSlice("\n");
- }
+ try bw.writeAll(line);
+ if (!last_line) try bw.writeByte('\n');
continue;
}
var it = std.mem.tokenizeAny(u8, line[1..], " \t\r");
@@ -428,10 +426,8 @@ fn render_cmake(
if (!std.mem.eql(u8, cmakedefine, "cmakedefine") and
!std.mem.eql(u8, cmakedefine, "cmakedefine01"))
{
- try output.appendSlice(line);
- if (!last_line) {
- try output.appendSlice("\n");
- }
+ try bw.writeAll(line);
+ if (!last_line) try bw.writeByte('\n');
continue;
}
@@ -502,7 +498,7 @@ fn render_cmake(
value = Value{ .ident = it.rest() };
}
- try renderValueC(output, name, value);
+ try renderValueC(bw, name, value);
}
if (any_errors) {
@@ -511,13 +507,14 @@ fn render_cmake(
}
fn render_blank(
- output: *std.ArrayList(u8),
+ gpa: std.mem.Allocator,
+ bw: *Writer,
defines: std.StringArrayHashMap(Value),
include_path: []const u8,
include_guard_override: ?[]const u8,
) !void {
const include_guard_name = include_guard_override orelse blk: {
- const name = try output.allocator.dupe(u8, include_path);
+ const name = try gpa.dupe(u8, include_path);
for (name) |*byte| {
switch (byte.*) {
'a'...'z' => byte.* = byte.* - 'a' + 'A',
@@ -527,92 +524,53 @@ fn render_blank(
}
break :blk name;
};
+ defer if (include_guard_override == null) gpa.free(include_guard_name);
- try output.appendSlice("#ifndef ");
- try output.appendSlice(include_guard_name);
- try output.appendSlice("\n#define ");
- try output.appendSlice(include_guard_name);
- try output.appendSlice("\n");
+ try bw.print(
+ \\#ifndef {[0]s}
+ \\#define {[0]s}
+ \\
+ , .{include_guard_name});
const values = defines.values();
- for (defines.keys(), 0..) |name, i| {
- try renderValueC(output, name, values[i]);
- }
+ for (defines.keys(), 0..) |name, i| try renderValueC(bw, name, values[i]);
- try output.appendSlice("#endif /* ");
- try output.appendSlice(include_guard_name);
- try output.appendSlice(" */\n");
+ try bw.print(
+ \\#endif /* {s} */
+ \\
+ , .{include_guard_name});
}
-fn render_nasm(output: *std.ArrayList(u8), defines: std.StringArrayHashMap(Value)) !void {
- const values = defines.values();
- for (defines.keys(), 0..) |name, i| {
- try renderValueNasm(output, name, values[i]);
- }
+fn render_nasm(bw: *Writer, defines: std.StringArrayHashMap(Value)) !void {
+ for (defines.keys(), defines.values()) |name, value| try renderValueNasm(bw, name, value);
}
-fn renderValueC(output: *std.ArrayList(u8), name: []const u8, value: Value) !void {
+fn renderValueC(bw: *Writer, name: []const u8, value: Value) !void {
switch (value) {
- .undef => {
- try output.appendSlice("/* #undef ");
- try output.appendSlice(name);
- try output.appendSlice(" */\n");
- },
- .defined => {
- try output.appendSlice("#define ");
- try output.appendSlice(name);
- try output.appendSlice("\n");
- },
- .boolean => |b| {
- try output.appendSlice("#define ");
- try output.appendSlice(name);
- try output.appendSlice(if (b) " 1\n" else " 0\n");
- },
- .int => |i| {
- try output.writer().print("#define {s} {d}\n", .{ name, i });
- },
- .ident => |ident| {
- try output.writer().print("#define {s} {s}\n", .{ name, ident });
- },
- .string => |string| {
- // TODO: use C-specific escaping instead of zig string literals
- try output.writer().print("#define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) });
- },
+ .undef => try bw.print("/* #undef {s} */\n", .{name}),
+ .defined => try bw.print("#define {s}\n", .{name}),
+ .boolean => |b| try bw.print("#define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }),
+ .int => |i| try bw.print("#define {s} {d}\n", .{ name, i }),
+ .ident => |ident| try bw.print("#define {s} {s}\n", .{ name, ident }),
+ // TODO: use C-specific escaping instead of zig string literals
+ .string => |string| try bw.print("#define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }),
}
}
-fn renderValueNasm(output: *std.ArrayList(u8), name: []const u8, value: Value) !void {
+fn renderValueNasm(bw: *Writer, name: []const u8, value: Value) !void {
switch (value) {
- .undef => {
- try output.appendSlice("; %undef ");
- try output.appendSlice(name);
- try output.appendSlice("\n");
- },
- .defined => {
- try output.appendSlice("%define ");
- try output.appendSlice(name);
- try output.appendSlice("\n");
- },
- .boolean => |b| {
- try output.appendSlice("%define ");
- try output.appendSlice(name);
- try output.appendSlice(if (b) " 1\n" else " 0\n");
- },
- .int => |i| {
- try output.writer().print("%define {s} {d}\n", .{ name, i });
- },
- .ident => |ident| {
- try output.writer().print("%define {s} {s}\n", .{ name, ident });
- },
- .string => |string| {
- // TODO: use nasm-specific escaping instead of zig string literals
- try output.writer().print("%define {s} \"{}\"\n", .{ name, std.zig.fmtEscapes(string) });
- },
+ .undef => try bw.print("; %undef {s}\n", .{name}),
+ .defined => try bw.print("%define {s}\n", .{name}),
+ .boolean => |b| try bw.print("%define {s} {c}\n", .{ name, @as(u8, '0') + @intFromBool(b) }),
+ .int => |i| try bw.print("%define {s} {d}\n", .{ name, i }),
+ .ident => |ident| try bw.print("%define {s} {s}\n", .{ name, ident }),
+ // TODO: use nasm-specific escaping instead of zig string literals
+ .string => |string| try bw.print("%define {s} \"{f}\"\n", .{ name, std.zig.fmtString(string) }),
}
}
fn expand_variables_autoconf_at(
- output: *std.ArrayList(u8),
+ bw: *Writer,
contents: []const u8,
values: std.StringArrayHashMap(Value),
used: []bool,
@@ -637,23 +595,17 @@ fn expand_variables_autoconf_at(
const key = contents[curr + 1 .. close_pos];
const index = values.getIndex(key) orelse {
// Report the missing key to the caller.
- try output.appendSlice(key);
+ try bw.writeAll(key);
return error.MissingValue;
};
const value = values.unmanaged.entries.slice().items(.value)[index];
used[index] = true;
- try output.appendSlice(contents[source_offset..curr]);
+ try bw.writeAll(contents[source_offset..curr]);
switch (value) {
.undef, .defined => {},
- .boolean => |b| {
- try output.append(if (b) '1' else '0');
- },
- .int => |i| {
- try output.writer().print("{d}", .{i});
- },
- .ident, .string => |s| {
- try output.appendSlice(s);
- },
+ .boolean => |b| try bw.writeByte(@as(u8, '0') + @intFromBool(b)),
+ .int => |i| try bw.print("{d}", .{i}),
+ .ident, .string => |s| try bw.writeAll(s),
}
curr = close_pos;
@@ -661,7 +613,7 @@ fn expand_variables_autoconf_at(
}
}
- try output.appendSlice(contents[source_offset..]);
+ try bw.writeAll(contents[source_offset..]);
}
fn expand_variables_cmake(
@@ -669,7 +621,7 @@ fn expand_variables_cmake(
contents: []const u8,
values: std.StringArrayHashMap(Value),
) ![]const u8 {
- var result = std.ArrayList(u8).init(allocator);
+ var result: std.ArrayList(u8) = .init(allocator);
errdefer result.deinit();
const valid_varname_chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789/_.+-";
@@ -681,7 +633,7 @@ fn expand_variables_cmake(
source: usize,
target: usize,
};
- var var_stack = std.ArrayList(Position).init(allocator);
+ var var_stack: std.ArrayList(Position) = .init(allocator);
defer var_stack.deinit();
loop: while (curr < contents.len) : (curr += 1) {
switch (contents[curr]) {
@@ -707,7 +659,7 @@ fn expand_variables_cmake(
try result.append(if (b) '1' else '0');
},
.int => |i| {
- try result.writer().print("{d}", .{i});
+ try result.print("{d}", .{i});
},
.ident, .string => |s| {
try result.appendSlice(s);
@@ -764,7 +716,7 @@ fn expand_variables_cmake(
try result.append(if (b) '1' else '0');
},
.int => |i| {
- try result.writer().print("{d}", .{i});
+ try result.print("{d}", .{i});
},
.ident, .string => |s| {
try result.appendSlice(s);
@@ -801,17 +753,17 @@ fn testReplaceVariablesAutoconfAt(
expected: []const u8,
values: std.StringArrayHashMap(Value),
) !void {
- var output = std.ArrayList(u8).init(allocator);
+ var output: std.io.Writer.Allocating = .init(allocator);
defer output.deinit();
const used = try allocator.alloc(bool, values.count());
for (used) |*u| u.* = false;
defer allocator.free(used);
- try expand_variables_autoconf_at(&output, contents, values, used);
+ try expand_variables_autoconf_at(&output.interface, contents, values, used);
for (used) |u| if (!u) return error.UnusedValue;
- try std.testing.expectEqualStrings(expected, output.items);
+ try std.testing.expectEqualStrings(expected, output.getWritten());
}
fn testReplaceVariablesCMake(
@@ -828,7 +780,7 @@ fn testReplaceVariablesCMake(
test "expand_variables_autoconf_at simple cases" {
const allocator = std.testing.allocator;
- var values = std.StringArrayHashMap(Value).init(allocator);
+ var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// empty strings are preserved
@@ -924,7 +876,7 @@ test "expand_variables_autoconf_at simple cases" {
test "expand_variables_autoconf_at edge cases" {
const allocator = std.testing.allocator;
- var values = std.StringArrayHashMap(Value).init(allocator);
+ var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// @-vars resolved only when they wrap valid characters, otherwise considered literals
@@ -940,7 +892,7 @@ test "expand_variables_autoconf_at edge cases" {
test "expand_variables_cmake simple cases" {
const allocator = std.testing.allocator;
- var values = std.StringArrayHashMap(Value).init(allocator);
+ var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
try values.putNoClobber("undef", .undef);
@@ -1028,7 +980,7 @@ test "expand_variables_cmake simple cases" {
test "expand_variables_cmake edge cases" {
const allocator = std.testing.allocator;
- var values = std.StringArrayHashMap(Value).init(allocator);
+ var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
// special symbols
@@ -1089,7 +1041,7 @@ test "expand_variables_cmake edge cases" {
test "expand_variables_cmake escaped characters" {
const allocator = std.testing.allocator;
- var values = std.StringArrayHashMap(Value).init(allocator);
+ var values: std.StringArrayHashMap(Value) = .init(allocator);
defer values.deinit();
try values.putNoClobber("string", Value{ .string = "text" });
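For illustration only (not part of this commit): a minimal sketch of the std.io.Writer.Allocating pattern ConfigHeader switches to, assuming current std semantics — the allocating writer exposes a generic *std.io.Writer through .interface and the accumulated bytes through getWritten()/toOwnedSlice(). The macro name is invented.

const std = @import("std");

test "Writer.Allocating collects formatted output" {
    const gpa = std.testing.allocator;
    var aw: std.io.Writer.Allocating = .init(gpa);
    defer aw.deinit();
    const w = &aw.interface;
    try w.writeAll("#define ");
    try w.print("{s} {d}\n", .{ "EXAMPLE_MACRO", 1 });
    try std.testing.expectEqualStrings("#define EXAMPLE_MACRO 1\n", aw.getWritten());
}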
lib/std/Build/Step/InstallArtifact.zig
@@ -164,7 +164,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const full_h_prefix = b.getInstallPath(h_dir, dir.dest_rel_path);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
- return step.fail("unable to open source directory '{}': {s}", .{
+ return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};
lib/std/Build/Step/InstallDir.zig
@@ -65,7 +65,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const src_dir_path = install_dir.options.source_dir.getPath3(b, step);
const need_derived_inputs = try step.addDirectoryWatchInput(install_dir.options.source_dir);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
- return step.fail("unable to open source directory '{}': {s}", .{
+ return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};
lib/std/Build/Step/Options.zig
@@ -12,23 +12,23 @@ pub const base_id: Step.Id = .options;
step: Step,
generated_file: GeneratedFile,
-contents: std.ArrayList(u8),
-args: std.ArrayList(Arg),
-encountered_types: std.StringHashMap(void),
+contents: std.ArrayListUnmanaged(u8),
+args: std.ArrayListUnmanaged(Arg),
+encountered_types: std.StringHashMapUnmanaged(void),
pub fn create(owner: *std.Build) *Options {
const options = owner.allocator.create(Options) catch @panic("OOM");
options.* = .{
- .step = Step.init(.{
+ .step = .init(.{
.id = base_id,
.name = "options",
.owner = owner,
.makeFn = make,
}),
.generated_file = undefined,
- .contents = std.ArrayList(u8).init(owner.allocator),
- .args = std.ArrayList(Arg).init(owner.allocator),
- .encountered_types = std.StringHashMap(void).init(owner.allocator),
+ .contents = .empty,
+ .args = .empty,
+ .encountered_types = .empty,
};
options.generated_file = .{ .step = &options.step };
@@ -40,110 +40,119 @@ pub fn addOption(options: *Options, comptime T: type, name: []const u8, value: T
}
fn addOptionFallible(options: *Options, comptime T: type, name: []const u8, value: T) !void {
- const out = options.contents.writer();
- try printType(options, out, T, value, 0, name);
+ try printType(options, &options.contents, T, value, 0, name);
}
-fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent: u8, name: ?[]const u8) !void {
+fn printType(
+ options: *Options,
+ out: *std.ArrayListUnmanaged(u8),
+ comptime T: type,
+ value: T,
+ indent: u8,
+ name: ?[]const u8,
+) !void {
+ const gpa = options.step.owner.allocator;
switch (T) {
[]const []const u8 => {
if (name) |payload| {
- try out.print("pub const {}: []const []const u8 = ", .{std.zig.fmtId(payload)});
+ try out.print(gpa, "pub const {f}: []const []const u8 = ", .{std.zig.fmtId(payload)});
}
- try out.writeAll("&[_][]const u8{\n");
+ try out.appendSlice(gpa, "&[_][]const u8{\n");
for (value) |slice| {
- try out.writeByteNTimes(' ', indent);
- try out.print(" \"{}\",\n", .{std.zig.fmtEscapes(slice)});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " \"{f}\",\n", .{std.zig.fmtString(slice)});
}
if (name != null) {
- try out.writeAll("};\n");
+ try out.appendSlice(gpa, "};\n");
} else {
- try out.writeAll("},\n");
+ try out.appendSlice(gpa, "},\n");
}
return;
},
[]const u8 => {
if (name) |some| {
- try out.print("pub const {}: []const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) });
+ try out.print(gpa, "pub const {f}: []const u8 = \"{f}\";", .{
+ std.zig.fmtId(some), std.zig.fmtString(value),
+ });
} else {
- try out.print("\"{}\",", .{std.zig.fmtEscapes(value)});
+ try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)});
}
- return out.writeAll("\n");
+ return out.appendSlice(gpa, "\n");
},
[:0]const u8 => {
if (name) |some| {
- try out.print("pub const {}: [:0]const u8 = \"{}\";", .{ std.zig.fmtId(some), std.zig.fmtEscapes(value) });
+ try out.print(gpa, "pub const {f}: [:0]const u8 = \"{f}\";", .{ std.zig.fmtId(some), std.zig.fmtString(value) });
} else {
- try out.print("\"{}\",", .{std.zig.fmtEscapes(value)});
+ try out.print(gpa, "\"{f}\",", .{std.zig.fmtString(value)});
}
- return out.writeAll("\n");
+ return out.appendSlice(gpa, "\n");
},
?[]const u8 => {
if (name) |some| {
- try out.print("pub const {}: ?[]const u8 = ", .{std.zig.fmtId(some)});
+ try out.print(gpa, "pub const {f}: ?[]const u8 = ", .{std.zig.fmtId(some)});
}
if (value) |payload| {
- try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)});
+ try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)});
} else {
- try out.writeAll("null");
+ try out.appendSlice(gpa, "null");
}
if (name != null) {
- try out.writeAll(";\n");
+ try out.appendSlice(gpa, ";\n");
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
return;
},
?[:0]const u8 => {
if (name) |some| {
- try out.print("pub const {}: ?[:0]const u8 = ", .{std.zig.fmtId(some)});
+ try out.print(gpa, "pub const {f}: ?[:0]const u8 = ", .{std.zig.fmtId(some)});
}
if (value) |payload| {
- try out.print("\"{}\"", .{std.zig.fmtEscapes(payload)});
+ try out.print(gpa, "\"{f}\"", .{std.zig.fmtString(payload)});
} else {
- try out.writeAll("null");
+ try out.appendSlice(gpa, "null");
}
if (name != null) {
- try out.writeAll(";\n");
+ try out.appendSlice(gpa, ";\n");
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
return;
},
std.SemanticVersion => {
if (name) |some| {
- try out.print("pub const {}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)});
+ try out.print(gpa, "pub const {f}: @import(\"std\").SemanticVersion = ", .{std.zig.fmtId(some)});
}
- try out.writeAll(".{\n");
- try out.writeByteNTimes(' ', indent);
- try out.print(" .major = {d},\n", .{value.major});
- try out.writeByteNTimes(' ', indent);
- try out.print(" .minor = {d},\n", .{value.minor});
- try out.writeByteNTimes(' ', indent);
- try out.print(" .patch = {d},\n", .{value.patch});
+ try out.appendSlice(gpa, ".{\n");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .major = {d},\n", .{value.major});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .minor = {d},\n", .{value.minor});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .patch = {d},\n", .{value.patch});
if (value.pre) |some| {
- try out.writeByteNTimes(' ', indent);
- try out.print(" .pre = \"{}\",\n", .{std.zig.fmtEscapes(some)});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .pre = \"{f}\",\n", .{std.zig.fmtString(some)});
}
if (value.build) |some| {
- try out.writeByteNTimes(' ', indent);
- try out.print(" .build = \"{}\",\n", .{std.zig.fmtEscapes(some)});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .build = \"{f}\",\n", .{std.zig.fmtString(some)});
}
if (name != null) {
- try out.writeAll("};\n");
+ try out.appendSlice(gpa, "};\n");
} else {
- try out.writeAll("},\n");
+ try out.appendSlice(gpa, "},\n");
}
return;
},
@@ -153,21 +162,21 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
switch (@typeInfo(T)) {
.array => {
if (name) |some| {
- try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
+ try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
- try out.print("{s} {{\n", .{@typeName(T)});
+ try out.print(gpa, "{s} {{\n", .{@typeName(T)});
for (value) |item| {
- try out.writeByteNTimes(' ', indent + 4);
+ try out.appendNTimes(gpa, ' ', indent + 4);
try printType(options, out, @TypeOf(item), item, indent + 4, null);
}
- try out.writeByteNTimes(' ', indent);
- try out.writeAll("}");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, "}");
if (name != null) {
- try out.writeAll(";\n");
+ try out.appendSlice(gpa, ";\n");
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
return;
},
@@ -177,27 +186,27 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
}
if (name) |some| {
- try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
+ try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
- try out.print("&[_]{s} {{\n", .{@typeName(p.child)});
+ try out.print(gpa, "&[_]{s} {{\n", .{@typeName(p.child)});
for (value) |item| {
- try out.writeByteNTimes(' ', indent + 4);
+ try out.appendNTimes(gpa, ' ', indent + 4);
try printType(options, out, @TypeOf(item), item, indent + 4, null);
}
- try out.writeByteNTimes(' ', indent);
- try out.writeAll("}");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, "}");
if (name != null) {
- try out.writeAll(";\n");
+ try out.appendSlice(gpa, ";\n");
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
return;
},
.optional => {
if (name) |some| {
- try out.print("pub const {}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
+ try out.print(gpa, "pub const {f}: {s} = ", .{ std.zig.fmtId(some), @typeName(T) });
}
if (value) |inner| {
@@ -206,13 +215,13 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
_ = options.contents.pop();
_ = options.contents.pop();
} else {
- try out.writeAll("null");
+ try out.appendSlice(gpa, "null");
}
if (name != null) {
- try out.writeAll(";\n");
+ try out.appendSlice(gpa, ";\n");
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
return;
},
@@ -224,9 +233,9 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
.null,
=> {
if (name) |some| {
- try out.print("pub const {}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value });
+ try out.print(gpa, "pub const {f}: {s} = {any};\n", .{ std.zig.fmtId(some), @typeName(T), value });
} else {
- try out.print("{any},\n", .{value});
+ try out.print(gpa, "{any},\n", .{value});
}
return;
},
@@ -234,10 +243,10 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
try printEnum(options, out, T, info, indent);
if (name) |some| {
- try out.print("pub const {}: {} = .{p_};\n", .{
+ try out.print(gpa, "pub const {f}: {f} = .{f};\n", .{
std.zig.fmtId(some),
std.zig.fmtId(@typeName(T)),
- std.zig.fmtId(@tagName(value)),
+ std.zig.fmtIdFlags(@tagName(value), .{ .allow_underscore = true, .allow_primitive = true }),
});
}
return;
@@ -246,7 +255,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
try printStruct(options, out, T, info, indent);
if (name) |some| {
- try out.print("pub const {}: {} = ", .{
+ try out.print(gpa, "pub const {f}: {f} = ", .{
std.zig.fmtId(some),
std.zig.fmtId(@typeName(T)),
});
@@ -258,7 +267,7 @@ fn printType(options: *Options, out: anytype, comptime T: type, value: T, indent
}
}
-fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, indent: u8) !void {
+fn printUserDefinedType(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, indent: u8) !void {
switch (@typeInfo(T)) {
.@"enum" => |info| {
return try printEnum(options, out, T, info, indent);
@@ -270,94 +279,119 @@ fn printUserDefinedType(options: *Options, out: anytype, comptime T: type, inden
}
}
-fn printEnum(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Enum, indent: u8) !void {
- const gop = try options.encountered_types.getOrPut(@typeName(T));
+fn printEnum(
+ options: *Options,
+ out: *std.ArrayListUnmanaged(u8),
+ comptime T: type,
+ comptime val: std.builtin.Type.Enum,
+ indent: u8,
+) !void {
+ const gpa = options.step.owner.allocator;
+ const gop = try options.encountered_types.getOrPut(gpa, @typeName(T));
if (gop.found_existing) return;
- try out.writeByteNTimes(' ', indent);
- try out.print("pub const {} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) });
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, "pub const {f} = enum ({s}) {{\n", .{ std.zig.fmtId(@typeName(T)), @typeName(val.tag_type) });
inline for (val.fields) |field| {
- try out.writeByteNTimes(' ', indent);
- try out.print(" {p} = {d},\n", .{ std.zig.fmtId(field.name), field.value });
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " {f} = {d},\n", .{
+ std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true }), field.value,
+ });
}
if (!val.is_exhaustive) {
- try out.writeByteNTimes(' ', indent);
- try out.writeAll(" _,\n");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, " _,\n");
}
- try out.writeByteNTimes(' ', indent);
- try out.writeAll("};\n");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, "};\n");
}
-fn printStruct(options: *Options, out: anytype, comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
- const gop = try options.encountered_types.getOrPut(@typeName(T));
+fn printStruct(options: *Options, out: *std.ArrayListUnmanaged(u8), comptime T: type, comptime val: std.builtin.Type.Struct, indent: u8) !void {
+ const gpa = options.step.owner.allocator;
+ const gop = try options.encountered_types.getOrPut(gpa, @typeName(T));
if (gop.found_existing) return;
- try out.writeByteNTimes(' ', indent);
- try out.print("pub const {} = ", .{std.zig.fmtId(@typeName(T))});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, "pub const {f} = ", .{std.zig.fmtId(@typeName(T))});
switch (val.layout) {
- .@"extern" => try out.writeAll("extern struct"),
- .@"packed" => try out.writeAll("packed struct"),
- else => try out.writeAll("struct"),
+ .@"extern" => try out.appendSlice(gpa, "extern struct"),
+ .@"packed" => try out.appendSlice(gpa, "packed struct"),
+ else => try out.appendSlice(gpa, "struct"),
}
- try out.writeAll(" {\n");
+ try out.appendSlice(gpa, " {\n");
inline for (val.fields) |field| {
- try out.writeByteNTimes(' ', indent);
+ try out.appendNTimes(gpa, ' ', indent);
const type_name = @typeName(field.type);
        // If the type name doesn't contain a '.', the type is from zig builtins.
if (std.mem.containsAtLeast(u8, type_name, 1, ".")) {
- try out.print(" {p_}: {}", .{ std.zig.fmtId(field.name), std.zig.fmtId(type_name) });
+ try out.print(gpa, " {f}: {f}", .{
+ std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }),
+ std.zig.fmtId(type_name),
+ });
} else {
- try out.print(" {p_}: {s}", .{ std.zig.fmtId(field.name), type_name });
+ try out.print(gpa, " {f}: {s}", .{
+ std.zig.fmtIdFlags(field.name, .{ .allow_underscore = true, .allow_primitive = true }),
+ type_name,
+ });
}
if (field.defaultValue()) |default_value| {
- try out.writeAll(" = ");
+ try out.appendSlice(gpa, " = ");
switch (@typeInfo(@TypeOf(default_value))) {
- .@"enum" => try out.print(".{s},\n", .{@tagName(default_value)}),
+ .@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(default_value)}),
.@"struct" => |info| {
try printStructValue(options, out, info, default_value, indent + 4);
},
else => try printType(options, out, @TypeOf(default_value), default_value, indent, null),
}
} else {
- try out.writeAll(",\n");
+ try out.appendSlice(gpa, ",\n");
}
}
// TODO: write declarations
- try out.writeByteNTimes(' ', indent);
- try out.writeAll("};\n");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, "};\n");
inline for (val.fields) |field| {
try printUserDefinedType(options, out, field.type, 0);
}
}
-fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.builtin.Type.Struct, val: anytype, indent: u8) !void {
- try out.writeAll(".{\n");
+fn printStructValue(
+ options: *Options,
+ out: *std.ArrayListUnmanaged(u8),
+ comptime struct_val: std.builtin.Type.Struct,
+ val: anytype,
+ indent: u8,
+) !void {
+ const gpa = options.step.owner.allocator;
+ try out.appendSlice(gpa, ".{\n");
if (struct_val.is_tuple) {
inline for (struct_val.fields) |field| {
- try out.writeByteNTimes(' ', indent);
+ try out.appendNTimes(gpa, ' ', indent);
try printType(options, out, @TypeOf(@field(val, field.name)), @field(val, field.name), indent, null);
}
} else {
inline for (struct_val.fields) |field| {
- try out.writeByteNTimes(' ', indent);
- try out.print(" .{p_} = ", .{std.zig.fmtId(field.name)});
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.print(gpa, " .{f} = ", .{
+ std.zig.fmtIdFlags(field.name, .{ .allow_primitive = true, .allow_underscore = true }),
+ });
const field_name = @field(val, field.name);
switch (@typeInfo(@TypeOf(field_name))) {
- .@"enum" => try out.print(".{s},\n", .{@tagName(field_name)}),
+ .@"enum" => try out.print(gpa, ".{s},\n", .{@tagName(field_name)}),
.@"struct" => |struct_info| {
try printStructValue(options, out, struct_info, field_name, indent + 4);
},
@@ -367,10 +401,10 @@ fn printStructValue(options: *Options, out: anytype, comptime struct_val: std.bu
}
if (indent == 0) {
- try out.writeAll("};\n");
+ try out.appendSlice(gpa, "};\n");
} else {
- try out.writeByteNTimes(' ', indent);
- try out.writeAll("},\n");
+ try out.appendNTimes(gpa, ' ', indent);
+ try out.appendSlice(gpa, "},\n");
}
}
@@ -440,7 +474,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
error.FileNotFound => {
const sub_dirname = fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_dirname) catch |e| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_dirname, @errorName(e),
});
};
@@ -452,13 +486,13 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
const tmp_sub_path_dirname = fs.path.dirname(tmp_sub_path).?;
b.cache_root.handle.makePath(tmp_sub_path_dirname) catch |err| {
- return step.fail("unable to make temporary directory '{}{s}': {s}", .{
+ return step.fail("unable to make temporary directory '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path_dirname, @errorName(err),
});
};
b.cache_root.handle.writeFile(.{ .sub_path = tmp_sub_path, .data = options.contents.items }) catch |err| {
- return step.fail("unable to write options to '{}{s}': {s}", .{
+ return step.fail("unable to write options to '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path, @errorName(err),
});
};
@@ -467,7 +501,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
error.PathAlreadyExists => {
// Other process beat us to it. Clean up the temp file.
b.cache_root.handle.deleteFile(tmp_sub_path) catch |e| {
- try step.addError("warning: unable to delete temp file '{}{s}': {s}", .{
+ try step.addError("warning: unable to delete temp file '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path, @errorName(e),
});
};
@@ -475,7 +509,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
return;
},
else => {
- return step.fail("unable to rename options from '{}{s}' to '{}{s}': {s}", .{
+ return step.fail("unable to rename options from '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_sub_path,
b.cache_root, sub_path,
@errorName(err),
@@ -483,7 +517,7 @@ fn make(step: *Step, make_options: Step.MakeOptions) !void {
},
};
},
- else => |e| return step.fail("unable to access options file '{}{s}': {s}", .{
+ else => |e| return step.fail("unable to access options file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(e),
}),
}
@@ -643,5 +677,5 @@ test Options {
\\
, options.contents.items);
- _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(0), .zig);
+ _ = try std.zig.Ast.parse(arena.allocator(), try options.contents.toOwnedSliceSentinel(arena.allocator(), 0), .zig);
}
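For illustration only (not part of this commit): a sketch of the unmanaged-container style Options now uses, assuming current std semantics — the list starts as .empty and every mutation (appendSlice, print, deinit) takes the allocator explicitly; the option name is invented.

const std = @import("std");

test "ArrayListUnmanaged(u8) with explicit allocator" {
    const gpa = std.testing.allocator;
    var contents: std.ArrayListUnmanaged(u8) = .empty;
    defer contents.deinit(gpa);
    try contents.appendSlice(gpa, "pub const ");
    try contents.print(gpa, "{f} = {d};\n", .{ std.zig.fmtId("enable_foo"), 1 });
    try std.testing.expectEqualStrings("pub const enable_foo = 1;\n", contents.items);
}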
lib/std/Build/Step/Run.zig
@@ -832,7 +832,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
else => unreachable,
};
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, output_sub_dir_path, @errorName(err),
});
};
@@ -864,7 +864,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
else => unreachable,
};
b.cache_root.handle.makePath(output_sub_dir_path) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, output_sub_dir_path, @errorName(err),
});
};
@@ -903,21 +903,21 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |err| {
if (err == error.PathAlreadyExists) {
b.cache_root.handle.deleteTree(o_sub_path) catch |del_err| {
- return step.fail("unable to remove dir '{}'{s}: {s}", .{
+ return step.fail("unable to remove dir '{f}'{s}: {s}", .{
b.cache_root,
tmp_dir_path,
@errorName(del_err),
});
};
b.cache_root.handle.rename(tmp_dir_path, o_sub_path) catch |retry_err| {
- return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{
+ return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_dir_path,
b.cache_root, o_sub_path,
@errorName(retry_err),
});
};
} else {
- return step.fail("unable to rename dir '{}{s}' to '{}{s}': {s}", .{
+ return step.fail("unable to rename dir '{f}{s}' to '{f}{s}': {s}", .{
b.cache_root, tmp_dir_path,
b.cache_root, o_sub_path,
@errorName(err),
@@ -964,7 +964,7 @@ pub fn rerunInFuzzMode(
.artifact => |pa| {
const artifact = pa.artifact;
const file_path: []const u8 = p: {
- if (artifact == run.producer.?) break :p b.fmt("{}", .{run.rebuilt_executable.?});
+ if (artifact == run.producer.?) break :p b.fmt("{f}", .{run.rebuilt_executable.?});
break :p artifact.installed_path orelse artifact.generated_bin.?.path.?;
};
try argv_list.append(arena, b.fmt("{s}{s}", .{
@@ -1011,24 +1011,17 @@ fn populateGeneratedPaths(
}
}
-fn formatTerm(
- term: ?std.process.Child.Term,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = fmt;
- _ = options;
+fn formatTerm(term: ?std.process.Child.Term, w: *std.io.Writer) std.io.Writer.Error!void {
if (term) |t| switch (t) {
- .Exited => |code| try writer.print("exited with code {}", .{code}),
- .Signal => |sig| try writer.print("terminated with signal {}", .{sig}),
- .Stopped => |sig| try writer.print("stopped with signal {}", .{sig}),
- .Unknown => |code| try writer.print("terminated for unknown reason with code {}", .{code}),
+ .Exited => |code| try w.print("exited with code {d}", .{code}),
+ .Signal => |sig| try w.print("terminated with signal {d}", .{sig}),
+ .Stopped => |sig| try w.print("stopped with signal {d}", .{sig}),
+ .Unknown => |code| try w.print("terminated for unknown reason with code {d}", .{code}),
} else {
- try writer.writeAll("exited with any code");
+ try w.writeAll("exited with any code");
}
}
-fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(formatTerm) {
+fn fmtTerm(term: ?std.process.Child.Term) std.fmt.Formatter(?std.process.Child.Term, formatTerm) {
return .{ .data = term };
}
@@ -1262,12 +1255,12 @@ fn runCommand(
const sub_path = b.pathJoin(&output_components);
const sub_path_dirname = fs.path.dirname(sub_path).?;
b.cache_root.handle.makePath(sub_path_dirname) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, sub_path_dirname, @errorName(err),
});
};
b.cache_root.handle.writeFile(.{ .sub_path = sub_path, .data = stream.bytes.? }) catch |err| {
- return step.fail("unable to write file '{}{s}': {s}", .{
+ return step.fail("unable to write file '{f}{s}': {s}", .{
b.cache_root, sub_path, @errorName(err),
});
};
@@ -1346,7 +1339,7 @@ fn runCommand(
},
.expect_term => |expected_term| {
if (!termMatches(expected_term, result.term)) {
- return step.fail("the following command {} (expected {}):\n{s}", .{
+ return step.fail("the following command {f} (expected {f}):\n{s}", .{
fmtTerm(result.term),
fmtTerm(expected_term),
try Step.allocPrintCmd(arena, cwd, final_argv),
@@ -1366,7 +1359,7 @@ fn runCommand(
};
const expected_term: std.process.Child.Term = .{ .Exited = 0 };
if (!termMatches(expected_term, result.term)) {
- return step.fail("{s}the following command {} (expected {}):\n{s}", .{
+ return step.fail("{s}the following command {f} (expected {f}):\n{s}", .{
prefix,
fmtTerm(result.term),
fmtTerm(expected_term),
@@ -1797,10 +1790,10 @@ fn evalGeneric(run: *Run, child: *std.process.Child) !StdIoResult {
stdout_bytes = try poller.fifo(.stdout).toOwnedSlice();
stderr_bytes = try poller.fifo(.stderr).toOwnedSlice();
} else {
- stdout_bytes = try stdout.reader().readAllAlloc(arena, run.max_stdio_size);
+ stdout_bytes = try stdout.deprecatedReader().readAllAlloc(arena, run.max_stdio_size);
}
} else if (child.stderr) |stderr| {
- stderr_bytes = try stderr.reader().readAllAlloc(arena, run.max_stdio_size);
+ stderr_bytes = try stderr.deprecatedReader().readAllAlloc(arena, run.max_stdio_size);
}
if (stderr_bytes) |bytes| if (bytes.len > 0) {
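A hedged, stand-alone sketch of the formatter shape the formatTerm rewrite above introduces: the callback receives the data plus a `*std.io.Writer`, `std.fmt.Formatter` now takes the data type as its first parameter, and `{f}` selects the formatter at the call site. The `u16` wrapper here is invented for illustration.

    const std = @import("std");

    fn formatPort(port: u16, w: *std.io.Writer) std.io.Writer.Error!void {
        try w.print("port {d}", .{port});
    }

    fn fmtPort(port: u16) std.fmt.Formatter(u16, formatPort) {
        return .{ .data = port };
    }

    test "two-parameter Formatter with {f}" {
        var buf: [32]u8 = undefined;
        const s = try std.fmt.bufPrint(&buf, "listening on {f}", .{fmtPort(8080)});
        try std.testing.expectEqualStrings("listening on port 8080", s);
    }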
lib/std/Build/Step/UpdateSourceFiles.zig
@@ -76,7 +76,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
for (usf.output_source_files.items) |output_source_file| {
if (fs.path.dirname(output_source_file.sub_path)) |dirname| {
b.build_root.handle.makePath(dirname) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.build_root, dirname, @errorName(err),
});
};
@@ -84,7 +84,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (output_source_file.contents) {
.bytes => |bytes| {
b.build_root.handle.writeFile(.{ .sub_path = output_source_file.sub_path, .data = bytes }) catch |err| {
- return step.fail("unable to write file '{}{s}': {s}", .{
+ return step.fail("unable to write file '{f}{s}': {s}", .{
b.build_root, output_source_file.sub_path, @errorName(err),
});
};
@@ -101,7 +101,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
output_source_file.sub_path,
.{},
) catch |err| {
- return step.fail("unable to update file from '{s}' to '{}{s}': {s}", .{
+ return step.fail("unable to update file from '{s}' to '{f}{s}': {s}", .{
source_path, b.build_root, output_source_file.sub_path, @errorName(err),
});
};
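Illustrative only: the `{}` to `{f}` churn in this file (and the neighboring Build steps) exists because values such as `b.build_root` carry a `format` method, and the reworked std.fmt requires the explicit `{f}` specifier to invoke it. The struct below is invented; its `format` signature matches the one used elsewhere in this diff.

    const std = @import("std");

    const Endpoint = struct {
        host: []const u8,
        port: u16,

        pub fn format(self: Endpoint, w: *std.io.Writer) std.io.Writer.Error!void {
            try w.print("{s}:{d}", .{ self.host, self.port });
        }
    };

    test "types with a format method need {f}" {
        var buf: [64]u8 = undefined;
        const ep: Endpoint = .{ .host = "localhost", .port = 8080 };
        try std.testing.expectEqualStrings("localhost:8080", try std.fmt.bufPrint(&buf, "{f}", .{ep}));
    }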
lib/std/Build/Step/WriteFile.zig
@@ -217,7 +217,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
const src_dir_path = dir.source.getPath3(b, step);
var src_dir = src_dir_path.root_dir.handle.openDir(src_dir_path.subPathOrDot(), .{ .iterate = true }) catch |err| {
- return step.fail("unable to open source directory '{}': {s}", .{
+ return step.fail("unable to open source directory '{f}': {s}", .{
src_dir_path, @errorName(err),
});
};
@@ -258,7 +258,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
write_file.generated_directory.path = try b.cache_root.join(arena, &.{ "o", &digest });
var cache_dir = b.cache_root.handle.makeOpenPath(cache_path, .{}) catch |err| {
- return step.fail("unable to make path '{}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}': {s}", .{
b.cache_root, cache_path, @errorName(err),
});
};
@@ -269,7 +269,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
for (write_file.files.items) |file| {
if (fs.path.dirname(file.sub_path)) |dirname| {
cache_dir.makePath(dirname) catch |err| {
- return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, dirname, @errorName(err),
});
};
@@ -277,7 +277,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
switch (file.contents) {
.bytes => |bytes| {
cache_dir.writeFile(.{ .sub_path = file.sub_path, .data = bytes }) catch |err| {
- return step.fail("unable to write file '{}{s}{c}{s}': {s}", .{
+ return step.fail("unable to write file '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, file.sub_path, @errorName(err),
});
};
@@ -291,7 +291,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
file.sub_path,
.{},
) catch |err| {
- return step.fail("unable to update file from '{s}' to '{}{s}{c}{s}': {s}", .{
+ return step.fail("unable to update file from '{s}' to '{f}{s}{c}{s}': {s}", .{
source_path,
b.cache_root,
cache_path,
@@ -315,7 +315,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
if (dest_dirname.len != 0) {
cache_dir.makePath(dest_dirname) catch |err| {
- return step.fail("unable to make path '{}{s}{c}{s}': {s}", .{
+ return step.fail("unable to make path '{f}{s}{c}{s}': {s}", .{
b.cache_root, cache_path, fs.path.sep, dest_dirname, @errorName(err),
});
};
@@ -338,7 +338,7 @@ fn make(step: *Step, options: Step.MakeOptions) !void {
dest_path,
.{},
) catch |err| {
- return step.fail("unable to update file from '{}' to '{}{s}{c}{s}': {s}", .{
+ return step.fail("unable to update file from '{f}' to '{f}{s}{c}{s}': {s}", .{
src_entry_path, b.cache_root, cache_path, fs.path.sep, dest_path, @errorName(err),
});
};
lib/std/Build/Cache.zig
@@ -68,7 +68,7 @@ const PrefixedPath = struct {
fn findPrefix(cache: *const Cache, file_path: []const u8) !PrefixedPath {
const gpa = cache.gpa;
- const resolved_path = try fs.path.resolve(gpa, &[_][]const u8{file_path});
+ const resolved_path = try fs.path.resolve(gpa, &.{file_path});
errdefer gpa.free(resolved_path);
return findPrefixResolved(cache, resolved_path);
}
@@ -132,7 +132,7 @@ pub const Hasher = crypto.auth.siphash.SipHash128(1, 3);
/// Initial state with random bytes, that can be copied.
/// Refresh this with new random bytes when the manifest
/// format is modified in a non-backwards-compatible way.
-pub const hasher_init: Hasher = Hasher.init(&[_]u8{
+pub const hasher_init: Hasher = Hasher.init(&.{
0x33, 0x52, 0xa2, 0x84,
0xcf, 0x17, 0x56, 0x57,
0x01, 0xbb, 0xcd, 0xe4,
@@ -286,11 +286,8 @@ pub const HashHelper = struct {
pub fn binToHex(bin_digest: BinDigest) HexDigest {
var out_digest: HexDigest = undefined;
- _ = fmt.bufPrint(
- &out_digest,
- "{s}",
- .{fmt.fmtSliceHexLower(&bin_digest)},
- ) catch unreachable;
+ var w: std.io.Writer = .fixed(&out_digest);
+ w.printHex(&bin_digest, .lower) catch unreachable;
return out_digest;
}
@@ -337,7 +334,6 @@ pub const Manifest = struct {
manifest_create: fs.File.OpenError,
manifest_read: fs.File.ReadError,
manifest_lock: fs.File.LockError,
- manifest_seek: fs.File.SeekError,
file_open: FileOp,
file_stat: FileOp,
file_read: FileOp,
@@ -611,12 +607,6 @@ pub const Manifest = struct {
var file = self.files.pop().?;
file.key.deinit(self.cache.gpa);
}
- // Also, seek the file back to the start.
- self.manifest_file.?.seekTo(0) catch |err| {
- self.diagnostic = .{ .manifest_seek = err };
- return error.CacheCheckFailed;
- };
-
switch (try self.hitWithCurrentLock()) {
.hit => break :hit,
.miss => |m| break :digests m.file_digests_populated,
@@ -661,9 +651,8 @@ pub const Manifest = struct {
return true;
}
- /// Assumes that `self.hash.hasher` has been updated only with the original digest, that
- /// `self.files` contains only the original input files, and that `self.manifest_file.?` is
- /// seeked to the start of the file.
+ /// Assumes that `self.hash.hasher` has been updated only with the original digest and that
+ /// `self.files` contains only the original input files.
fn hitWithCurrentLock(self: *Manifest) HitError!union(enum) {
hit,
miss: struct {
@@ -672,12 +661,13 @@ pub const Manifest = struct {
} {
const gpa = self.cache.gpa;
const input_file_count = self.files.entries.len;
-
- const file_contents = self.manifest_file.?.reader().readAllAlloc(gpa, manifest_file_size_max) catch |err| switch (err) {
+ var manifest_reader = self.manifest_file.?.reader(&.{}); // Reads positionally from zero.
+ const limit: std.io.Limit = .limited(manifest_file_size_max);
+ const file_contents = manifest_reader.interface.allocRemaining(gpa, limit) catch |err| switch (err) {
error.OutOfMemory => return error.OutOfMemory,
error.StreamTooLong => return error.OutOfMemory,
- else => |e| {
- self.diagnostic = .{ .manifest_read = e };
+ error.ReadFailed => {
+ self.diagnostic = .{ .manifest_read = manifest_reader.err.? };
return error.CacheCheckFailed;
},
};
@@ -1063,14 +1053,17 @@ pub const Manifest = struct {
}
fn addDepFileMaybePost(self: *Manifest, dir: fs.Dir, dep_file_basename: []const u8) !void {
- const dep_file_contents = try dir.readFileAlloc(self.cache.gpa, dep_file_basename, manifest_file_size_max);
- defer self.cache.gpa.free(dep_file_contents);
+ const gpa = self.cache.gpa;
+ const dep_file_contents = try dir.readFileAlloc(gpa, dep_file_basename, manifest_file_size_max);
+ defer gpa.free(dep_file_contents);
- var error_buf = std.ArrayList(u8).init(self.cache.gpa);
- defer error_buf.deinit();
+ var error_buf: std.ArrayListUnmanaged(u8) = .empty;
+ defer error_buf.deinit(gpa);
- var it: DepTokenizer = .{ .bytes = dep_file_contents };
+ var resolve_buf: std.ArrayListUnmanaged(u8) = .empty;
+ defer resolve_buf.deinit(gpa);
+ var it: DepTokenizer = .{ .bytes = dep_file_contents };
while (it.next()) |token| {
switch (token) {
// We don't care about targets, we only want the prereqs
@@ -1080,16 +1073,14 @@ pub const Manifest = struct {
_ = try self.addFile(file_path, null);
} else try self.addFilePost(file_path),
.prereq_must_resolve => {
- var resolve_buf = std.ArrayList(u8).init(self.cache.gpa);
- defer resolve_buf.deinit();
-
- try token.resolve(resolve_buf.writer());
+ resolve_buf.clearRetainingCapacity();
+ try token.resolve(gpa, &resolve_buf);
if (self.manifest_file == null) {
_ = try self.addFile(resolve_buf.items, null);
} else try self.addFilePost(resolve_buf.items);
},
else => |err| {
- try err.printError(error_buf.writer());
+ try err.printError(gpa, &error_buf);
log.err("failed parsing {s}: {s}", .{ dep_file_basename, error_buf.items });
return error.InvalidDepFile;
},
@@ -1127,24 +1118,25 @@ pub const Manifest = struct {
if (self.manifest_dirty) {
self.manifest_dirty = false;
- var contents = std.ArrayList(u8).init(self.cache.gpa);
- defer contents.deinit();
+ const gpa = self.cache.gpa;
+ var contents: std.ArrayListUnmanaged(u8) = .empty;
+ defer contents.deinit(gpa);
- const writer = contents.writer();
- try writer.writeAll(manifest_header ++ "\n");
+ try contents.appendSlice(gpa, manifest_header ++ "\n");
for (self.files.keys()) |file| {
- try writer.print("{d} {d} {d} {} {d} {s}\n", .{
+ try contents.print(gpa, "{d} {d} {d} {x} {d} {s}\n", .{
file.stat.size,
file.stat.inode,
file.stat.mtime,
- fmt.fmtSliceHexLower(&file.bin_digest),
+ &file.bin_digest,
file.prefixed_path.prefix,
file.prefixed_path.sub_path,
});
}
try manifest_file.setEndPos(contents.items.len);
- try manifest_file.pwriteAll(contents.items, 0);
+ var pos: usize = 0;
+ while (pos < contents.items.len) pos += try manifest_file.pwrite(contents.items[pos..], pos);
}
if (self.want_shared_lock) {
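A minimal sketch of the new read path the Manifest hunk above switches to, using only the calls visible in that hunk: `reader(&.{})`, `interface.allocRemaining`, `std.io.Limit.limited`, and `reader.err`. The helper name is invented; the caller owns the returned memory.

    const std = @import("std");

    fn readWholeFile(gpa: std.mem.Allocator, file: std.fs.File, max_size: usize) ![]u8 {
        var reader = file.reader(&.{}); // reads positionally starting at zero
        const limit: std.io.Limit = .limited(max_size);
        return reader.interface.allocRemaining(gpa, limit) catch |err| switch (err) {
            error.OutOfMemory, error.StreamTooLong => return error.OutOfMemory,
            error.ReadFailed => {
                // The concrete OS error is surfaced on the reader itself.
                std.log.err("read failed: {s}", .{@errorName(reader.err.?)});
                return error.ReadFailed;
            },
        };
    }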
lib/std/Build/Module.zig
@@ -186,7 +186,7 @@ pub const IncludeDir = union(enum) {
.embed_path => |lazy_path| {
// Special case: this is a single arg.
const resolved = lazy_path.getPath3(b, asking_step);
- const arg = b.fmt("--embed-dir={}", .{resolved});
+ const arg = b.fmt("--embed-dir={f}", .{resolved});
return zig_args.append(arg);
},
};
lib/std/Build/Step.zig
@@ -287,7 +287,8 @@ pub fn cast(step: *Step, comptime T: type) ?*T {
/// For debugging purposes, prints identifying information about this Step.
pub fn dump(step: *Step, file: std.fs.File) void {
- const w = file.writer();
+ var fw = file.writer(&.{});
+ const w = &fw.interface;
const tty_config = std.io.tty.detectConfig(file);
const debug_info = std.debug.getSelfDebugInfo() catch |err| {
w.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{
@@ -482,9 +483,9 @@ pub fn evalZigProcess(
pub fn installFile(s: *Step, src_lazy_path: Build.LazyPath, dest_path: []const u8) !std.fs.Dir.PrevStatus {
const b = s.owner;
const src_path = src_lazy_path.getPath3(b, s);
- try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{}", .{src_path}), dest_path });
+ try handleVerbose(b, null, &.{ "install", "-C", b.fmt("{f}", .{src_path}), dest_path });
return src_path.root_dir.handle.updateFile(src_path.sub_path, std.fs.cwd(), dest_path, .{}) catch |err| {
- return s.fail("unable to update file from '{}' to '{s}': {s}", .{
+ return s.fail("unable to update file from '{f}' to '{s}': {s}", .{
src_path, dest_path, @errorName(err),
});
};
@@ -821,7 +822,7 @@ fn failWithCacheError(s: *Step, man: *const Build.Cache.Manifest, err: Build.Cac
switch (err) {
error.CacheCheckFailed => switch (man.diagnostic) {
.none => unreachable,
- .manifest_create, .manifest_read, .manifest_lock, .manifest_seek => |e| return s.fail("failed to check cache: {s} {s}", .{
+ .manifest_create, .manifest_read, .manifest_lock => |e| return s.fail("failed to check cache: {s} {s}", .{
@tagName(man.diagnostic), @errorName(e),
}),
.file_open, .file_stat, .file_read, .file_hash => |op| {
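A short illustrative sketch (names invented) of the pattern `Step.dump` now uses: take the address of the `interface` field of the new `File.Writer` to obtain a `*std.io.Writer`.

    const std = @import("std");

    fn dumpGreeting(file: std.fs.File, name: []const u8) !void {
        var buffer: [256]u8 = undefined;
        var fw = file.writer(&buffer); // pass `&.{}` instead for an unbuffered writer
        const w = &fw.interface;
        try w.print("hello, {s}\n", .{name});
        try w.flush(); // assumed required here since a buffer was supplied
    }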
lib/std/Build/Watch.zig
@@ -211,7 +211,7 @@ const Os = switch (builtin.os.tag) {
.ADD = true,
.ONLYDIR = true,
}, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| {
- fatal("unable to watch {}: {s}", .{ path, @errorName(err) });
+ fatal("unable to watch {f}: {s}", .{ path, @errorName(err) });
};
}
break :rs &dh_gop.value_ptr.reaction_set;
@@ -265,7 +265,7 @@ const Os = switch (builtin.os.tag) {
.ONLYDIR = true,
}, fan_mask, path.root_dir.handle.fd, path.subPathOrDot()) catch |err| switch (err) {
error.FileNotFound => {}, // Expected, harmless.
- else => |e| std.log.warn("unable to unwatch '{}': {s}", .{ path, @errorName(e) }),
+ else => |e| std.log.warn("unable to unwatch '{f}': {s}", .{ path, @errorName(e) }),
};
w.dir_table.swapRemoveAt(i);
@@ -659,7 +659,7 @@ const Os = switch (builtin.os.tag) {
path.root_dir.handle.fd
else
posix.openat(path.root_dir.handle.fd, path.sub_path, dir_open_flags, 0) catch |err| {
- fatal("failed to open directory {}: {s}", .{ path, @errorName(err) });
+ fatal("failed to open directory {f}: {s}", .{ path, @errorName(err) });
};
// Empirically the dir has to stay open or else no events are triggered.
errdefer if (!skip_open_dir) posix.close(dir_fd);
lib/std/crypto/25519/curve25519.zig
@@ -124,9 +124,9 @@ test "curve25519" {
const p = try Curve25519.basePoint.clampedMul(s);
try p.rejectIdentity();
var buf: [128]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E6F2A4D1C28EE5C7AD0329268255A468AD407D2672824C0C0EB30EA6EF450145");
const q = try p.clampedMul(s);
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "3614E119FFE55EC55B87D6B19971A9F4CBC78EFE80BEC55B96392BABCC712537");
try Curve25519.rejectNonCanonical(s);
s[31] |= 0x80;
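A stand-alone sketch of the substitution made throughout these crypto tests: the `std.fmt.fmtSliceHexLower`/`fmtSliceHexUpper` wrappers are gone, and `{x}` / `{X}` format a byte slice (or pointer to an array) directly.

    const std = @import("std");

    test "hex formatting of byte arrays" {
        const bytes = [_]u8{ 0xde, 0xad, 0xbe, 0xef };
        var buf: [2 * bytes.len]u8 = undefined;
        try std.testing.expectEqualStrings("deadbeef", try std.fmt.bufPrint(&buf, "{x}", .{&bytes}));
        try std.testing.expectEqualStrings("DEADBEEF", try std.fmt.bufPrint(&buf, "{X}", .{&bytes}));
    }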
lib/std/crypto/25519/ed25519.zig
@@ -509,8 +509,8 @@ test "key pair creation" {
_ = try fmt.hexToBytes(seed[0..], "8052030376d47112be7f73ed7a019293dd12ad910b654455798b4667d73de166");
const key_pair = try Ed25519.KeyPair.generateDeterministic(seed);
var buf: [256]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.secret_key.toBytes())}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&key_pair.public_key.toBytes())}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.secret_key.toBytes()}), "8052030376D47112BE7F73ED7A019293DD12AD910B654455798B4667D73DE1662D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&key_pair.public_key.toBytes()}), "2D6F7455D97B4A3A10D7293909D1A4F2058CB9A370E43FA8154BB280DB839083");
}
test "signature" {
@@ -520,7 +520,7 @@ test "signature" {
const sig = try key_pair.sign("test", null);
var buf: [128]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&sig.toBytes())}), "10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&sig.toBytes()}), "10A442B4A80CC4225B154F43BEF28D2472CA80221951262EB8E0DF9091575E2687CC486E77263C3418C757522D54F84B0359236ABBBD4ACD20DC297FDCA66808");
try sig.verify("test", key_pair.public_key);
try std.testing.expectError(error.SignatureVerificationFailed, sig.verify("TEST", key_pair.public_key));
}
lib/std/crypto/25519/edwards25519.zig
@@ -546,7 +546,7 @@ test "packing/unpacking" {
var b = Edwards25519.basePoint;
const pk = try b.mul(s);
var buf: [128]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&pk.toBytes())}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&pk.toBytes()}), "074BC7E0FCBD587FDBC0969444245FADC562809C8F6E97E949AF62484B5B81A6");
const small_order_ss: [7][32]u8 = .{
.{
lib/std/crypto/25519/ristretto255.zig
@@ -175,21 +175,21 @@ pub const Ristretto255 = struct {
test "ristretto255" {
const p = Ristretto255.basePoint;
var buf: [256]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&p.toBytes())}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&p.toBytes()}), "E2F2AE0A6ABC4E71A884A961C500515F58E30B6AA582DD8DB6A65945E08D2D76");
var r: [Ristretto255.encoded_length]u8 = undefined;
_ = try fmt.hexToBytes(r[0..], "6a493210f7499cd17fecb510ae0cea23a110e8d5b901f8acadd3095c73a3b919");
var q = try Ristretto255.fromBytes(r);
q = q.dbl().add(p);
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&q.toBytes())}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&q.toBytes()}), "E882B131016B52C1D3337080187CF768423EFCCBB517BB495AB812C4160FF44E");
const s = [_]u8{15} ++ [_]u8{0} ** 31;
const w = try p.mul(s);
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&w.toBytes())}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&w.toBytes()}), "E0C418F7C8D9C4CDD7395B93EA124F3AD99021BB681DFC3302A9D99A2E53E64E");
try std.testing.expect(p.dbl().dbl().dbl().dbl().equivalent(w.add(p)));
const h = [_]u8{69} ** 32 ++ [_]u8{42} ** 32;
const ph = Ristretto255.fromUniform(h);
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&ph.toBytes())}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&ph.toBytes()}), "DCCA54E037A4311EFBEEF413ACD21D35276518970B7A61DC88F8587B493D5E19");
}
lib/std/crypto/25519/scalar.zig
@@ -850,10 +850,10 @@ test "scalar25519" {
var y = x.toBytes();
try rejectNonCanonical(y);
var buf: [128]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&y)}), "1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&y}), "1E979B917937F3DE71D18077F961F6CEFF01030405060708010203040506070F");
const reduced = reduce(field_order_s);
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&reduced)}), "0000000000000000000000000000000000000000000000000000000000000000");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&reduced}), "0000000000000000000000000000000000000000000000000000000000000000");
}
test "non-canonical scalar25519" {
@@ -867,7 +867,7 @@ test "mulAdd overflow check" {
const c: [32]u8 = [_]u8{0xff} ** 32;
const x = mulAdd(a, b, c);
var buf: [128]u8 = undefined;
- try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&x)}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903");
+ try std.testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&x}), "D14DF91389432C25AD60FF9791B9FD1D67BEF517D273ECCE3D9A307C1B419903");
}
test "scalar field inversion" {
lib/std/crypto/tls/Client.zig
@@ -1512,11 +1512,11 @@ fn logSecrets(key_log_file: std.fs.File, context: anytype, secrets: anytype) voi
const locked = if (key_log_file.lock(.exclusive)) |_| true else |_| false;
defer if (locked) key_log_file.unlock();
key_log_file.seekFromEnd(0) catch {};
- inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.writer().print("{s}" ++
- (if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {} {}\n", .{field.name} ++
+ inline for (@typeInfo(@TypeOf(secrets)).@"struct".fields) |field| key_log_file.deprecatedWriter().print("{s}" ++
+ (if (@hasField(@TypeOf(context), "counter")) "_{d}" else "") ++ " {x} {x}\n", .{field.name} ++
(if (@hasField(@TypeOf(context), "counter")) .{context.counter} else .{}) ++ .{
- std.fmt.fmtSliceHexLower(context.client_random),
- std.fmt.fmtSliceHexLower(@field(secrets, field.name)),
+ context.client_random,
+ @field(secrets, field.name),
}) catch {};
}
lib/std/crypto/benchmark.zig
@@ -458,7 +458,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
lib/std/crypto/chacha20.zig
@@ -1145,7 +1145,7 @@ test "xchacha20" {
var c: [m.len]u8 = undefined;
XChaCha20IETF.xor(c[0..], m[0..], 0, key, nonce);
var buf: [2 * c.len]u8 = undefined;
- try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D");
+ try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "E0A1BCF939654AFDBDC1746EC49832647C19D891F0D1A81FC0C1703B4514BDEA584B512F6908C2C5E9DD18D5CBC1805DE5803FE3B9CA5F193FB8359E91FAB0C3BB40309A292EB1CF49685C65C4A3ADF4F11DB0CD2B6B67FBC174BC2E860E8F769FD3565BBFAD1C845E05A0FED9BE167C240D");
}
{
const ad = "Additional data";
@@ -1154,7 +1154,7 @@ test "xchacha20" {
var out: [m.len]u8 = undefined;
try XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key);
var buf: [2 * c.len]u8 = undefined;
- try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{s}", .{std.fmt.fmtSliceHexUpper(&c)}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234");
+ try testing.expectEqualStrings(try std.fmt.bufPrint(&buf, "{X}", .{&c}), "994D2DD32333F48E53650C02C7A2ABB8E018B0836D7175AEC779F52E961780768F815C58F1AA52D211498DB89B9216763F569C9433A6BBFCEFB4D4A49387A4C5207FBB3B5A92B5941294DF30588C6740D39DC16FA1F0E634F7246CF7CDCB978E44347D89381B7A74EB7084F754B90BDE9AAF5A94B8F2A85EFD0B50692AE2D425E234");
try testing.expectEqualSlices(u8, out[0..], m);
c[0] +%= 1;
try testing.expectError(error.AuthenticationFailed, XChaCha20Poly1305.decrypt(out[0..], c[0..m.len], c[m.len..].*, ad, nonce, key));
lib/std/crypto/ml_kem.zig
@@ -1737,11 +1737,11 @@ test "NIST KAT test" {
var f = sha2.Sha256.init(.{});
const fw = f.writer();
var g = NistDRBG.init(seed);
- try std.fmt.format(fw, "# {s}\n\n", .{mode.name});
+ try std.fmt.deprecatedFormat(fw, "# {s}\n\n", .{mode.name});
for (0..100) |i| {
g.fill(&seed);
- try std.fmt.format(fw, "count = {}\n", .{i});
- try std.fmt.format(fw, "seed = {s}\n", .{std.fmt.fmtSliceHexUpper(&seed)});
+ try std.fmt.deprecatedFormat(fw, "count = {}\n", .{i});
+ try std.fmt.deprecatedFormat(fw, "seed = {X}\n", .{&seed});
var g2 = NistDRBG.init(seed);
// This is not equivalent to g2.fill(kseed[:]). As the reference
@@ -1756,16 +1756,16 @@ test "NIST KAT test" {
const e = kp.public_key.encaps(eseed);
const ss2 = try kp.secret_key.decaps(&e.ciphertext);
try testing.expectEqual(ss2, e.shared_secret);
- try std.fmt.format(fw, "pk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.public_key.toBytes())});
- try std.fmt.format(fw, "sk = {s}\n", .{std.fmt.fmtSliceHexUpper(&kp.secret_key.toBytes())});
- try std.fmt.format(fw, "ct = {s}\n", .{std.fmt.fmtSliceHexUpper(&e.ciphertext)});
- try std.fmt.format(fw, "ss = {s}\n\n", .{std.fmt.fmtSliceHexUpper(&e.shared_secret)});
+ try std.fmt.deprecatedFormat(fw, "pk = {X}\n", .{&kp.public_key.toBytes()});
+ try std.fmt.deprecatedFormat(fw, "sk = {X}\n", .{&kp.secret_key.toBytes()});
+ try std.fmt.deprecatedFormat(fw, "ct = {X}\n", .{&e.ciphertext});
+ try std.fmt.deprecatedFormat(fw, "ss = {X}\n\n", .{&e.shared_secret});
}
var out: [32]u8 = undefined;
f.final(&out);
var outHex: [64]u8 = undefined;
- _ = try std.fmt.bufPrint(&outHex, "{s}", .{std.fmt.fmtSliceHexLower(&out)});
+ _ = try std.fmt.bufPrint(&outHex, "{x}", .{&out});
try testing.expectEqual(outHex, modeHash[1].*);
}
}
lib/std/debug/Dwarf.zig
@@ -2302,11 +2302,7 @@ pub const ElfModule = struct {
};
defer debuginfod_dir.close();
- const filename = std.fmt.allocPrint(
- gpa,
- "{s}/debuginfo",
- .{std.fmt.fmtSliceHexLower(id)},
- ) catch break :blk;
+ const filename = std.fmt.allocPrint(gpa, "{x}/debuginfo", .{id}) catch break :blk;
defer gpa.free(filename);
const path: Path = .{
@@ -2330,12 +2326,8 @@ pub const ElfModule = struct {
var id_prefix_buf: [2]u8 = undefined;
var filename_buf: [38 + extension.len]u8 = undefined;
- _ = std.fmt.bufPrint(&id_prefix_buf, "{s}", .{std.fmt.fmtSliceHexLower(id[0..1])}) catch unreachable;
- const filename = std.fmt.bufPrint(
- &filename_buf,
- "{s}" ++ extension,
- .{std.fmt.fmtSliceHexLower(id[1..])},
- ) catch break :blk;
+ _ = std.fmt.bufPrint(&id_prefix_buf, "{x}", .{id[0..1]}) catch unreachable;
+ const filename = std.fmt.bufPrint(&filename_buf, "{x}" ++ extension, .{id[1..]}) catch break :blk;
for (global_debug_directories) |global_directory| {
const path: Path = .{
lib/std/debug/Pdb.zig
@@ -395,7 +395,7 @@ const Msf = struct {
streams: []MsfStream,
fn init(allocator: Allocator, file: File) !Msf {
- const in = file.reader();
+ const in = file.deprecatedReader();
const superblock = try in.readStruct(pdb.SuperBlock);
@@ -514,7 +514,7 @@ const MsfStream = struct {
var offset = self.pos % self.block_size;
try self.in_file.seekTo(block * self.block_size + offset);
- const in = self.in_file.reader();
+ const in = self.in_file.deprecatedReader();
var size: usize = 0;
var rem_buffer = buffer;
lib/std/fmt/format_float.zig → lib/std/fmt/float.zig
@@ -11,7 +11,7 @@ const special_exponent = 0x7fffffff;
pub const min_buffer_size = 53;
/// Returns the minimum buffer size needed to print every float of a specific type and format.
-pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int {
+pub fn bufferSize(comptime mode: Mode, comptime T: type) comptime_int {
comptime std.debug.assert(@typeInfo(T) == .float);
return switch (mode) {
.scientific => 53,
@@ -27,17 +27,17 @@ pub fn bufferSize(comptime mode: Format, comptime T: type) comptime_int {
};
}
-pub const FormatError = error{
+pub const Error = error{
BufferTooSmall,
};
-pub const Format = enum {
+pub const Mode = enum {
scientific,
decimal,
};
-pub const FormatOptions = struct {
- mode: Format = .scientific,
+pub const Options = struct {
+ mode: Mode = .scientific,
precision: ?usize = null,
};
@@ -52,11 +52,11 @@ pub const FormatOptions = struct {
///
/// When printing full precision decimals, use `bufferSize` to get the required space. It is
/// recommended to bound decimal output with a fixed precision to reduce the required buffer size.
-pub fn formatFloat(buf: []u8, v_: anytype, options: FormatOptions) FormatError![]const u8 {
- const v = switch (@TypeOf(v_)) {
+pub fn render(buf: []u8, value: anytype, options: Options) Error![]const u8 {
+ const v = switch (@TypeOf(value)) {
// comptime_float internally is a f128; this preserves precision.
- comptime_float => @as(f128, v_),
- else => v_,
+ comptime_float => @as(f128, value),
+ else => value,
};
const T = @TypeOf(v);
@@ -192,7 +192,7 @@ fn round(comptime T: type, f: FloatDecimal(T), mode: RoundMode, precision: usize
/// will not fit.
///
/// It is recommended to bound decimal formatting with an exact precision.
-pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 {
+pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 {
std.debug.assert(buf.len >= min_buffer_size);
var f = f_;
@@ -263,7 +263,7 @@ pub fn formatScientific(comptime T: type, buf: []u8, f_: FloatDecimal(T), precis
/// The buffer provided must be greater than `min_buffer_size` bytes in length. If no precision is
/// specified, this may still return an error. If precision is specified, `2 + precision` bytes will
/// always be written.
-pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) FormatError![]const u8 {
+pub fn formatDecimal(comptime T: type, buf: []u8, f_: FloatDecimal(T), precision: ?usize) Error![]const u8 {
std.debug.assert(buf.len >= min_buffer_size);
var f = f_;
@@ -1520,7 +1520,7 @@ fn check(comptime T: type, value: T, comptime expected: []const u8) !void {
var buf: [6000]u8 = undefined;
const value_bits: I = @bitCast(value);
- const s = try formatFloat(&buf, value, .{});
+ const s = try render(&buf, value, .{});
try std.testing.expectEqualStrings(expected, s);
if (T == f80 and builtin.target.os.tag == .windows and builtin.target.cpu.arch == .x86_64) return;
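A usage sketch under the renames above (`formatFloat`/`Format`/`FormatOptions` becoming `render`/`Mode`/`Options`). The import path is an assumption, since the diff does not show whether std.fmt re-exports this module publicly.

    const std = @import("std");
    const float = @import("float.zig"); // assumes this file sits next to lib/std/fmt/float.zig

    test "render a decimal float" {
        var buf: [float.bufferSize(.decimal, f64)]u8 = undefined;
        const s = try float.render(&buf, @as(f64, 1.5), .{ .mode = .decimal });
        try std.testing.expectEqualStrings("1.5", s);
    }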
lib/std/fs/File.zig
@@ -1,3 +1,20 @@
+const builtin = @import("builtin");
+const Os = std.builtin.Os;
+const native_os = builtin.os.tag;
+const is_windows = native_os == .windows;
+
+const File = @This();
+const std = @import("../std.zig");
+const Allocator = std.mem.Allocator;
+const posix = std.posix;
+const io = std.io;
+const math = std.math;
+const assert = std.debug.assert;
+const linux = std.os.linux;
+const windows = std.os.windows;
+const maxInt = std.math.maxInt;
+const Alignment = std.mem.Alignment;
+
/// The OS-specific file descriptor or file handle.
handle: Handle,
@@ -168,6 +185,18 @@ pub const CreateFlags = struct {
mode: Mode = default_mode,
};
+pub fn stdout() File {
+ return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdOutput else posix.STDOUT_FILENO };
+}
+
+pub fn stderr() File {
+ return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdError else posix.STDERR_FILENO };
+}
+
+pub fn stdin() File {
+ return .{ .handle = if (is_windows) windows.peb().ProcessParameters.hStdInput else posix.STDIN_FILENO };
+}
+
/// Upon success, the stream is in an uninitialized state. To continue using it,
/// you must use the open() function.
pub fn close(self: File) void {
@@ -351,8 +380,10 @@ pub fn getPos(self: File) GetSeekPosError!u64 {
return posix.lseek_CUR_get(self.handle);
}
+pub const GetEndPosError = std.os.windows.GetFileSizeError || StatError;
+
/// TODO: integrate with async I/O
-pub fn getEndPos(self: File) GetSeekPosError!u64 {
+pub fn getEndPos(self: File) GetEndPosError!u64 {
if (builtin.os.tag == .windows) {
return windows.GetFileSizeEx(self.handle);
}
@@ -477,7 +508,6 @@ pub const Stat = struct {
pub const StatError = posix.FStatError;
/// Returns `Stat` containing basic information about the `File`.
-/// Use `metadata` to retrieve more detailed information (e.g. creation time, permissions).
/// TODO: integrate with async I/O
pub fn stat(self: File) StatError!Stat {
if (builtin.os.tag == .windows) {
@@ -743,361 +773,6 @@ pub fn setPermissions(self: File, permissions: Permissions) SetPermissionsError!
}
}
-/// Cross-platform representation of file metadata.
-/// Platform-specific functionality is available through the `inner` field.
-pub const Metadata = struct {
- /// Exposes platform-specific functionality.
- inner: switch (builtin.os.tag) {
- .windows => MetadataWindows,
- .linux => MetadataLinux,
- .wasi => MetadataWasi,
- else => MetadataUnix,
- },
-
- const Self = @This();
-
- /// Returns the size of the file
- pub fn size(self: Self) u64 {
- return self.inner.size();
- }
-
- /// Returns a `Permissions` struct, representing the permissions on the file
- pub fn permissions(self: Self) Permissions {
- return self.inner.permissions();
- }
-
- /// Returns the `Kind` of file.
- /// On Windows, can only return: `.file`, `.directory`, `.sym_link` or `.unknown`
- pub fn kind(self: Self) Kind {
- return self.inner.kind();
- }
-
- /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01
- pub fn accessed(self: Self) i128 {
- return self.inner.accessed();
- }
-
- /// Returns the time the file was modified in nanoseconds since UTC 1970-01-01
- pub fn modified(self: Self) i128 {
- return self.inner.modified();
- }
-
- /// Returns the time the file was created in nanoseconds since UTC 1970-01-01
- /// On Windows, this cannot return null
- /// On Linux, this returns null if the filesystem does not support creation times
- /// On Unices, this returns null if the filesystem or OS does not support creation times
- /// On MacOS, this returns the ctime if the filesystem does not support creation times; this is insanity, and yet another reason to hate on Apple
- pub fn created(self: Self) ?i128 {
- return self.inner.created();
- }
-};
-
-pub const MetadataUnix = struct {
- stat: posix.Stat,
-
- const Self = @This();
-
- /// Returns the size of the file
- pub fn size(self: Self) u64 {
- return @intCast(self.stat.size);
- }
-
- /// Returns a `Permissions` struct, representing the permissions on the file
- pub fn permissions(self: Self) Permissions {
- return .{ .inner = .{ .mode = self.stat.mode } };
- }
-
- /// Returns the `Kind` of the file
- pub fn kind(self: Self) Kind {
- if (builtin.os.tag == .wasi and !builtin.link_libc) return switch (self.stat.filetype) {
- .BLOCK_DEVICE => .block_device,
- .CHARACTER_DEVICE => .character_device,
- .DIRECTORY => .directory,
- .SYMBOLIC_LINK => .sym_link,
- .REGULAR_FILE => .file,
- .SOCKET_STREAM, .SOCKET_DGRAM => .unix_domain_socket,
- else => .unknown,
- };
-
- const m = self.stat.mode & posix.S.IFMT;
-
- switch (m) {
- posix.S.IFBLK => return .block_device,
- posix.S.IFCHR => return .character_device,
- posix.S.IFDIR => return .directory,
- posix.S.IFIFO => return .named_pipe,
- posix.S.IFLNK => return .sym_link,
- posix.S.IFREG => return .file,
- posix.S.IFSOCK => return .unix_domain_socket,
- else => {},
- }
-
- if (builtin.os.tag.isSolarish()) switch (m) {
- posix.S.IFDOOR => return .door,
- posix.S.IFPORT => return .event_port,
- else => {},
- };
-
- return .unknown;
- }
-
- /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01
- pub fn accessed(self: Self) i128 {
- const atime = self.stat.atime();
- return @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec;
- }
-
- /// Returns the last time the file was modified in nanoseconds since UTC 1970-01-01
- pub fn modified(self: Self) i128 {
- const mtime = self.stat.mtime();
- return @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec;
- }
-
- /// Returns the time the file was created in nanoseconds since UTC 1970-01-01.
- /// Returns null if this is not supported by the OS or filesystem
- pub fn created(self: Self) ?i128 {
- if (!@hasDecl(@TypeOf(self.stat), "birthtime")) return null;
- const birthtime = self.stat.birthtime();
-
- // If the filesystem doesn't support this the value *should* be:
- // On FreeBSD: nsec = 0, sec = -1
- // On NetBSD and OpenBSD: nsec = 0, sec = 0
- // On MacOS, it is set to ctime -- we cannot detect this!!
- switch (builtin.os.tag) {
- .freebsd => if (birthtime.sec == -1 and birthtime.nsec == 0) return null,
- .netbsd, .openbsd => if (birthtime.sec == 0 and birthtime.nsec == 0) return null,
- .macos => {},
- else => @compileError("Creation time detection not implemented for OS"),
- }
-
- return @as(i128, birthtime.sec) * std.time.ns_per_s + birthtime.nsec;
- }
-};
-
-/// `MetadataUnix`, but using Linux's `statx` syscall.
-pub const MetadataLinux = struct {
- statx: std.os.linux.Statx,
-
- const Self = @This();
-
- /// Returns the size of the file
- pub fn size(self: Self) u64 {
- return self.statx.size;
- }
-
- /// Returns a `Permissions` struct, representing the permissions on the file
- pub fn permissions(self: Self) Permissions {
- return Permissions{ .inner = PermissionsUnix{ .mode = self.statx.mode } };
- }
-
- /// Returns the `Kind` of the file
- pub fn kind(self: Self) Kind {
- const m = self.statx.mode & posix.S.IFMT;
-
- switch (m) {
- posix.S.IFBLK => return .block_device,
- posix.S.IFCHR => return .character_device,
- posix.S.IFDIR => return .directory,
- posix.S.IFIFO => return .named_pipe,
- posix.S.IFLNK => return .sym_link,
- posix.S.IFREG => return .file,
- posix.S.IFSOCK => return .unix_domain_socket,
- else => {},
- }
-
- return .unknown;
- }
-
- /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01
- pub fn accessed(self: Self) i128 {
- return @as(i128, self.statx.atime.sec) * std.time.ns_per_s + self.statx.atime.nsec;
- }
-
- /// Returns the last time the file was modified in nanoseconds since UTC 1970-01-01
- pub fn modified(self: Self) i128 {
- return @as(i128, self.statx.mtime.sec) * std.time.ns_per_s + self.statx.mtime.nsec;
- }
-
- /// Returns the time the file was created in nanoseconds since UTC 1970-01-01.
- /// Returns null if this is not supported by the filesystem, or on kernels before than version 4.11
- pub fn created(self: Self) ?i128 {
- if (self.statx.mask & std.os.linux.STATX_BTIME == 0) return null;
- return @as(i128, self.statx.btime.sec) * std.time.ns_per_s + self.statx.btime.nsec;
- }
-};
-
-pub const MetadataWasi = struct {
- stat: std.os.wasi.filestat_t,
-
- pub fn size(self: @This()) u64 {
- return self.stat.size;
- }
-
- pub fn permissions(self: @This()) Permissions {
- return .{ .inner = .{ .mode = self.stat.mode } };
- }
-
- pub fn kind(self: @This()) Kind {
- return switch (self.stat.filetype) {
- .BLOCK_DEVICE => .block_device,
- .CHARACTER_DEVICE => .character_device,
- .DIRECTORY => .directory,
- .SYMBOLIC_LINK => .sym_link,
- .REGULAR_FILE => .file,
- .SOCKET_STREAM, .SOCKET_DGRAM => .unix_domain_socket,
- else => .unknown,
- };
- }
-
- pub fn accessed(self: @This()) i128 {
- return self.stat.atim;
- }
-
- pub fn modified(self: @This()) i128 {
- return self.stat.mtim;
- }
-
- pub fn created(self: @This()) ?i128 {
- return self.stat.ctim;
- }
-};
-
-pub const MetadataWindows = struct {
- attributes: windows.DWORD,
- reparse_tag: windows.DWORD,
- _size: u64,
- access_time: i128,
- modified_time: i128,
- creation_time: i128,
-
- const Self = @This();
-
- /// Returns the size of the file
- pub fn size(self: Self) u64 {
- return self._size;
- }
-
- /// Returns a `Permissions` struct, representing the permissions on the file
- pub fn permissions(self: Self) Permissions {
- return .{ .inner = .{ .attributes = self.attributes } };
- }
-
- /// Returns the `Kind` of the file.
- /// Can only return: `.file`, `.directory`, `.sym_link` or `.unknown`
- pub fn kind(self: Self) Kind {
- if (self.attributes & windows.FILE_ATTRIBUTE_REPARSE_POINT != 0) {
- if (self.reparse_tag & windows.reparse_tag_name_surrogate_bit != 0) {
- return .sym_link;
- }
- } else if (self.attributes & windows.FILE_ATTRIBUTE_DIRECTORY != 0) {
- return .directory;
- } else {
- return .file;
- }
- return .unknown;
- }
-
- /// Returns the last time the file was accessed in nanoseconds since UTC 1970-01-01
- pub fn accessed(self: Self) i128 {
- return self.access_time;
- }
-
- /// Returns the time the file was modified in nanoseconds since UTC 1970-01-01
- pub fn modified(self: Self) i128 {
- return self.modified_time;
- }
-
- /// Returns the time the file was created in nanoseconds since UTC 1970-01-01.
- /// This never returns null, only returning an optional for compatibility with other OSes
- pub fn created(self: Self) ?i128 {
- return self.creation_time;
- }
-};
-
-pub const MetadataError = posix.FStatError;
-
-pub fn metadata(self: File) MetadataError!Metadata {
- return .{
- .inner = switch (builtin.os.tag) {
- .windows => blk: {
- var io_status_block: windows.IO_STATUS_BLOCK = undefined;
- var info: windows.FILE_ALL_INFORMATION = undefined;
-
- const rc = windows.ntdll.NtQueryInformationFile(self.handle, &io_status_block, &info, @sizeOf(windows.FILE_ALL_INFORMATION), .FileAllInformation);
- switch (rc) {
- .SUCCESS => {},
- // Buffer overflow here indicates that there is more information available than was able to be stored in the buffer
- // size provided. This is treated as success because the type of variable-length information that this would be relevant for
- // (name, volume name, etc) we don't care about.
- .BUFFER_OVERFLOW => {},
- .INVALID_PARAMETER => unreachable,
- .ACCESS_DENIED => return error.AccessDenied,
- else => return windows.unexpectedStatus(rc),
- }
-
- const reparse_tag: windows.DWORD = reparse_blk: {
- if (info.BasicInformation.FileAttributes & windows.FILE_ATTRIBUTE_REPARSE_POINT != 0) {
- var tag_info: windows.FILE_ATTRIBUTE_TAG_INFO = undefined;
- const tag_rc = windows.ntdll.NtQueryInformationFile(self.handle, &io_status_block, &tag_info, @sizeOf(windows.FILE_ATTRIBUTE_TAG_INFO), .FileAttributeTagInformation);
- switch (tag_rc) {
- .SUCCESS => {},
- // INFO_LENGTH_MISMATCH and ACCESS_DENIED are the only documented possible errors
- // https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-fscc/d295752f-ce89-4b98-8553-266d37c84f0e
- .INFO_LENGTH_MISMATCH => unreachable,
- .ACCESS_DENIED => return error.AccessDenied,
- else => return windows.unexpectedStatus(rc),
- }
- break :reparse_blk tag_info.ReparseTag;
- }
- break :reparse_blk 0;
- };
-
- break :blk .{
- .attributes = info.BasicInformation.FileAttributes,
- .reparse_tag = reparse_tag,
- ._size = @as(u64, @bitCast(info.StandardInformation.EndOfFile)),
- .access_time = windows.fromSysTime(info.BasicInformation.LastAccessTime),
- .modified_time = windows.fromSysTime(info.BasicInformation.LastWriteTime),
- .creation_time = windows.fromSysTime(info.BasicInformation.CreationTime),
- };
- },
- .linux => blk: {
- var stx = std.mem.zeroes(linux.Statx);
-
- // We are gathering information for Metadata, which is meant to contain all the
- // native OS information about the file, so use all known flags.
- const rc = linux.statx(
- self.handle,
- "",
- linux.AT.EMPTY_PATH,
- linux.STATX_BASIC_STATS | linux.STATX_BTIME,
- &stx,
- );
-
- switch (linux.E.init(rc)) {
- .SUCCESS => {},
- .ACCES => unreachable,
- .BADF => unreachable,
- .FAULT => unreachable,
- .INVAL => unreachable,
- .LOOP => unreachable,
- .NAMETOOLONG => unreachable,
- .NOENT => unreachable,
- .NOMEM => return error.SystemResources,
- .NOTDIR => unreachable,
- else => |err| return posix.unexpectedErrno(err),
- }
-
- break :blk .{
- .statx = stx,
- };
- },
- .wasi => .{ .stat = try std.os.fstat_wasi(self.handle) },
- else => .{ .stat = try posix.fstat(self.handle) },
- },
- };
-}
-
pub const UpdateTimesError = posix.FutimensError || windows.SetFileTimeError;
/// The underlying file system may have a different granularity than nanoseconds,
@@ -1130,19 +805,12 @@ pub fn updateTimes(
try posix.futimens(self.handle, ×);
}
-/// Reads all the bytes from the current position to the end of the file.
-/// On success, caller owns returned buffer.
-/// If the file is larger than `max_bytes`, returns `error.FileTooBig`.
+/// Deprecated in favor of `Reader`.
pub fn readToEndAlloc(self: File, allocator: Allocator, max_bytes: usize) ![]u8 {
return self.readToEndAllocOptions(allocator, max_bytes, null, .of(u8), null);
}
-/// Reads all the bytes from the current position to the end of the file.
-/// On success, caller owns returned buffer.
-/// If the file is larger than `max_bytes`, returns `error.FileTooBig`.
-/// If `size_hint` is specified the initial buffer size is calculated using
-/// that value, otherwise an arbitrary value is used instead.
-/// Allows specifying alignment and a sentinel value.
+/// Deprecated in favor of `Reader`.
pub fn readToEndAllocOptions(
self: File,
allocator: Allocator,
@@ -1161,7 +829,7 @@ pub fn readToEndAllocOptions(
var array_list = try std.ArrayListAligned(u8, alignment).initCapacity(allocator, initial_cap);
defer array_list.deinit();
- self.reader().readAllArrayListAligned(alignment, &array_list, max_bytes) catch |err| switch (err) {
+ self.deprecatedReader().readAllArrayListAligned(alignment, &array_list, max_bytes) catch |err| switch (err) {
error.StreamTooLong => return error.FileTooBig,
else => |e| return e,
};
@@ -1184,8 +852,7 @@ pub fn read(self: File, buffer: []u8) ReadError!usize {
return posix.read(self.handle, buffer);
}
-/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it
-/// means the file reached the end. Reaching the end of a file is not an error condition.
+/// Deprecated in favor of `Reader`.
pub fn readAll(self: File, buffer: []u8) ReadError!usize {
var index: usize = 0;
while (index != buffer.len) {
@@ -1206,10 +873,7 @@ pub fn pread(self: File, buffer: []u8, offset: u64) PReadError!usize {
return posix.pread(self.handle, buffer, offset);
}
-/// Returns the number of bytes read. If the number read is smaller than `buffer.len`, it
-/// means the file reached the end. Reaching the end of a file is not an error condition.
-/// On Windows, this function currently does alter the file pointer.
-/// https://github.com/ziglang/zig/issues/12783
+/// Deprecated in favor of `Reader`.
pub fn preadAll(self: File, buffer: []u8, offset: u64) PReadError!usize {
var index: usize = 0;
while (index != buffer.len) {
@@ -1223,8 +887,7 @@ pub fn preadAll(self: File, buffer: []u8, offset: u64) PReadError!usize {
/// See https://github.com/ziglang/zig/issues/7699
pub fn readv(self: File, iovecs: []const posix.iovec) ReadError!usize {
if (is_windows) {
- // TODO improve this to use ReadFileScatter
- if (iovecs.len == 0) return @as(usize, 0);
+ if (iovecs.len == 0) return 0;
const first = iovecs[0];
return windows.ReadFile(self.handle, first.base[0..first.len], null);
}
@@ -1232,19 +895,7 @@ pub fn readv(self: File, iovecs: []const posix.iovec) ReadError!usize {
return posix.readv(self.handle, iovecs);
}
-/// Returns the number of bytes read. If the number read is smaller than the total bytes
-/// from all the buffers, it means the file reached the end. Reaching the end of a file
-/// is not an error condition.
-///
-/// The `iovecs` parameter is mutable because:
-/// * This function needs to mutate the fields in order to handle partial
-/// reads from the underlying OS layer.
-/// * The OS layer expects pointer addresses to be inside the application's address space
-/// even if the length is zero. Meanwhile, in Zig, slices may have undefined pointer
-/// addresses when the length is zero. So this function modifies the base fields
-/// when the length is zero.
-///
-/// Related open issue: https://github.com/ziglang/zig/issues/7699
+/// Deprecated in favor of `Reader`.
pub fn readvAll(self: File, iovecs: []posix.iovec) ReadError!usize {
if (iovecs.len == 0) return 0;
@@ -1279,8 +930,7 @@ pub fn readvAll(self: File, iovecs: []posix.iovec) ReadError!usize {
/// https://github.com/ziglang/zig/issues/12783
pub fn preadv(self: File, iovecs: []const posix.iovec, offset: u64) PReadError!usize {
if (is_windows) {
- // TODO improve this to use ReadFileScatter
- if (iovecs.len == 0) return @as(usize, 0);
+ if (iovecs.len == 0) return 0;
const first = iovecs[0];
return windows.ReadFile(self.handle, first.base[0..first.len], offset);
}
@@ -1288,14 +938,7 @@ pub fn preadv(self: File, iovecs: []const posix.iovec, offset: u64) PReadError!u
return posix.preadv(self.handle, iovecs, offset);
}
-/// Returns the number of bytes read. If the number read is smaller than the total bytes
-/// from all the buffers, it means the file reached the end. Reaching the end of a file
-/// is not an error condition.
-/// The `iovecs` parameter is mutable because this function needs to mutate the fields in
-/// order to handle partial reads from the underlying OS layer.
-/// See https://github.com/ziglang/zig/issues/7699
-/// On Windows, this function currently does alter the file pointer.
-/// https://github.com/ziglang/zig/issues/12783
+/// Deprecated in favor of `Reader`.
pub fn preadvAll(self: File, iovecs: []posix.iovec, offset: u64) PReadError!usize {
if (iovecs.len == 0) return 0;
@@ -1328,6 +971,7 @@ pub fn write(self: File, bytes: []const u8) WriteError!usize {
return posix.write(self.handle, bytes);
}
+/// Deprecated in favor of `Writer`.
pub fn writeAll(self: File, bytes: []const u8) WriteError!void {
var index: usize = 0;
while (index < bytes.len) {
@@ -1345,8 +989,7 @@ pub fn pwrite(self: File, bytes: []const u8, offset: u64) PWriteError!usize {
return posix.pwrite(self.handle, bytes, offset);
}
-/// On Windows, this function currently does alter the file pointer.
-/// https://github.com/ziglang/zig/issues/12783
+/// Deprecated in favor of `Writer`.
pub fn pwriteAll(self: File, bytes: []const u8, offset: u64) PWriteError!void {
var index: usize = 0;
while (index < bytes.len) {
@@ -1355,11 +998,10 @@ pub fn pwriteAll(self: File, bytes: []const u8, offset: u64) PWriteError!void {
}
/// See https://github.com/ziglang/zig/issues/7699
-/// See equivalent function: `std.net.Stream.writev`.
pub fn writev(self: File, iovecs: []const posix.iovec_const) WriteError!usize {
if (is_windows) {
// TODO improve this to use WriteFileScatter
- if (iovecs.len == 0) return @as(usize, 0);
+ if (iovecs.len == 0) return 0;
const first = iovecs[0];
return windows.WriteFile(self.handle, first.base[0..first.len], null);
}
@@ -1367,15 +1009,7 @@ pub fn writev(self: File, iovecs: []const posix.iovec_const) WriteError!usize {
return posix.writev(self.handle, iovecs);
}
-/// The `iovecs` parameter is mutable because:
-/// * This function needs to mutate the fields in order to handle partial
-/// writes from the underlying OS layer.
-/// * The OS layer expects pointer addresses to be inside the application's address space
-/// even if the length is zero. Meanwhile, in Zig, slices may have undefined pointer
-/// addresses when the length is zero. So this function modifies the base fields
-/// when the length is zero.
-/// See https://github.com/ziglang/zig/issues/7699
-/// See equivalent function: `std.net.Stream.writevAll`.
+/// Deprecated in favor of `Writer`.
pub fn writevAll(self: File, iovecs: []posix.iovec_const) WriteError!void {
if (iovecs.len == 0) return;
@@ -1405,8 +1039,7 @@ pub fn writevAll(self: File, iovecs: []posix.iovec_const) WriteError!void {
/// https://github.com/ziglang/zig/issues/12783
pub fn pwritev(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError!usize {
if (is_windows) {
- // TODO improve this to use WriteFileScatter
- if (iovecs.len == 0) return @as(usize, 0);
+ if (iovecs.len == 0) return 0;
const first = iovecs[0];
return windows.WriteFile(self.handle, first.base[0..first.len], offset);
}
@@ -1414,14 +1047,9 @@ pub fn pwritev(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError
return posix.pwritev(self.handle, iovecs, offset);
}
-/// The `iovecs` parameter is mutable because this function needs to mutate the fields in
-/// order to handle partial writes from the underlying OS layer.
-/// See https://github.com/ziglang/zig/issues/7699
-/// On Windows, this function currently does alter the file pointer.
-/// https://github.com/ziglang/zig/issues/12783
+/// Deprecated in favor of `Writer`.
pub fn pwritevAll(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteError!void {
if (iovecs.len == 0) return;
-
var i: usize = 0;
var off: u64 = 0;
while (true) {
@@ -1439,14 +1067,14 @@ pub fn pwritevAll(self: File, iovecs: []posix.iovec_const, offset: u64) PWriteEr
pub const CopyRangeError = posix.CopyFileRangeError;
+/// Deprecated in favor of `Writer`.
pub fn copyRange(in: File, in_offset: u64, out: File, out_offset: u64, len: u64) CopyRangeError!u64 {
const adjusted_len = math.cast(usize, len) orelse maxInt(usize);
const result = try posix.copy_file_range(in.handle, in_offset, out.handle, out_offset, adjusted_len, 0);
return result;
}
-/// Returns the number of bytes copied. If the number read is smaller than `buffer.len`, it
-/// means the in file reached the end. Reaching the end of a file is not an error condition.
+/// Deprecated in favor of `Writer`.
pub fn copyRangeAll(in: File, in_offset: u64, out: File, out_offset: u64, len: u64) CopyRangeError!u64 {
var total_bytes_copied: u64 = 0;
var in_off = in_offset;
@@ -1461,24 +1089,18 @@ pub fn copyRangeAll(in: File, in_offset: u64, out: File, out_offset: u64, len: u
return total_bytes_copied;
}
+/// Deprecated in favor of `Writer`.
pub const WriteFileOptions = struct {
in_offset: u64 = 0,
-
- /// `null` means the entire file. `0` means no bytes from the file.
- /// When this is `null`, trailers must be sent in a separate writev() call
- /// due to a flaw in the BSD sendfile API. Other operating systems, such as
- /// Linux, already do this anyway due to API limitations.
- /// If the size of the source file is known, passing the size here will save one syscall.
in_len: ?u64 = null,
-
headers_and_trailers: []posix.iovec_const = &[0]posix.iovec_const{},
-
- /// The trailer count is inferred from `headers_and_trailers.len - header_count`
header_count: usize = 0,
};
+/// Deprecated in favor of `Writer`.
pub const WriteFileError = ReadError || error{EndOfStream} || WriteError;
+/// Deprecated in favor of `Writer`.
pub fn writeFileAll(self: File, in_file: File, args: WriteFileOptions) WriteFileError!void {
return self.writeFileAllSendfile(in_file, args) catch |err| switch (err) {
error.Unseekable,
@@ -1488,35 +1110,27 @@ pub fn writeFileAll(self: File, in_file: File, args: WriteFileOptions) WriteFile
error.NetworkUnreachable,
error.NetworkSubsystemFailed,
=> return self.writeFileAllUnseekable(in_file, args),
-
else => |e| return e,
};
}
-/// Does not try seeking in either of the File parameters.
-/// See `writeFileAll` as an alternative to calling this.
+/// Deprecated in favor of `Writer`.
pub fn writeFileAllUnseekable(self: File, in_file: File, args: WriteFileOptions) WriteFileError!void {
const headers = args.headers_and_trailers[0..args.header_count];
const trailers = args.headers_and_trailers[args.header_count..];
-
try self.writevAll(headers);
-
- try in_file.reader().skipBytes(args.in_offset, .{ .buf_size = 4096 });
-
+ try in_file.deprecatedReader().skipBytes(args.in_offset, .{ .buf_size = 4096 });
var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init();
if (args.in_len) |len| {
- var stream = std.io.limitedReader(in_file.reader(), len);
- try fifo.pump(stream.reader(), self.writer());
+ var stream = std.io.limitedReader(in_file.deprecatedReader(), len);
+ try fifo.pump(stream.reader(), self.deprecatedWriter());
} else {
- try fifo.pump(in_file.reader(), self.writer());
+ try fifo.pump(in_file.deprecatedReader(), self.deprecatedWriter());
}
-
try self.writevAll(trailers);
}
-/// Low level function which can fail for OS-specific reasons.
-/// See `writeFileAll` as an alternative to calling this.
-/// TODO integrate with async I/O
+/// Deprecated in favor of `Writer`.
fn writeFileAllSendfile(self: File, in_file: File, args: WriteFileOptions) posix.SendFileError!void {
const count = blk: {
if (args.in_len) |l| {
@@ -1581,18 +1195,23 @@ fn writeFileAllSendfile(self: File, in_file: File, args: WriteFileOptions) posix
}
}
-pub const Reader = io.GenericReader(File, ReadError, read);
+/// Deprecated in favor of `Reader`.
+pub const DeprecatedReader = io.GenericReader(File, ReadError, read);
-pub fn reader(file: File) Reader {
+/// Deprecated in favor of `Reader`.
+pub fn deprecatedReader(file: File) DeprecatedReader {
return .{ .context = file };
}
-pub const Writer = io.GenericWriter(File, WriteError, write);
+/// Deprecated in favor of `Writer`.
+pub const DeprecatedWriter = io.GenericWriter(File, WriteError, write);
-pub fn writer(file: File) Writer {
+/// Deprecated in favor of `Writer`.
+pub fn deprecatedWriter(file: File) DeprecatedWriter {
return .{ .context = file };
}
+/// Deprecated in favor of `Reader` and `Writer`.
pub const SeekableStream = io.SeekableStream(
File,
SeekError,
@@ -1603,10 +1222,715 @@ pub const SeekableStream = io.SeekableStream(
getEndPos,
);
+/// Deprecated in favor of `Reader` and `Writer`.
pub fn seekableStream(file: File) SeekableStream {
return .{ .context = file };
}
+/// Memoizes key information about a file handle such as:
+/// * The size from calling stat, or the error that occurred therein.
+/// * The current seek position.
+/// * The error that occurred when trying to seek.
+/// * Whether reading should be done positionally or streaming.
+/// * Whether reading should be done via fd-to-fd syscalls (e.g. `sendfile`)
+/// versus plain variants (e.g. `read`).
+///
+/// Fulfills the `std.io.Reader` interface.
+pub const Reader = struct {
+ file: File,
+ err: ?ReadError = null,
+ mode: Reader.Mode = .positional,
+ pos: u64 = 0,
+ size: ?u64 = null,
+ size_err: ?GetEndPosError = null,
+ seek_err: ?Reader.SeekError = null,
+ interface: std.io.Reader,
+
+ pub const SeekError = File.SeekError || error{
+ /// Seeking fell back to reading, and reached the end before the requested seek position.
+ /// `pos` remains at the end of the file.
+ EndOfStream,
+ /// Seeking fell back to reading, which failed.
+ ReadFailed,
+ };
+
+ pub const Mode = enum {
+ streaming,
+ positional,
+ /// Avoid syscalls other than `read` and `readv`.
+ streaming_reading,
+ /// Avoid syscalls other than `pread` and `preadv`.
+ positional_reading,
+ /// Indicates reading cannot continue because of a seek failure.
+ failure,
+
+ pub fn toStreaming(m: @This()) @This() {
+ return switch (m) {
+ .positional, .streaming => .streaming,
+ .positional_reading, .streaming_reading => .streaming_reading,
+ .failure => .failure,
+ };
+ }
+
+ pub fn toReading(m: @This()) @This() {
+ return switch (m) {
+ .positional, .positional_reading => .positional_reading,
+ .streaming, .streaming_reading => .streaming_reading,
+ .failure => .failure,
+ };
+ }
+ };
+
+ pub fn initInterface(buffer: []u8) std.io.Reader {
+ return .{
+ .vtable = &.{
+ .stream = Reader.stream,
+ .discard = Reader.discard,
+ },
+ .buffer = buffer,
+ .seek = 0,
+ .end = 0,
+ };
+ }
+
+ pub fn init(file: File, buffer: []u8) Reader {
+ return .{
+ .file = file,
+ .interface = initInterface(buffer),
+ };
+ }
+
+ pub fn initSize(file: File, buffer: []u8, size: ?u64) Reader {
+ return .{
+ .file = file,
+ .interface = initInterface(buffer),
+ .size = size,
+ };
+ }
+
+ pub fn initMode(file: File, buffer: []u8, init_mode: Reader.Mode) Reader {
+ return .{
+ .file = file,
+ .interface = initInterface(buffer),
+ .mode = init_mode,
+ };
+ }
+
+ pub fn getSize(r: *Reader) GetEndPosError!u64 {
+ return r.size orelse {
+ if (r.size_err) |err| return err;
+ if (r.file.getEndPos()) |size| {
+ r.size = size;
+ return size;
+ } else |err| {
+ r.size_err = err;
+ return err;
+ }
+ };
+ }
+
+ pub fn seekBy(r: *Reader, offset: i64) Reader.SeekError!void {
+ switch (r.mode) {
+ .positional, .positional_reading => {
+ // TODO: make += operator allow any integer types
+ r.pos = @intCast(@as(i64, @intCast(r.pos)) + offset);
+ },
+ .streaming, .streaming_reading => {
+ const seek_err = r.seek_err orelse e: {
+ if (posix.lseek_CUR(r.file.handle, offset)) |_| {
+ // TODO: make += operator allow any integer types
+ r.pos = @intCast(@as(i64, @intCast(r.pos)) + offset);
+ return;
+ } else |err| {
+ r.seek_err = err;
+ break :e err;
+ }
+ };
+ var remaining = std.math.cast(u64, offset) orelse return seek_err;
+ while (remaining > 0) {
+ const n = discard(&r.interface, .limited(remaining)) catch |err| {
+ r.seek_err = err;
+ return err;
+ };
+ r.pos += n;
+ remaining -= n;
+ }
+ },
+ .failure => return r.seek_err.?,
+ }
+ }
+
+ pub fn seekTo(r: *Reader, offset: u64) Reader.SeekError!void {
+ switch (r.mode) {
+ .positional, .positional_reading => {
+ r.pos = offset;
+ },
+ .streaming, .streaming_reading => {
+ if (offset >= r.pos) return Reader.seekBy(r, offset - r.pos);
+ if (r.seek_err) |err| return err;
+ posix.lseek_SET(r.file.handle, offset) catch |err| {
+ r.seek_err = err;
+ return err;
+ };
+ r.pos = offset;
+ },
+ .failure => return r.seek_err.?,
+ }
+ }
+
+ /// Number of slices to store on the stack, when trying to send as many byte
+ /// vectors through the underlying read calls as possible.
+ const max_buffers_len = 16;
+
+ fn stream(io_reader: *std.io.Reader, w: *std.io.Writer, limit: std.io.Limit) std.io.Reader.StreamError!usize {
+ const r: *Reader = @fieldParentPtr("interface", io_reader);
+ switch (r.mode) {
+ .positional, .streaming => return w.sendFile(r, limit) catch |write_err| switch (write_err) {
+ error.Unimplemented => {
+ r.mode = r.mode.toReading();
+ return 0;
+ },
+ else => |e| return e,
+ },
+ .positional_reading => {
+ if (is_windows) {
+ // Unfortunately, `ReadFileScatter` cannot be used since it
+ // requires page alignment.
+ const dest = limit.slice(try w.writableSliceGreedy(1));
+ const n = try readPositional(r, dest);
+ w.advance(n);
+ return n;
+ }
+ var iovecs_buffer: [max_buffers_len]posix.iovec = undefined;
+ const dest = try w.writableVectorPosix(&iovecs_buffer, limit);
+ assert(dest[0].len > 0);
+ const n = posix.preadv(r.file.handle, dest, r.pos) catch |err| switch (err) {
+ error.Unseekable => {
+ r.mode = r.mode.toStreaming();
+ if (r.pos != 0) r.seekBy(@intCast(r.pos)) catch {
+ r.mode = .failure;
+ return error.ReadFailed;
+ };
+ return 0;
+ },
+ else => |e| {
+ r.err = e;
+ return error.ReadFailed;
+ },
+ };
+ if (n == 0) {
+ r.size = r.pos;
+ return error.EndOfStream;
+ }
+ r.pos += n;
+ return n;
+ },
+ .streaming_reading => {
+ if (is_windows) {
+ // Unfortunately, `ReadFileScatter` cannot be used since it
+ // requires page alignment.
+ const dest = limit.slice(try w.writableSliceGreedy(1));
+ const n = try readStreaming(r, dest);
+ w.advance(n);
+ return n;
+ }
+ var iovecs_buffer: [max_buffers_len]posix.iovec = undefined;
+ const dest = try w.writableVectorPosix(&iovecs_buffer, limit);
+ assert(dest[0].len > 0);
+ const n = posix.readv(r.file.handle, dest) catch |err| {
+ r.err = err;
+ return error.ReadFailed;
+ };
+ if (n == 0) {
+ r.size = r.pos;
+ return error.EndOfStream;
+ }
+ r.pos += n;
+ return n;
+ },
+ .failure => return error.ReadFailed,
+ }
+ }
+
+ fn discard(io_reader: *std.io.Reader, limit: std.io.Limit) std.io.Reader.Error!usize {
+ const r: *Reader = @fieldParentPtr("interface", io_reader);
+ const file = r.file;
+ const pos = r.pos;
+ switch (r.mode) {
+ .positional, .positional_reading => {
+ const size = r.size orelse {
+ if (file.getEndPos()) |size| {
+ r.size = size;
+ } else |err| {
+ r.size_err = err;
+ r.mode = r.mode.toStreaming();
+ }
+ return 0;
+ };
+ const delta = @min(@intFromEnum(limit), size - pos);
+ r.pos = pos + delta;
+ return delta;
+ },
+ .streaming, .streaming_reading => {
+ // Unfortunately we can't seek forward without knowing the
+ // size because the seek syscalls provided to us will not
+ // return the true end position if a seek would exceed the
+ // end.
+ fallback: {
+ if (r.size_err == null and r.seek_err == null) break :fallback;
+ var trash_buffer: [128]u8 = undefined;
+ const trash = &trash_buffer;
+ if (is_windows) {
+ const n = windows.ReadFile(file.handle, trash, null) catch |err| {
+ r.err = err;
+ return error.ReadFailed;
+ };
+ if (n == 0) {
+ r.size = pos;
+ return error.EndOfStream;
+ }
+ r.pos = pos + n;
+ return n;
+ }
+ var iovecs: [max_buffers_len]std.posix.iovec = undefined;
+ var iovecs_i: usize = 0;
+ var remaining = @intFromEnum(limit);
+ while (remaining > 0 and iovecs_i < iovecs.len) {
+ iovecs[iovecs_i] = .{ .base = trash, .len = @min(trash.len, remaining) };
+ remaining -= iovecs[iovecs_i].len;
+ iovecs_i += 1;
+ }
+ const n = posix.readv(file.handle, iovecs[0..iovecs_i]) catch |err| {
+ r.err = err;
+ return error.ReadFailed;
+ };
+ if (n == 0) {
+ r.size = pos;
+ return error.EndOfStream;
+ }
+ r.pos = pos + n;
+ return n;
+ }
+ const size = r.size orelse {
+ if (file.getEndPos()) |size| {
+ r.size = size;
+ } else |err| {
+ r.size_err = err;
+ }
+ return 0;
+ };
+ const n = @min(size - pos, std.math.maxInt(i64), @intFromEnum(limit));
+ file.seekBy(n) catch |err| {
+ r.seek_err = err;
+ return 0;
+ };
+ r.pos = pos + n;
+ return n;
+ },
+ .failure => return error.ReadFailed,
+ }
+ }
+
+ pub fn readPositional(r: *Reader, dest: []u8) std.io.Reader.Error!usize {
+ const n = r.file.pread(dest, r.pos) catch |err| switch (err) {
+ error.Unseekable => {
+ r.mode = r.mode.toStreaming();
+ if (r.pos != 0) r.seekBy(@intCast(r.pos)) catch {
+ r.mode = .failure;
+ return error.ReadFailed;
+ };
+ return 0;
+ },
+ else => |e| {
+ r.err = e;
+ return error.ReadFailed;
+ },
+ };
+ if (n == 0) {
+ r.size = r.pos;
+ return error.EndOfStream;
+ }
+ r.pos += n;
+ return n;
+ }
+
+ pub fn readStreaming(r: *Reader, dest: []u8) std.io.Reader.Error!usize {
+ const n = r.file.read(dest) catch |err| {
+ r.err = err;
+ return error.ReadFailed;
+ };
+ if (n == 0) {
+ r.size = r.pos;
+ return error.EndOfStream;
+ }
+ r.pos += n;
+ return n;
+ }
+
+ pub fn read(r: *Reader, dest: []u8) std.io.Reader.Error!usize {
+ switch (r.mode) {
+ .positional, .positional_reading => return readPositional(r, dest),
+ .streaming, .streaming_reading => return readStreaming(r, dest),
+ .failure => return error.ReadFailed,
+ }
+ }
+
+ pub fn atEnd(r: *Reader) bool {
+ // Even if stat fails, size is set when end is encountered.
+ const size = r.size orelse return false;
+ return size - r.pos == 0;
+ }
+};
+
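A minimal usage sketch of the File.Reader introduced above, assuming only the declarations visible in this diff (`init`, the `interface` field, and `std.io.Reader.allocRemaining`); a sketch, not part of the commit:

const std = @import("std");

fn readWholeFile(gpa: std.mem.Allocator, path: []const u8) ![]u8 {
    const file = try std.fs.cwd().openFile(path, .{});
    defer file.close();

    var read_buffer: [4096]u8 = undefined;
    // Memoizes size/seek state for the handle; defaults to positional reads
    // and falls back to streaming if the handle turns out to be unseekable.
    var file_reader: std.fs.File.Reader = .init(file, &read_buffer);

    // `interface` is the generic std.io.Reader; stream-agnostic code takes *std.io.Reader.
    return file_reader.interface.allocRemaining(gpa, .unlimited);
}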
+pub const Writer = struct {
+ file: File,
+ err: ?WriteError = null,
+ mode: Writer.Mode = .positional,
+ pos: u64 = 0,
+ sendfile_err: ?SendfileError = null,
+ copy_file_range_err: ?CopyFileRangeError = null,
+ fcopyfile_err: ?FcopyfileError = null,
+ seek_err: ?SeekError = null,
+ interface: std.io.Writer,
+
+ pub const Mode = Reader.Mode;
+
+ pub const SendfileError = error{
+ UnsupportedOperation,
+ SystemResources,
+ InputOutput,
+ BrokenPipe,
+ WouldBlock,
+ Unexpected,
+ };
+
+ pub const CopyFileRangeError = std.os.freebsd.CopyFileRangeError || std.os.linux.wrapped.CopyFileRangeError;
+
+ pub const FcopyfileError = error{
+ OperationNotSupported,
+ OutOfMemory,
+ Unexpected,
+ };
+
+ /// Number of slices to store on the stack, when trying to send as many byte
+ /// vectors through the underlying write calls as possible.
+ const max_buffers_len = 16;
+
+ pub fn init(file: File, buffer: []u8) Writer {
+ return initMode(file, buffer, .positional);
+ }
+
+ pub fn initMode(file: File, buffer: []u8, init_mode: Writer.Mode) Writer {
+ return .{
+ .file = file,
+ .interface = initInterface(buffer),
+ .mode = init_mode,
+ };
+ }
+
+ pub fn initInterface(buffer: []u8) std.io.Writer {
+ return .{
+ .vtable = &.{
+ .drain = drain,
+ .sendFile = sendFile,
+ },
+ .buffer = buffer,
+ };
+ }
+
+ pub fn moveToReader(w: *Writer) Reader {
+ defer w.* = undefined;
+ return .{
+ .file = w.file,
+ .mode = w.mode,
+ .pos = w.pos,
+ .seek_err = w.seek_err,
+ };
+ }
+
+ pub fn drain(io_writer: *std.io.Writer, data: []const []const u8, splat: usize) std.io.Writer.Error!usize {
+ const w: *Writer = @fieldParentPtr("interface", io_writer);
+ const handle = w.file.handle;
+ const buffered = io_writer.buffered();
+ var splat_buffer: [256]u8 = undefined;
+ if (is_windows) {
+ var i: usize = 0;
+ while (i < buffered.len) {
+ const n = windows.WriteFile(handle, buffered[i..], null) catch |err| {
+ w.err = err;
+ w.pos += i;
+ _ = io_writer.consume(i);
+ return error.WriteFailed;
+ };
+ i += n;
+ if (data.len > 0 and buffered.len - i < n) {
+ w.pos += i;
+ return io_writer.consume(i);
+ }
+ }
+ if (i != 0 or data.len == 0 or (data.len == 1 and splat == 0)) {
+ w.pos += i;
+ return io_writer.consume(i);
+ }
+ const n = windows.WriteFile(handle, data[0], null) catch |err| {
+ w.err = err;
+ return 0;
+ };
+ w.pos += n;
+ return n;
+ }
+ if (data.len == 0) {
+ var i: usize = 0;
+ while (i < buffered.len) {
+ i += std.posix.write(handle, buffered) catch |err| {
+ w.err = err;
+ w.pos += i;
+ _ = io_writer.consume(i);
+ return error.WriteFailed;
+ };
+ }
+ w.pos += i;
+ return io_writer.consumeAll();
+ }
+ var iovecs: [max_buffers_len]std.posix.iovec_const = undefined;
+ var len: usize = 0;
+ if (buffered.len > 0) {
+ iovecs[len] = .{ .base = buffered.ptr, .len = buffered.len };
+ len += 1;
+ }
+ for (data) |d| {
+ if (d.len == 0) continue;
+ if (iovecs.len - len == 0) break;
+ iovecs[len] = .{ .base = d.ptr, .len = d.len };
+ len += 1;
+ }
+ switch (splat) {
+ 0 => if (data[data.len - 1].len != 0) {
+ len -= 1;
+ },
+ 1 => {},
+ else => switch (data[data.len - 1].len) {
+ 0 => {},
+ 1 => {
+ const memset_len = @min(splat_buffer.len, splat);
+ const buf = splat_buffer[0..memset_len];
+ @memset(buf, data[data.len - 1][0]);
+ iovecs[len - 1] = .{ .base = buf.ptr, .len = buf.len };
+ var remaining_splat = splat - buf.len;
+ while (remaining_splat > splat_buffer.len and len < iovecs.len) {
+ iovecs[len] = .{ .base = &splat_buffer, .len = splat_buffer.len };
+ remaining_splat -= splat_buffer.len;
+ len += 1;
+ }
+ if (remaining_splat > 0 and len < iovecs.len) {
+ iovecs[len] = .{ .base = &splat_buffer, .len = remaining_splat };
+ len += 1;
+ }
+ return std.posix.writev(handle, iovecs[0..len]) catch |err| {
+ w.err = err;
+ return error.WriteFailed;
+ };
+ },
+ else => for (0..splat - 1) |_| {
+ if (iovecs.len - len == 0) break;
+ iovecs[len] = .{ .base = data[data.len - 1].ptr, .len = data[data.len - 1].len };
+ len += 1;
+ },
+ },
+ }
+ const n = std.posix.writev(handle, iovecs[0..len]) catch |err| {
+ w.err = err;
+ return error.WriteFailed;
+ };
+ w.pos += n;
+ return io_writer.consume(n);
+ }
+
+ pub fn sendFile(
+ io_writer: *std.io.Writer,
+ file_reader: *Reader,
+ limit: std.io.Limit,
+ ) std.io.Writer.FileError!usize {
+ const w: *Writer = @fieldParentPtr("interface", io_writer);
+ const out_fd = w.file.handle;
+ const in_fd = file_reader.file.handle;
+ // TODO try using copy_file_range on FreeBSD
+ // TODO try using sendfile on macOS
+ // TODO try using sendfile on FreeBSD
+ if (native_os == .linux and w.mode == .streaming) sf: {
+ // Try using sendfile on Linux.
+ if (w.sendfile_err != null) break :sf;
+ // Linux sendfile does not support headers.
+ const buffered = limit.slice(file_reader.interface.buffer);
+ if (io_writer.end != 0 or buffered.len != 0) return drain(io_writer, &.{buffered}, 1);
+ const max_count = 0x7ffff000; // Avoid EINVAL.
+ var off: std.os.linux.off_t = undefined;
+ const off_ptr: ?*std.os.linux.off_t, const count: usize = switch (file_reader.mode) {
+ .positional => o: {
+ const size = file_reader.size orelse {
+ if (file_reader.file.getEndPos()) |size| {
+ file_reader.size = size;
+ } else |err| {
+ file_reader.size_err = err;
+ file_reader.mode = .streaming;
+ }
+ return 0;
+ };
+ off = std.math.cast(std.os.linux.off_t, file_reader.pos) orelse return error.ReadFailed;
+ break :o .{ &off, @min(@intFromEnum(limit), size - file_reader.pos, max_count) };
+ },
+ .streaming => .{ null, limit.minInt(max_count) },
+ .streaming_reading, .positional_reading => break :sf,
+ .failure => return error.ReadFailed,
+ };
+ const n = std.os.linux.wrapped.sendfile(out_fd, in_fd, off_ptr, count) catch |err| switch (err) {
+ error.Unseekable => {
+ file_reader.mode = file_reader.mode.toStreaming();
+ if (file_reader.pos != 0) file_reader.seekBy(@intCast(file_reader.pos)) catch {
+ file_reader.mode = .failure;
+ return error.ReadFailed;
+ };
+ return 0;
+ },
+ else => |e| {
+ w.sendfile_err = e;
+ return 0;
+ },
+ };
+ if (n == 0) {
+ file_reader.size = file_reader.pos;
+ return error.EndOfStream;
+ }
+ file_reader.pos += n;
+ w.pos += n;
+ return n;
+ }
+ const copy_file_range_fn = switch (native_os) {
+ .freebsd => std.os.freebsd.copy_file_range,
+ .linux => if (std.c.versionCheck(.{ .major = 2, .minor = 27, .patch = 0 })) std.os.linux.wrapped.copy_file_range else null,
+ else => null,
+ };
+ if (copy_file_range_fn) |copy_file_range| cfr: {
+ if (w.copy_file_range_err != null) break :cfr;
+ const buffered = limit.slice(file_reader.interface.buffer);
+ if (io_writer.end != 0 or buffered.len != 0) return drain(io_writer, &.{buffered}, 1);
+ var off_in: i64 = undefined;
+ var off_out: i64 = undefined;
+ const off_in_ptr: ?*i64 = switch (file_reader.mode) {
+ .positional_reading, .streaming_reading => return error.Unimplemented,
+ .positional => p: {
+ off_in = file_reader.pos;
+ break :p &off_in;
+ },
+ .streaming => null,
+ .failure => return error.WriteFailed,
+ };
+ const off_out_ptr: ?*i64 = switch (w.mode) {
+ .positional_reading, .streaming_reading => return error.Unimplemented,
+ .positional => p: {
+ off_out = w.pos;
+ break :p &off_out;
+ },
+ .streaming => null,
+ .failure => return error.WriteFailed,
+ };
+ const n = copy_file_range(in_fd, off_in_ptr, out_fd, off_out_ptr, @intFromEnum(limit), 0) catch |err| {
+ w.copy_file_range_err = err;
+ return 0;
+ };
+ if (n == 0) {
+ file_reader.size = file_reader.pos;
+ return error.EndOfStream;
+ }
+ file_reader.pos += n;
+ w.pos += n;
+ return n;
+ }
+
+ if (builtin.os.tag.isDarwin()) fcf: {
+ if (w.fcopyfile_err != null) break :fcf;
+ if (file_reader.pos != 0) break :fcf;
+ if (w.pos != 0) break :fcf;
+ if (limit != .unlimited) break :fcf;
+ const rc = std.c.fcopyfile(in_fd, out_fd, null, .{ .DATA = true });
+ switch (posix.errno(rc)) {
+ .SUCCESS => {},
+ .INVAL => if (builtin.mode == .Debug) @panic("invalid API usage") else {
+ w.fcopyfile_err = error.Unexpected;
+ return 0;
+ },
+ .NOMEM => {
+ w.fcopyfile_err = error.OutOfMemory;
+ return 0;
+ },
+ .OPNOTSUPP => {
+ w.fcopyfile_err = error.OperationNotSupported;
+ return 0;
+ },
+ else => |err| {
+ w.fcopyfile_err = posix.unexpectedErrno(err);
+ return 0;
+ },
+ }
+ const n = if (file_reader.size) |size| size else @panic("TODO figure out how much copied");
+ file_reader.pos = n;
+ w.pos = n;
+ return n;
+ }
+
+ return error.Unimplemented;
+ }
+
+    pub fn seekTo(w: *Writer, offset: u64) SeekError!void {
+        if (w.seek_err) |err| return err;
+        switch (w.mode) {
+            .positional, .positional_reading => {
+                w.pos = offset;
+            },
+            .streaming, .streaming_reading => {
+                posix.lseek_SET(w.file.handle, offset) catch |err| {
+                    w.seek_err = err;
+                    return err;
+                };
+                w.pos = offset;
+            },
+            .failure => return w.seek_err.?,
+        }
+    }
+};
+
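A matching sketch for the buffered File.Writer above, assuming `init`, the `interface` field, and the generic std.io.Writer `print`/`flush` helpers:

const std = @import("std");

fn writeGreeting(file: std.fs.File, name: []const u8) !void {
    var write_buffer: [256]u8 = undefined;
    var file_writer: std.fs.File.Writer = .init(file, &write_buffer);
    const w = &file_writer.interface; // generic *std.io.Writer

    try w.print("hello, {s}!\n", .{name});
    // Writes accumulate in `write_buffer` until drained; flush explicitly.
    try w.flush();
}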
+/// Defaults to positional reading; falls back to streaming.
+///
+/// Positional is more threadsafe, since the global seek position is not
+/// affected.
+pub fn reader(file: File, buffer: []u8) Reader {
+ return .init(file, buffer);
+}
+
+/// Positional is more threadsafe, since the global seek position is not
+/// affected, but when such syscalls are not available, preemptively choosing
+/// `Reader.Mode.streaming` will skip a failed syscall.
+pub fn readerStreaming(file: File, buffer: []u8) Reader {
+    return .{
+        .file = file,
+        .interface = Reader.initInterface(buffer),
+        .mode = .streaming,
+        .seek_err = error.Unseekable,
+    };
+}
+
+/// Defaults to positional reading; falls back to streaming.
+///
+/// Positional is more threadsafe, since the global seek position is not
+/// affected.
+pub fn writer(file: File, buffer: []u8) Writer {
+ return .init(file, buffer);
+}
+
+/// Positional is more threadsafe, since the global seek position is not
+/// affected, but when such syscalls are not available, preemptively choosing
+/// `Writer.Mode.streaming` will skip a failed syscall.
+pub fn writerStreaming(file: File, buffer: []u8) Writer {
+ return .initMode(file, buffer, .streaming);
+}
+
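When the handle is known to be unseekable (a pipe or socket), preselecting streaming mode skips the positional read that would fail with error.Unseekable. A sketch using `Reader.initMode` from above:

const std = @import("std");

fn drainPipe(gpa: std.mem.Allocator, pipe: std.fs.File) ![]u8 {
    var read_buffer: [1024]u8 = undefined;
    // For a regular file, `pipe.reader(&read_buffer)` (positional) would be preferable.
    var r: std.fs.File.Reader = .initMode(pipe, &read_buffer, .streaming);
    return r.interface.allocRemaining(gpa, .unlimited);
}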
const range_off: windows.LARGE_INTEGER = 0;
const range_len: windows.LARGE_INTEGER = 1;
@@ -1769,18 +2093,3 @@ pub fn downgradeLock(file: File) LockError!void {
};
}
}
-
-const File = @This();
-const std = @import("../std.zig");
-const builtin = @import("builtin");
-const Allocator = std.mem.Allocator;
-const posix = std.posix;
-const io = std.io;
-const math = std.math;
-const assert = std.debug.assert;
-const linux = std.os.linux;
-const windows = std.os.windows;
-const Os = std.builtin.Os;
-const maxInt = std.math.maxInt;
-const is_windows = builtin.os.tag == .windows;
-const Alignment = std.mem.Alignment;
lib/std/fs/path.zig
@@ -146,14 +146,11 @@ pub fn joinZ(allocator: Allocator, paths: []const []const u8) ![:0]u8 {
return out[0 .. out.len - 1 :0];
}
-pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter(formatJoin) {
+pub fn fmtJoin(paths: []const []const u8) std.fmt.Formatter([]const []const u8, formatJoin) {
return .{ .data = paths };
}
-fn formatJoin(paths: []const []const u8, comptime fmt: []const u8, options: std.fmt.FormatOptions, w: anytype) !void {
- _ = fmt;
- _ = options;
-
+fn formatJoin(paths: []const []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
const first_path_idx = for (paths, 0..) |p, idx| {
if (p.len != 0) break idx;
} else return;
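The same two-argument `std.fmt.Formatter(T, func)` shape supports ad-hoc helpers; a sketch with a hypothetical `fmtUpper` (not part of std):

const std = @import("std");

fn formatUpper(s: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
    for (s) |c| try w.writeByte(std.ascii.toUpper(c));
}

fn fmtUpper(s: []const u8) std.fmt.Formatter([]const u8, formatUpper) {
    return .{ .data = s };
}

test fmtUpper {
    var buf: [16]u8 = undefined;
    var w: std.io.Writer = .fixed(&buf);
    try w.print("path: {f}\n", .{fmtUpper("zig-out")});
    try std.testing.expectEqualStrings("path: ZIG-OUT\n", w.buffered());
}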
lib/std/fs/test.zig
@@ -1798,11 +1798,11 @@ test "walker" {
var num_walked: usize = 0;
while (try walker.next()) |entry| {
testing.expect(expected_basenames.has(entry.basename)) catch |err| {
- std.debug.print("found unexpected basename: {s}\n", .{std.fmt.fmtSliceEscapeLower(entry.basename)});
+ std.debug.print("found unexpected basename: {f}\n", .{std.ascii.hexEscape(entry.basename, .lower)});
return err;
};
testing.expect(expected_paths.has(entry.path)) catch |err| {
- std.debug.print("found unexpected path: {s}\n", .{std.fmt.fmtSliceEscapeLower(entry.path)});
+ std.debug.print("found unexpected path: {f}\n", .{std.ascii.hexEscape(entry.path, .lower)});
return err;
};
// make sure that the entry.dir is the containing dir
@@ -1953,113 +1953,6 @@ test "chown" {
try dir.chown(null, null);
}
-test "File.Metadata" {
- var tmp = tmpDir(.{});
- defer tmp.cleanup();
-
- const file = try tmp.dir.createFile("test_file", .{ .read = true });
- defer file.close();
-
- const metadata = try file.metadata();
- try testing.expectEqual(File.Kind.file, metadata.kind());
- try testing.expectEqual(@as(u64, 0), metadata.size());
- _ = metadata.accessed();
- _ = metadata.modified();
- _ = metadata.created();
-}
-
-test "File.Permissions" {
- if (native_os == .wasi)
- return error.SkipZigTest;
-
- var tmp = tmpDir(.{});
- defer tmp.cleanup();
-
- const file = try tmp.dir.createFile("test_file", .{ .read = true });
- defer file.close();
-
- const metadata = try file.metadata();
- var permissions = metadata.permissions();
-
- try testing.expect(!permissions.readOnly());
- permissions.setReadOnly(true);
- try testing.expect(permissions.readOnly());
-
- try file.setPermissions(permissions);
- const new_permissions = (try file.metadata()).permissions();
- try testing.expect(new_permissions.readOnly());
-
- // Must be set to non-read-only to delete
- permissions.setReadOnly(false);
- try file.setPermissions(permissions);
-}
-
-test "File.PermissionsUnix" {
- if (native_os == .windows or native_os == .wasi)
- return error.SkipZigTest;
-
- var tmp = tmpDir(.{});
- defer tmp.cleanup();
-
- const file = try tmp.dir.createFile("test_file", .{ .mode = 0o666, .read = true });
- defer file.close();
-
- const metadata = try file.metadata();
- var permissions = metadata.permissions();
-
- permissions.setReadOnly(true);
- try testing.expect(permissions.readOnly());
- try testing.expect(!permissions.inner.unixHas(.user, .write));
- permissions.inner.unixSet(.user, .{ .write = true });
- try testing.expect(!permissions.readOnly());
- try testing.expect(permissions.inner.unixHas(.user, .write));
- try testing.expect(permissions.inner.mode & 0o400 != 0);
-
- permissions.setReadOnly(true);
- try file.setPermissions(permissions);
- permissions = (try file.metadata()).permissions();
- try testing.expect(permissions.readOnly());
-
- // Must be set to non-read-only to delete
- permissions.setReadOnly(false);
- try file.setPermissions(permissions);
-
- const permissions_unix = File.PermissionsUnix.unixNew(0o754);
- try testing.expect(permissions_unix.unixHas(.user, .execute));
- try testing.expect(!permissions_unix.unixHas(.other, .execute));
-}
-
-test "delete a read-only file on windows" {
- if (native_os != .windows)
- return error.SkipZigTest;
-
- var tmp = testing.tmpDir(.{});
- defer tmp.cleanup();
-
- const file = try tmp.dir.createFile("test_file", .{ .read = true });
- defer file.close();
- // Create a file and make it read-only
- const metadata = try file.metadata();
- var permissions = metadata.permissions();
- permissions.setReadOnly(true);
- try file.setPermissions(permissions);
-
- // If the OS and filesystem support it, POSIX_SEMANTICS and IGNORE_READONLY_ATTRIBUTE
- // is used meaning that the deletion of a read-only file will succeed.
- // Otherwise, this delete will fail and the read-only flag must be unset before it's
- // able to be deleted.
- const delete_result = tmp.dir.deleteFile("test_file");
- if (delete_result) {
- try testing.expectError(error.FileNotFound, tmp.dir.deleteFile("test_file"));
- } else |err| {
- try testing.expectEqual(@as(anyerror, error.AccessDenied), err);
- // Now make the file not read-only
- permissions.setReadOnly(false);
- try file.setPermissions(permissions);
- try tmp.dir.deleteFile("test_file");
- }
-}
-
test "delete a setAsCwd directory on Windows" {
if (native_os != .windows) return error.SkipZigTest;
lib/std/hash/benchmark.zig
@@ -346,7 +346,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);
lib/std/heap/debug_allocator.zig
@@ -436,7 +436,7 @@ pub fn DebugAllocator(comptime config: Config) type {
const stack_trace = bucketStackTrace(bucket, slot_count, slot_index, .alloc);
const page_addr = @intFromPtr(bucket) & ~(page_size - 1);
const addr = page_addr + slot_index * size_class;
- log.err("memory address 0x{x} leaked: {}", .{ addr, stack_trace });
+ log.err("memory address 0x{x} leaked: {f}", .{ addr, stack_trace });
leaks = true;
}
}
@@ -463,7 +463,7 @@ pub fn DebugAllocator(comptime config: Config) type {
while (it.next()) |large_alloc| {
if (config.retain_metadata and large_alloc.freed) continue;
const stack_trace = large_alloc.getStackTrace(.alloc);
- log.err("memory address 0x{x} leaked: {}", .{
+ log.err("memory address 0x{x} leaked: {f}", .{
@intFromPtr(large_alloc.bytes.ptr), stack_trace,
});
leaks = true;
@@ -522,7 +522,7 @@ pub fn DebugAllocator(comptime config: Config) type {
.index = 0,
};
std.debug.captureStackTrace(ret_addr, &second_free_stack_trace);
- log.err("Double free detected. Allocation: {} First free: {} Second free: {}", .{
+ log.err("Double free detected. Allocation: {f} First free: {f} Second free: {f}", .{
alloc_stack_trace, free_stack_trace, second_free_stack_trace,
});
}
@@ -568,7 +568,7 @@ pub fn DebugAllocator(comptime config: Config) type {
.index = 0,
};
std.debug.captureStackTrace(ret_addr, &free_stack_trace);
- log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{
entry.value_ptr.bytes.len,
old_mem.len,
entry.value_ptr.getStackTrace(.alloc),
@@ -678,7 +678,7 @@ pub fn DebugAllocator(comptime config: Config) type {
.index = 0,
};
std.debug.captureStackTrace(ret_addr, &free_stack_trace);
- log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{
entry.value_ptr.bytes.len,
old_mem.len,
entry.value_ptr.getStackTrace(.alloc),
@@ -907,7 +907,7 @@ pub fn DebugAllocator(comptime config: Config) type {
};
std.debug.captureStackTrace(return_address, &free_stack_trace);
if (old_memory.len != requested_size) {
- log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{
requested_size,
old_memory.len,
bucketStackTrace(bucket, slot_count, slot_index, .alloc),
@@ -915,7 +915,7 @@ pub fn DebugAllocator(comptime config: Config) type {
});
}
if (alignment != slot_alignment) {
- log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {f} Free: {f}", .{
slot_alignment.toByteUnits(),
alignment.toByteUnits(),
bucketStackTrace(bucket, slot_count, slot_index, .alloc),
@@ -1006,7 +1006,7 @@ pub fn DebugAllocator(comptime config: Config) type {
};
std.debug.captureStackTrace(return_address, &free_stack_trace);
if (memory.len != requested_size) {
- log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation size {d} bytes does not match free size {d}. Allocation: {f} Free: {f}", .{
requested_size,
memory.len,
bucketStackTrace(bucket, slot_count, slot_index, .alloc),
@@ -1014,7 +1014,7 @@ pub fn DebugAllocator(comptime config: Config) type {
});
}
if (alignment != slot_alignment) {
- log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {} Free: {}", .{
+ log.err("Allocation alignment {d} does not match free alignment {d}. Allocation: {f} Free: {f}", .{
slot_alignment.toByteUnits(),
alignment.toByteUnits(),
bucketStackTrace(bucket, slot_count, slot_index, .alloc),
@@ -1054,7 +1054,7 @@ const TraceKind = enum {
free,
};
-const test_config = Config{};
+const test_config: Config = .{};
test "small allocations - free in same order" {
var gpa = DebugAllocator(test_config){};
lib/std/http/Client.zig
@@ -823,21 +823,28 @@ pub const Request = struct {
return error.UnsupportedTransferEncoding;
const connection = req.connection.?;
- const w = connection.writer();
+ var connection_writer_adapter = connection.writer().adaptToNewApi();
+ const w = &connection_writer_adapter.new_interface;
+ sendAdapted(req, connection, w) catch |err| switch (err) {
+ error.WriteFailed => return connection_writer_adapter.err.?,
+ else => |e| return e,
+ };
+ }
- try req.method.write(w);
+ fn sendAdapted(req: *Request, connection: *Connection, w: *std.io.Writer) !void {
+ try req.method.format(w, "");
try w.writeByte(' ');
if (req.method == .CONNECT) {
- try req.uri.writeToStream(.{ .authority = true }, w);
+ try req.uri.writeToStream(w, .{ .authority = true });
} else {
- try req.uri.writeToStream(.{
+ try req.uri.writeToStream(w, .{
.scheme = connection.proxied,
.authentication = connection.proxied,
.authority = connection.proxied,
.path = true,
.query = true,
- }, w);
+ });
}
try w.writeByte(' ');
try w.writeAll(@tagName(req.version));
@@ -845,7 +852,7 @@ pub const Request = struct {
if (try emitOverridableHeader("host: ", req.headers.host, w)) {
try w.writeAll("host: ");
- try req.uri.writeToStream(.{ .authority = true }, w);
+ try req.uri.writeToStream(w, .{ .authority = true });
try w.writeAll("\r\n");
}
@@ -1284,10 +1291,10 @@ pub const basic_authorization = struct {
pub fn valueLengthFromUri(uri: Uri) usize {
var stream = std.io.countingWriter(std.io.null_writer);
- try stream.writer().print("{user}", .{uri.user orelse Uri.Component.empty});
+ try stream.writer().print("{fuser}", .{uri.user orelse Uri.Component.empty});
const user_len = stream.bytes_written;
stream.bytes_written = 0;
- try stream.writer().print("{password}", .{uri.password orelse Uri.Component.empty});
+ try stream.writer().print("{fpassword}", .{uri.password orelse Uri.Component.empty});
const password_len = stream.bytes_written;
return valueLength(@intCast(user_len), @intCast(password_len));
}
@@ -1295,10 +1302,10 @@ pub const basic_authorization = struct {
pub fn value(uri: Uri, out: []u8) []u8 {
var buf: [max_user_len + ":".len + max_password_len]u8 = undefined;
var stream = std.io.fixedBufferStream(&buf);
- stream.writer().print("{user}", .{uri.user orelse Uri.Component.empty}) catch
+ stream.writer().print("{fuser}", .{uri.user orelse Uri.Component.empty}) catch
unreachable;
assert(stream.pos <= max_user_len);
- stream.writer().print(":{password}", .{uri.password orelse Uri.Component.empty}) catch
+ stream.writer().print(":{fpassword}", .{uri.password orelse Uri.Component.empty}) catch
unreachable;
@memcpy(out[0..prefix.len], prefix);
lib/std/http/test.zig
@@ -385,10 +385,8 @@ test "general client/server API coverage" {
fn handleRequest(request: *http.Server.Request, listen_port: u16) !void {
const log = std.log.scoped(.server);
- log.info("{} {s} {s}", .{
- request.head.method,
- @tagName(request.head.version),
- request.head.target,
+ log.info("{f} {s} {s}", .{
+ request.head.method, @tagName(request.head.version), request.head.target,
});
const gpa = std.testing.allocator;
lib/std/io/buffered_atomic_file.zig
@@ -33,7 +33,7 @@ pub const BufferedAtomicFile = struct {
self.atomic_file = try dir.atomicFile(dest_path, atomic_file_options);
errdefer self.atomic_file.deinit();
- self.file_writer = self.atomic_file.file.writer();
+ self.file_writer = self.atomic_file.file.deprecatedWriter();
self.buffered_writer = .{ .unbuffered_writer = self.file_writer };
return self;
}
lib/std/io/DeprecatedWriter.zig
@@ -21,7 +21,7 @@ pub fn writeAll(self: Self, bytes: []const u8) anyerror!void {
}
pub fn print(self: Self, comptime format: []const u8, args: anytype) anyerror!void {
- return std.fmt.format(self, format, args);
+ return std.fmt.deprecatedFormat(self, format, args);
}
pub fn writeByte(self: Self, byte: u8) anyerror!void {
@@ -81,3 +81,29 @@ pub fn writeFile(self: Self, file: std.fs.File) anyerror!void {
if (n < buf.len) return;
}
}
+
+/// Helper for bridging to the new `Writer` API while upgrading.
+pub fn adaptToNewApi(self: *const Self) Adapter {
+ return .{
+ .derp_writer = self.*,
+ .new_interface = .{
+ .buffer = &.{},
+ .vtable = &.{ .drain = Adapter.drain },
+ },
+ };
+}
+
+pub const Adapter = struct {
+ derp_writer: Self,
+ new_interface: std.io.Writer,
+ err: ?Error = null,
+
+ fn drain(w: *std.io.Writer, data: []const []const u8, splat: usize) std.io.Writer.Error!usize {
+ _ = splat;
+ const a: *@This() = @fieldParentPtr("new_interface", w);
+ return a.derp_writer.write(data[0]) catch |err| {
+ a.err = err;
+ return error.WriteFailed;
+ };
+ }
+};
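A sketch of how the adapter is intended to be used at a migrating call site (the same shape as the http.Client change in this commit); `old_writer` stands for any old-style writer that exposes `adaptToNewApi`:

const std = @import("std");

fn printViaNewApi(old_writer: anytype, value: u32) !void {
    var adapter = old_writer.adaptToNewApi();
    const w: *std.io.Writer = &adapter.new_interface;
    w.print("value = {d}\n", .{value}) catch |err| switch (err) {
        // The adapter records the underlying error; surface that instead of WriteFailed.
        error.WriteFailed => return adapter.err.?,
    };
}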
lib/std/io/Reader.zig
@@ -26,7 +26,8 @@ pub const VTable = struct {
/// Returns the number of bytes written, which will be at minimum `0` and
/// at most `limit`. The number returned, including zero, does not indicate
/// end of stream. `limit` is guaranteed to be at least as large as the
- /// buffer capacity of `w`.
+ /// buffer capacity of `w`, a value whose minimum size is determined by the
+ /// stream implementation.
///
/// The reader's internal logical seek position moves forward in accordance
/// with the number of bytes returned from this function.
@@ -1243,10 +1244,10 @@ test peekArray {
test discardAll {
var r: Reader = .fixed("foobar");
- try r.discard(3);
+ try r.discardAll(3);
try testing.expectEqualStrings("bar", try r.take(3));
- try r.discard(0);
- try testing.expectError(error.EndOfStream, r.discard(1));
+ try r.discardAll(0);
+ try testing.expectError(error.EndOfStream, r.discardAll(1));
}
test discardRemaining {
@@ -1355,9 +1356,11 @@ test readVec {
test "expected error.EndOfStream" {
// Unit test inspired by https://github.com/ziglang/zig/issues/17733
- var r: std.io.Reader = .fixed("");
- try std.testing.expectError(error.EndOfStream, r.readEnum(enum(u8) { a, b }, .little));
- try std.testing.expectError(error.EndOfStream, r.isBytes("foo"));
+ var buffer: [3]u8 = undefined;
+ var r: std.io.Reader = .fixed(&buffer);
+ r.end = 0; // capacity 3, but empty
+ try std.testing.expectError(error.EndOfStream, r.takeEnum(enum(u8) { a, b }, .little));
+ try std.testing.expectError(error.EndOfStream, r.take(3));
}
fn endingStream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
@@ -1389,21 +1392,30 @@ fn failingDiscard(r: *Reader, limit: Limit) Error!usize {
test "readAlloc when the backing reader provides one byte at a time" {
const OneByteReader = struct {
str: []const u8,
- curr: usize,
-
- fn read(self: *@This(), dest: []u8) usize {
- if (self.str.len <= self.curr or dest.len == 0)
- return 0;
-
- dest[0] = self.str[self.curr];
- self.curr += 1;
+ i: usize,
+ reader: Reader,
+
+ fn stream(r: *Reader, w: *Writer, limit: Limit) StreamError!usize {
+ assert(@intFromEnum(limit) >= 1);
+ const self: *@This() = @fieldParentPtr("reader", r);
+ if (self.str.len - self.i == 0) return error.EndOfStream;
+ try w.writeByte(self.str[self.i]);
+ self.i += 1;
return 1;
}
};
-
const str = "This is a test";
- var one_byte_stream: OneByteReader = .init(str);
- const res = try one_byte_stream.reader().streamReadAlloc(std.testing.allocator, str.len + 1);
+ var one_byte_stream: OneByteReader = .{
+ .str = str,
+ .i = 0,
+ .reader = .{
+ .buffer = &.{},
+ .vtable = &.{ .stream = OneByteReader.stream },
+ .seek = 0,
+ .end = 0,
+ },
+ };
+ const res = try one_byte_stream.reader.allocRemaining(std.testing.allocator, .unlimited);
defer std.testing.allocator.free(res);
try std.testing.expectEqualStrings(str, res);
}
lib/std/io/test.zig
@@ -24,7 +24,7 @@ test "write a file, read it, then delete it" {
var file = try tmp.dir.createFile(tmp_file_name, .{});
defer file.close();
- var buf_stream = io.bufferedWriter(file.writer());
+ var buf_stream = io.bufferedWriter(file.deprecatedWriter());
const st = buf_stream.writer();
try st.print("begin", .{});
try st.writeAll(data[0..]);
@@ -45,7 +45,7 @@ test "write a file, read it, then delete it" {
const expected_file_size: u64 = "begin".len + data.len + "end".len;
try expectEqual(expected_file_size, file_size);
- var buf_stream = io.bufferedReader(file.reader());
+ var buf_stream = io.bufferedReader(file.deprecatedReader());
const st = buf_stream.reader();
const contents = try st.readAllAlloc(std.testing.allocator, 2 * 1024);
defer std.testing.allocator.free(contents);
@@ -66,7 +66,7 @@ test "BitStreams with File Stream" {
var file = try tmp.dir.createFile(tmp_file_name, .{});
defer file.close();
- var bit_stream = io.bitWriter(native_endian, file.writer());
+ var bit_stream = io.bitWriter(native_endian, file.deprecatedWriter());
try bit_stream.writeBits(@as(u2, 1), 1);
try bit_stream.writeBits(@as(u5, 2), 2);
@@ -80,7 +80,7 @@ test "BitStreams with File Stream" {
var file = try tmp.dir.openFile(tmp_file_name, .{});
defer file.close();
- var bit_stream = io.bitReader(native_endian, file.reader());
+ var bit_stream = io.bitReader(native_endian, file.deprecatedReader());
var out_bits: u16 = undefined;
lib/std/io/Writer.zig
@@ -37,6 +37,10 @@ pub const VTable = struct {
/// The last element of `data` is repeated as necessary so that it is
/// written `splat` number of times, which may be zero.
///
+ /// This function may not be called if the data to be written could have
+ /// been stored in `buffer` instead, including when the amount of data to
+ /// be written is zero and the buffer capacity is zero.
+ ///
/// Number of bytes consumed from `data` is returned, excluding bytes from
/// `buffer`.
///
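To make the drain contract concrete, a sketch of a custom byte-counting std.io.Writer; it mirrors the minimal pattern of the DeprecatedWriter adapter in this commit (consume the first slice and let the interface call drain again), and `CountingWriter` is illustrative only:

const std = @import("std");

const CountingWriter = struct {
    count: u64 = 0,
    interface: std.io.Writer = .{
        .buffer = &.{}, // zero-capacity buffer: every write reaches drain directly
        .vtable = &.{ .drain = drain },
    },

    fn drain(w: *std.io.Writer, data: []const []const u8, splat: usize) std.io.Writer.Error!usize {
        _ = splat; // partial consumption is allowed; the interface calls drain again for the rest
        const self: *CountingWriter = @fieldParentPtr("interface", w);
        self.count += data[0].len;
        return data[0].len; // bytes consumed from `data` (nothing is buffered here)
    }
};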
@@ -800,18 +804,13 @@ pub fn printValue(
) Error!void {
const T = @TypeOf(value);
- if (comptime std.mem.eql(u8, fmt, "*")) {
- return w.printAddress(value);
- }
+ if (comptime std.mem.eql(u8, fmt, "*")) return w.printAddress(value);
+ if (fmt.len > 0 and fmt[0] == 'f') return value.format(w, fmt[1..]);
const is_any = comptime std.mem.eql(u8, fmt, ANY);
- if (!is_any and std.meta.hasMethod(T, "format")) {
- if (fmt.len > 0 and fmt[0] == 'f') {
- return value.format(w, fmt[1..]);
- } else if (fmt.len == 0) {
- // after 0.15.0 is tagged, delete the hasMethod condition and this compile error
- @compileError("ambiguous format string; specify {f} to call format method, or {any} to skip it");
- }
+ if (!is_any and std.meta.hasMethod(T, "format") and fmt.len == 0) {
+ // after 0.15.0 is tagged, delete this compile error and its condition
+ @compileError("ambiguous format string; specify {f} to call format method, or {any} to skip it");
}
switch (@typeInfo(T)) {
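A sketch of a type opting into the `f` specifier handled above, using the (writer, fmt) method signature that printValue calls here; `Point` is illustrative:

const std = @import("std");

const Point = struct {
    x: f32,
    y: f32,

    pub fn format(p: Point, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
        _ = fmt; // whatever followed the leading `f` in the placeholder
        try w.print("({d:.3},{d:.3})", .{ p.x, p.y });
    }
};

test "print with {f}" {
    var buf: [32]u8 = undefined;
    var w: std.io.Writer = .fixed(&buf);
    try w.print("{f}", .{Point{ .x = 1.0, .y = 2.5 }});
    try std.testing.expectEqualStrings("(1.000,2.500)", w.buffered());
}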
@@ -952,9 +951,8 @@ pub fn printValue(
},
.pointer => |ptr_info| switch (ptr_info.size) {
.one => switch (@typeInfo(ptr_info.child)) {
- .array, .@"enum", .@"union", .@"struct" => {
- return w.printValue(fmt, options, value.*, max_depth);
- },
+ .array => |array_info| return w.printValue(fmt, options, @as([]const array_info.child, value), max_depth),
+ .@"enum", .@"union", .@"struct" => return w.printValue(fmt, options, value.*, max_depth),
else => {
var buffers: [2][]const u8 = .{ @typeName(ptr_info.child), "@" };
try w.writeVecAll(&buffers);
@@ -1120,7 +1118,12 @@ pub fn printAscii(w: *Writer, bytes: []const u8, options: std.fmt.Options) Error
pub fn printUnicodeCodepoint(w: *Writer, c: u21, options: std.fmt.Options) Error!void {
var buf: [4]u8 = undefined;
- const len = try std.unicode.utf8Encode(c, &buf);
+ const len = std.unicode.utf8Encode(c, &buf) catch |err| switch (err) {
+ error.Utf8CannotEncodeSurrogateHalf, error.CodepointTooLarge => l: {
+ buf[0..3].* = std.unicode.replacement_character_utf8;
+ break :l 3;
+ },
+ };
return w.alignBufferOptions(buf[0..len], options);
}
@@ -1553,13 +1556,7 @@ test "formatValue max_depth" {
x: f32,
y: f32,
- pub fn format(
- self: SelfType,
- comptime fmt: []const u8,
- options: std.fmt.Options,
- w: *Writer,
- ) Error!void {
- _ = options;
+ pub fn format(self: SelfType, w: *Writer, comptime fmt: []const u8) Error!void {
if (fmt.len == 0) {
return w.print("({d:.3},{d:.3})", .{ self.x, self.y });
} else {
@@ -1600,131 +1597,131 @@ test "formatValue max_depth" {
try w.printValue("", .{}, inst, 0);
try testing.expectEqualStrings("io.Writer.test.printValue max_depth.S{ ... }", w.buffered());
- w.reset();
+ w = .fixed(&buf);
try w.printValue("", .{}, inst, 1);
try testing.expectEqualStrings("io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ ... }, .tu = io.Writer.test.printValue max_depth.TU{ ... }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }", w.buffered());
- w.reset();
+ w = .fixed(&buf);
try w.printValue("", .{}, inst, 2);
try testing.expectEqualStrings("io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ ... }, .tu = io.Writer.test.printValue max_depth.TU{ ... }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }, .tu = io.Writer.test.printValue max_depth.TU{ .ptr = io.Writer.test.printValue max_depth.TU{ ... } }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }", w.buffered());
- w.reset();
+ w = .fixed(&buf);
try w.printValue("", .{}, inst, 3);
try testing.expectEqualStrings("io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ .a = io.Writer.test.printValue max_depth.S{ ... }, .tu = io.Writer.test.printValue max_depth.TU{ ... }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }, .tu = io.Writer.test.printValue max_depth.TU{ .ptr = io.Writer.test.printValue max_depth.TU{ ... } }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }, .tu = io.Writer.test.printValue max_depth.TU{ .ptr = io.Writer.test.printValue max_depth.TU{ .ptr = io.Writer.test.printValue max_depth.TU{ ... } } }, .e = io.Writer.test.printValue max_depth.E.Two, .vec = (10.200,2.220) }", w.buffered());
const vec: @Vector(4, i32) = .{ 1, 2, 3, 4 };
- w.reset();
+ w = .fixed(&buf);
try w.printValue("", .{}, vec, 0);
try testing.expectEqualStrings("{ ... }", w.buffered());
- w.reset();
+ w = .fixed(&buf);
try w.printValue("", .{}, vec, 1);
try testing.expectEqualStrings("{ 1, 2, 3, 4 }", w.buffered());
}
test printDuration {
- testDurationCase("0ns", 0);
- testDurationCase("1ns", 1);
- testDurationCase("999ns", std.time.ns_per_us - 1);
- testDurationCase("1us", std.time.ns_per_us);
- testDurationCase("1.45us", 1450);
- testDurationCase("1.5us", 3 * std.time.ns_per_us / 2);
- testDurationCase("14.5us", 14500);
- testDurationCase("145us", 145000);
- testDurationCase("999.999us", std.time.ns_per_ms - 1);
- testDurationCase("1ms", std.time.ns_per_ms + 1);
- testDurationCase("1.5ms", 3 * std.time.ns_per_ms / 2);
- testDurationCase("1.11ms", 1110000);
- testDurationCase("1.111ms", 1111000);
- testDurationCase("1.111ms", 1111100);
- testDurationCase("999.999ms", std.time.ns_per_s - 1);
- testDurationCase("1s", std.time.ns_per_s);
- testDurationCase("59.999s", std.time.ns_per_min - 1);
- testDurationCase("1m", std.time.ns_per_min);
- testDurationCase("1h", std.time.ns_per_hour);
- testDurationCase("1d", std.time.ns_per_day);
- testDurationCase("1w", std.time.ns_per_week);
- testDurationCase("1y", 365 * std.time.ns_per_day);
- testDurationCase("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1
- testDurationCase("1y1h1.001s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms);
- testDurationCase("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us);
- testDurationCase("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1);
- testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms);
- testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1);
- testDurationCase("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999);
- testDurationCase("584y49w23h34m33.709s", std.math.maxInt(u64));
-
- testing.expectFmt("=======0ns", "{D:=>10}", .{0});
- testing.expectFmt("1ns=======", "{D:=<10}", .{1});
- testing.expectFmt(" 999ns ", "{D:^10}", .{std.time.ns_per_us - 1});
+ try testDurationCase("0ns", 0);
+ try testDurationCase("1ns", 1);
+ try testDurationCase("999ns", std.time.ns_per_us - 1);
+ try testDurationCase("1us", std.time.ns_per_us);
+ try testDurationCase("1.45us", 1450);
+ try testDurationCase("1.5us", 3 * std.time.ns_per_us / 2);
+ try testDurationCase("14.5us", 14500);
+ try testDurationCase("145us", 145000);
+ try testDurationCase("999.999us", std.time.ns_per_ms - 1);
+ try testDurationCase("1ms", std.time.ns_per_ms + 1);
+ try testDurationCase("1.5ms", 3 * std.time.ns_per_ms / 2);
+ try testDurationCase("1.11ms", 1110000);
+ try testDurationCase("1.111ms", 1111000);
+ try testDurationCase("1.111ms", 1111100);
+ try testDurationCase("999.999ms", std.time.ns_per_s - 1);
+ try testDurationCase("1s", std.time.ns_per_s);
+ try testDurationCase("59.999s", std.time.ns_per_min - 1);
+ try testDurationCase("1m", std.time.ns_per_min);
+ try testDurationCase("1h", std.time.ns_per_hour);
+ try testDurationCase("1d", std.time.ns_per_day);
+ try testDurationCase("1w", std.time.ns_per_week);
+ try testDurationCase("1y", 365 * std.time.ns_per_day);
+ try testDurationCase("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1
+ try testDurationCase("1y1h1.001s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms);
+ try testDurationCase("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us);
+ try testDurationCase("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1);
+ try testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms);
+ try testDurationCase("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1);
+ try testDurationCase("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999);
+ try testDurationCase("584y49w23h34m33.709s", std.math.maxInt(u64));
+
+ try testing.expectFmt("=======0ns", "{D:=>10}", .{0});
+ try testing.expectFmt("1ns=======", "{D:=<10}", .{1});
+ try testing.expectFmt(" 999ns ", "{D:^10}", .{std.time.ns_per_us - 1});
}
test printDurationSigned {
- testDurationCaseSigned("0ns", 0);
- testDurationCaseSigned("1ns", 1);
- testDurationCaseSigned("-1ns", -(1));
- testDurationCaseSigned("999ns", std.time.ns_per_us - 1);
- testDurationCaseSigned("-999ns", -(std.time.ns_per_us - 1));
- testDurationCaseSigned("1us", std.time.ns_per_us);
- testDurationCaseSigned("-1us", -(std.time.ns_per_us));
- testDurationCaseSigned("1.45us", 1450);
- testDurationCaseSigned("-1.45us", -(1450));
- testDurationCaseSigned("1.5us", 3 * std.time.ns_per_us / 2);
- testDurationCaseSigned("-1.5us", -(3 * std.time.ns_per_us / 2));
- testDurationCaseSigned("14.5us", 14500);
- testDurationCaseSigned("-14.5us", -(14500));
- testDurationCaseSigned("145us", 145000);
- testDurationCaseSigned("-145us", -(145000));
- testDurationCaseSigned("999.999us", std.time.ns_per_ms - 1);
- testDurationCaseSigned("-999.999us", -(std.time.ns_per_ms - 1));
- testDurationCaseSigned("1ms", std.time.ns_per_ms + 1);
- testDurationCaseSigned("-1ms", -(std.time.ns_per_ms + 1));
- testDurationCaseSigned("1.5ms", 3 * std.time.ns_per_ms / 2);
- testDurationCaseSigned("-1.5ms", -(3 * std.time.ns_per_ms / 2));
- testDurationCaseSigned("1.11ms", 1110000);
- testDurationCaseSigned("-1.11ms", -(1110000));
- testDurationCaseSigned("1.111ms", 1111000);
- testDurationCaseSigned("-1.111ms", -(1111000));
- testDurationCaseSigned("1.111ms", 1111100);
- testDurationCaseSigned("-1.111ms", -(1111100));
- testDurationCaseSigned("999.999ms", std.time.ns_per_s - 1);
- testDurationCaseSigned("-999.999ms", -(std.time.ns_per_s - 1));
- testDurationCaseSigned("1s", std.time.ns_per_s);
- testDurationCaseSigned("-1s", -(std.time.ns_per_s));
- testDurationCaseSigned("59.999s", std.time.ns_per_min - 1);
- testDurationCaseSigned("-59.999s", -(std.time.ns_per_min - 1));
- testDurationCaseSigned("1m", std.time.ns_per_min);
- testDurationCaseSigned("-1m", -(std.time.ns_per_min));
- testDurationCaseSigned("1h", std.time.ns_per_hour);
- testDurationCaseSigned("-1h", -(std.time.ns_per_hour));
- testDurationCaseSigned("1d", std.time.ns_per_day);
- testDurationCaseSigned("-1d", -(std.time.ns_per_day));
- testDurationCaseSigned("1w", std.time.ns_per_week);
- testDurationCaseSigned("-1w", -(std.time.ns_per_week));
- testDurationCaseSigned("1y", 365 * std.time.ns_per_day);
- testDurationCaseSigned("-1y", -(365 * std.time.ns_per_day));
- testDurationCaseSigned("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1d
- testDurationCaseSigned("-1y52w23h59m59.999s", -(730 * std.time.ns_per_day - 1)); // 365d = 52w1d
- testDurationCaseSigned("1y1h1.001s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms);
- testDurationCaseSigned("-1y1h1.001s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms));
- testDurationCaseSigned("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us);
- testDurationCaseSigned("-1y1h1s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us));
- testDurationCaseSigned("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1);
- testDurationCaseSigned("-1y1h999.999us", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1));
- testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms);
- testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms));
- testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1);
- testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1));
- testDurationCaseSigned("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999);
- testDurationCaseSigned("-1y1m999ns", -(365 * std.time.ns_per_day + std.time.ns_per_min + 999));
- testDurationCaseSigned("292y24w3d23h47m16.854s", std.math.maxInt(i64));
- testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64) + 1);
- testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64));
-
- testing.expectFmt("=======0ns", "{s:=>10}", .{0});
- testing.expectFmt("1ns=======", "{s:=<10}", .{1});
- testing.expectFmt("-1ns======", "{s:=<10}", .{-(1)});
- testing.expectFmt(" -999ns ", "{s:^10}", .{-(std.time.ns_per_us - 1)});
+ try testDurationCaseSigned("0ns", 0);
+ try testDurationCaseSigned("1ns", 1);
+ try testDurationCaseSigned("-1ns", -(1));
+ try testDurationCaseSigned("999ns", std.time.ns_per_us - 1);
+ try testDurationCaseSigned("-999ns", -(std.time.ns_per_us - 1));
+ try testDurationCaseSigned("1us", std.time.ns_per_us);
+ try testDurationCaseSigned("-1us", -(std.time.ns_per_us));
+ try testDurationCaseSigned("1.45us", 1450);
+ try testDurationCaseSigned("-1.45us", -(1450));
+ try testDurationCaseSigned("1.5us", 3 * std.time.ns_per_us / 2);
+ try testDurationCaseSigned("-1.5us", -(3 * std.time.ns_per_us / 2));
+ try testDurationCaseSigned("14.5us", 14500);
+ try testDurationCaseSigned("-14.5us", -(14500));
+ try testDurationCaseSigned("145us", 145000);
+ try testDurationCaseSigned("-145us", -(145000));
+ try testDurationCaseSigned("999.999us", std.time.ns_per_ms - 1);
+ try testDurationCaseSigned("-999.999us", -(std.time.ns_per_ms - 1));
+ try testDurationCaseSigned("1ms", std.time.ns_per_ms + 1);
+ try testDurationCaseSigned("-1ms", -(std.time.ns_per_ms + 1));
+ try testDurationCaseSigned("1.5ms", 3 * std.time.ns_per_ms / 2);
+ try testDurationCaseSigned("-1.5ms", -(3 * std.time.ns_per_ms / 2));
+ try testDurationCaseSigned("1.11ms", 1110000);
+ try testDurationCaseSigned("-1.11ms", -(1110000));
+ try testDurationCaseSigned("1.111ms", 1111000);
+ try testDurationCaseSigned("-1.111ms", -(1111000));
+ try testDurationCaseSigned("1.111ms", 1111100);
+ try testDurationCaseSigned("-1.111ms", -(1111100));
+ try testDurationCaseSigned("999.999ms", std.time.ns_per_s - 1);
+ try testDurationCaseSigned("-999.999ms", -(std.time.ns_per_s - 1));
+ try testDurationCaseSigned("1s", std.time.ns_per_s);
+ try testDurationCaseSigned("-1s", -(std.time.ns_per_s));
+ try testDurationCaseSigned("59.999s", std.time.ns_per_min - 1);
+ try testDurationCaseSigned("-59.999s", -(std.time.ns_per_min - 1));
+ try testDurationCaseSigned("1m", std.time.ns_per_min);
+ try testDurationCaseSigned("-1m", -(std.time.ns_per_min));
+ try testDurationCaseSigned("1h", std.time.ns_per_hour);
+ try testDurationCaseSigned("-1h", -(std.time.ns_per_hour));
+ try testDurationCaseSigned("1d", std.time.ns_per_day);
+ try testDurationCaseSigned("-1d", -(std.time.ns_per_day));
+ try testDurationCaseSigned("1w", std.time.ns_per_week);
+ try testDurationCaseSigned("-1w", -(std.time.ns_per_week));
+ try testDurationCaseSigned("1y", 365 * std.time.ns_per_day);
+ try testDurationCaseSigned("-1y", -(365 * std.time.ns_per_day));
+ try testDurationCaseSigned("1y52w23h59m59.999s", 730 * std.time.ns_per_day - 1); // 365d = 52w1d
+ try testDurationCaseSigned("-1y52w23h59m59.999s", -(730 * std.time.ns_per_day - 1)); // 365d = 52w1d
+ try testDurationCaseSigned("1y1h1.001s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms);
+ try testDurationCaseSigned("-1y1h1.001s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms));
+ try testDurationCaseSigned("1y1h1s", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us);
+ try testDurationCaseSigned("-1y1h1s", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us));
+ try testDurationCaseSigned("1y1h999.999us", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1);
+ try testDurationCaseSigned("-1y1h999.999us", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1));
+ try testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms);
+ try testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms));
+ try testDurationCaseSigned("1y1h1ms", 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1);
+ try testDurationCaseSigned("-1y1h1ms", -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1));
+ try testDurationCaseSigned("1y1m999ns", 365 * std.time.ns_per_day + std.time.ns_per_min + 999);
+ try testDurationCaseSigned("-1y1m999ns", -(365 * std.time.ns_per_day + std.time.ns_per_min + 999));
+ try testDurationCaseSigned("292y24w3d23h47m16.854s", std.math.maxInt(i64));
+ try testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64) + 1);
+ try testDurationCaseSigned("-292y24w3d23h47m16.854s", std.math.minInt(i64));
+
+ try testing.expectFmt("=======0ns", "{D:=>10}", .{0});
+ try testing.expectFmt("1ns=======", "{D:=<10}", .{1});
+ try testing.expectFmt("-1ns======", "{D:=<10}", .{-(1)});
+ try testing.expectFmt(" -999ns ", "{D:^10}", .{-(std.time.ns_per_us - 1)});
}
fn testDurationCase(expected: []const u8, input: u64) !void {
@@ -1762,7 +1759,7 @@ test printIntOptions {
test "printInt with comptime_int" {
var buf: [20]u8 = undefined;
var w: Writer = .fixed(&buf);
- try w.printInt(@as(comptime_int, 123456789123456789), "", .{});
+ try w.printInt("", .{}, @as(comptime_int, 123456789123456789));
try std.testing.expectEqualStrings("123456789123456789", w.buffered());
}
@@ -1777,7 +1774,7 @@ test "printFloat with comptime_float" {
fn testPrintIntCase(expected: []const u8, value: anytype, base: u8, case: std.fmt.Case, options: std.fmt.Options) !void {
var buffer: [100]u8 = undefined;
var w: Writer = .fixed(&buffer);
- w.printIntOptions(value, base, case, options);
+ try w.printIntOptions(value, base, case, options);
try testing.expectEqualStrings(expected, w.buffered());
}
@@ -1832,17 +1829,15 @@ test "fixed output" {
try w.writeAll("world");
try testing.expect(std.mem.eql(u8, w.buffered(), "Helloworld"));
- try testing.expectError(error.WriteStreamEnd, w.writeAll("!"));
+ try testing.expectError(error.WriteFailed, w.writeAll("!"));
try testing.expect(std.mem.eql(u8, w.buffered(), "Helloworld"));
- w.reset();
+ w = .fixed(&buffer);
+
try testing.expect(w.buffered().len == 0);
- try testing.expectError(error.WriteStreamEnd, w.writeAll("Hello world!"));
+ try testing.expectError(error.WriteFailed, w.writeAll("Hello world!"));
try testing.expect(std.mem.eql(u8, w.buffered(), "Hello worl"));
-
- try w.seekTo((try w.getEndPos()) + 1);
- try testing.expectError(error.WriteStreamEnd, w.writeAll("H"));
}
pub fn failingDrain(w: *Writer, data: []const []const u8, splat: usize) Error!usize {
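
The duration tests above now go through a dedicated `{D}` specifier. A minimal usage sketch, assuming `{D}` accepts a signed nanosecond count and composes with the usual fill/width/alignment syntax exactly as the expectFmt lines above show (the test name is illustrative):

    const std = @import("std");

    test "duration specifier sketch" {
        // {D} renders a nanosecond count as a human-readable duration.
        try std.testing.expectFmt("1us", "{D}", .{1000});
        try std.testing.expectFmt("-1.5ms", "{D}", .{-1_500_000});
        // Fill, alignment, and width combine with the specifier as usual.
        try std.testing.expectFmt("=======0ns", "{D:=>10}", .{0});
    }
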
lib/std/json/dynamic.zig
@@ -56,7 +56,7 @@ pub const Value = union(enum) {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stringify(self, .{}, stderr) catch return;
}
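
This and the many similar hunks below are mechanical renames: the adapter previously returned by `writer()`/`reader()` survives as `deprecatedWriter()`/`deprecatedReader()` while the new `*std.io.Writer` interface is phased in. A minimal sketch of the renamed call, assuming the returned adapter behaves as before (`logLine` is an illustrative helper, not part of the change):

    const std = @import("std");

    fn logLine(file: std.fs.File, msg: []const u8) !void {
        // Previously: file.writer().writeAll(msg)
        const w = file.deprecatedWriter();
        try w.writeAll(msg);
        try w.writeAll("\n");
    }
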
lib/std/json/fmt.zig
@@ -1,4 +1,5 @@
-const std = @import("std");
+const std = @import("../std.zig");
+const assert = std.debug.assert;
const stringify = @import("stringify.zig").stringify;
const StringifyOptions = @import("stringify.zig").StringifyOptions;
@@ -14,14 +15,8 @@ pub fn Formatter(comptime T: type) type {
value: T,
options: StringifyOptions,
- pub fn format(
- self: @This(),
- comptime fmt_spec: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt_spec;
- _ = options;
+ pub fn format(self: @This(), writer: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ comptime assert(f.len == 0);
try stringify(self.value, self.options, writer);
}
};
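
The json formatter above shows the custom-format shape this commit converges on: `format` takes a `*std.io.Writer` plus the comptime format string, returns `std.io.Writer.Error!void`, and is reached through the `{f}` specifier. A self-contained sketch under those assumptions; `Celsius` and its field are purely illustrative:

    const std = @import("std");

    const Celsius = struct {
        degrees: u32,

        pub fn format(self: Celsius, writer: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
            comptime std.debug.assert(f.len == 0);
            try writer.print("{d}C", .{self.degrees});
        }
    };

    test "new format method sketch" {
        // Custom format methods are now invoked via the {f} specifier.
        try std.testing.expectFmt("21C", "{f}", .{Celsius{ .degrees = 21 }});
    }
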
lib/std/json/stringify.zig
@@ -689,7 +689,8 @@ fn outputUnicodeEscape(codepoint: u21, out_stream: anytype) !void {
// then it may be represented as a six-character sequence: a reverse solidus, followed
// by the lowercase letter u, followed by four hexadecimal digits that encode the character's code point.
try out_stream.writeAll("\\u");
- try std.fmt.formatIntValue(codepoint, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+ //try w.printInt("x", .{ .width = 4, .fill = '0' }, codepoint);
+ try std.fmt.deprecatedFormat(out_stream, "{x:0>4}", .{codepoint});
} else {
assert(codepoint <= 0x10FFFF);
// To escape an extended character that is not in the Basic Multilingual Plane,
@@ -697,9 +698,11 @@ fn outputUnicodeEscape(codepoint: u21, out_stream: anytype) !void {
const high = @as(u16, @intCast((codepoint - 0x10000) >> 10)) + 0xD800;
const low = @as(u16, @intCast(codepoint & 0x3FF)) + 0xDC00;
try out_stream.writeAll("\\u");
- try std.fmt.formatIntValue(high, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+ //try w.printInt("x", .{ .width = 4, .fill = '0' }, high);
+ try std.fmt.deprecatedFormat(out_stream, "{x:0>4}", .{high});
try out_stream.writeAll("\\u");
- try std.fmt.formatIntValue(low, "x", std.fmt.FormatOptions{ .width = 4, .fill = '0' }, out_stream);
+ //try w.printInt("x", .{ .width = 4, .fill = '0' }, low);
+ try std.fmt.deprecatedFormat(out_stream, "{x:0>4}", .{low});
}
}
lib/std/math/big/int.zig
@@ -2322,13 +2322,7 @@ pub const Const = struct {
/// this function will fail to print the string, printing "(BigInt)" instead of a number.
/// This is because the rendering algorithm requires reversing a string, which requires O(N) memory.
/// See `toString` and `toStringAlloc` for a way to print big integers without failure.
- pub fn format(
- self: Const,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- _ = options;
+ pub fn format(self: Const, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
comptime var base = 10;
comptime var case: std.fmt.Case = .lower;
@@ -2350,7 +2344,7 @@ pub const Const = struct {
const available_len = 64;
if (self.limbs.len > available_len)
- return out_stream.writeAll("(BigInt)");
+ return w.writeAll("(BigInt)");
var limbs: [calcToStringLimbsBufferLen(available_len, base)]Limb = undefined;
@@ -2360,7 +2354,7 @@ pub const Const = struct {
};
var buf: [biggest.sizeInBaseUpperBound(base)]u8 = undefined;
const len = self.toString(&buf, base, case, &limbs);
- return out_stream.writeAll(buf[0..len]);
+ return w.writeAll(buf[0..len]);
}
/// Converts self to a string in the requested base.
@@ -2934,13 +2928,8 @@ pub const Managed = struct {
/// this function will fail to print the string, printing "(BigInt)" instead of a number.
/// This is because the rendering algorithm requires reversing a string, which requires O(N) memory.
/// See `toString` and `toStringAlloc` for a way to print big integers without failure.
- pub fn format(
- self: Managed,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- return self.toConst().format(fmt, options, out_stream);
+ pub fn format(self: Managed, w: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ return self.toConst().format(w, f);
}
/// Returns math.Order.lt, math.Order.eq, math.Order.gt if |a| < |b|, |a| ==
lib/std/math/big/int_test.zig
@@ -3813,10 +3813,10 @@ test "(BigInt) positive" {
try a.pow(&a, 64 * @sizeOf(Limb) * 8);
try b.sub(&a, &c);
- const a_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{a});
+ const a_fmt = try std.fmt.allocPrintSentinel(testing.allocator, "{fd}", .{a}, 0);
defer testing.allocator.free(a_fmt);
- const b_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{b});
+ const b_fmt = try std.fmt.allocPrintSentinel(testing.allocator, "{fd}", .{b}, 0);
defer testing.allocator.free(b_fmt);
try testing.expect(mem.eql(u8, a_fmt, "(BigInt)"));
@@ -3838,10 +3838,10 @@ test "(BigInt) negative" {
a.negate();
try b.add(&a, &c);
- const a_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{a});
+ const a_fmt = try std.fmt.allocPrintSentinel(testing.allocator, "{fd}", .{a}, 0);
defer testing.allocator.free(a_fmt);
- const b_fmt = try std.fmt.allocPrintZ(testing.allocator, "{d}", .{b});
+ const b_fmt = try std.fmt.allocPrintSentinel(testing.allocator, "{fd}", .{b}, 0);
defer testing.allocator.free(b_fmt);
try testing.expect(mem.eql(u8, a_fmt, "(BigInt)"));
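
Two changes meet in this test: `allocPrintZ` becomes `allocPrintSentinel` with the sentinel passed last, and the value's format method is reached through `{f}` (here `{fd}`, which hands `d` on to the method). A minimal sketch of the sentinel form, assuming the argument order shown above:

    const std = @import("std");

    test "allocPrintSentinel sketch" {
        const gpa = std.testing.allocator;
        // allocPrintZ(gpa, fmt, args) becomes allocPrintSentinel(gpa, fmt, args, 0).
        const s = try std.fmt.allocPrintSentinel(gpa, "{d}", .{42}, 0);
        defer gpa.free(s);
        try std.testing.expectEqualStrings("42", s);
        try std.testing.expectEqual(@as(u8, 0), s[s.len]); // trailing sentinel
    }
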
lib/std/net/test.zig
@@ -7,18 +7,12 @@ const testing = std.testing;
test "parse and render IP addresses at comptime" {
if (builtin.os.tag == .wasi) return error.SkipZigTest;
comptime {
- var ipAddrBuffer: [16]u8 = undefined;
- // Parses IPv6 at comptime
const ipv6addr = net.Address.parseIp("::1", 0) catch unreachable;
- var ipv6 = std.fmt.bufPrint(ipAddrBuffer[0..], "{}", .{ipv6addr}) catch unreachable;
- try std.testing.expect(std.mem.eql(u8, "::1", ipv6[1 .. ipv6.len - 3]));
+ try std.testing.expectFmt("[::1]:0", "{f}", .{ipv6addr});
- // Parses IPv4 at comptime
const ipv4addr = net.Address.parseIp("127.0.0.1", 0) catch unreachable;
- var ipv4 = std.fmt.bufPrint(ipAddrBuffer[0..], "{}", .{ipv4addr}) catch unreachable;
- try std.testing.expect(std.mem.eql(u8, "127.0.0.1", ipv4[0 .. ipv4.len - 2]));
+ try std.testing.expectFmt("127.0.0.1:0", "{f}", .{ipv4addr});
- // Returns error for invalid IP addresses at comptime
try testing.expectError(error.InvalidIPAddressFormat, net.Address.parseIp("::123.123.123.123", 0));
try testing.expectError(error.InvalidIPAddressFormat, net.Address.parseIp("127.01.0.1", 0));
try testing.expectError(error.InvalidIPAddressFormat, net.Address.resolveIp("::123.123.123.123", 0));
@@ -28,13 +22,8 @@ test "parse and render IP addresses at comptime" {
test "format IPv6 address with no zero runs" {
if (builtin.os.tag == .wasi) return error.SkipZigTest;
-
const addr = try std.net.Address.parseIp6("2001:db8:1:2:3:4:5:6", 0);
-
- var buffer: [50]u8 = undefined;
- const result = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable;
-
- try std.testing.expectEqualStrings("[2001:db8:1:2:3:4:5:6]:0", result);
+ try std.testing.expectFmt("[2001:db8:1:2:3:4:5:6]:0", "{f}", .{addr});
}
test "parse IPv6 addresses and check compressed form" {
@@ -111,12 +100,12 @@ test "parse and render IPv6 addresses" {
};
for (ips, 0..) |ip, i| {
const addr = net.Address.parseIp6(ip, 0) catch unreachable;
- var newIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable;
+ var newIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr}) catch unreachable;
try std.testing.expect(std.mem.eql(u8, printed[i], newIp[1 .. newIp.len - 3]));
if (builtin.os.tag == .linux) {
const addr_via_resolve = net.Address.resolveIp6(ip, 0) catch unreachable;
- var newResolvedIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr_via_resolve}) catch unreachable;
+ var newResolvedIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr_via_resolve}) catch unreachable;
try std.testing.expect(std.mem.eql(u8, printed[i], newResolvedIp[1 .. newResolvedIp.len - 3]));
}
}
@@ -159,7 +148,7 @@ test "parse and render IPv4 addresses" {
"127.0.0.1",
}) |ip| {
const addr = net.Address.parseIp4(ip, 0) catch unreachable;
- var newIp = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable;
+ var newIp = std.fmt.bufPrint(buffer[0..], "{f}", .{addr}) catch unreachable;
try std.testing.expect(std.mem.eql(u8, ip, newIp[0 .. newIp.len - 2]));
}
@@ -175,10 +164,8 @@ test "parse and render UNIX addresses" {
if (builtin.os.tag == .wasi) return error.SkipZigTest;
if (!net.has_unix_sockets) return error.SkipZigTest;
- var buffer: [14]u8 = undefined;
const addr = net.Address.initUnix("/tmp/testpath") catch unreachable;
- const fmt_addr = std.fmt.bufPrint(buffer[0..], "{}", .{addr}) catch unreachable;
- try std.testing.expectEqualSlices(u8, "/tmp/testpath", fmt_addr);
+ try std.testing.expectFmt("/tmp/testpath", "{f}", .{addr});
const too_long = [_]u8{'a'} ** 200;
try testing.expectError(error.NameTooLong, net.Address.initUnix(too_long[0..]));

lib/std/os/uefi/protocol/file.zig
@@ -79,30 +79,6 @@ pub const File = extern struct {
VolumeFull,
};
- pub const SeekableStream = io.SeekableStream(
- *File,
- SeekError,
- SeekError,
- setPosition,
- seekBy,
- getPosition,
- getEndPos,
- );
- pub const Reader = io.GenericReader(*File, ReadError, read);
- pub const Writer = io.GenericWriter(*File, WriteError, write);
-
- pub fn seekableStream(self: *File) SeekableStream {
- return .{ .context = self };
- }
-
- pub fn reader(self: *File) Reader {
- return .{ .context = self };
- }
-
- pub fn writer(self: *File) Writer {
- return .{ .context = self };
- }
-
pub fn open(
self: *const File,
file_name: [*:0]const u16,
lib/std/os/windows/ws2_32.zig
@@ -1829,7 +1829,7 @@ pub extern "ws2_32" fn sendto(
buf: [*]const u8,
len: i32,
flags: i32,
- to: *const sockaddr,
+ to: ?*const sockaddr,
tolen: i32,
) callconv(.winapi) i32;
@@ -2116,14 +2116,6 @@ pub extern "ws2_32" fn WSASendMsg(
lpCompletionRoutine: ?LPWSAOVERLAPPED_COMPLETION_ROUTINE,
) callconv(.winapi) i32;
-pub extern "ws2_32" fn WSARecvMsg(
- s: SOCKET,
- lpMsg: *WSAMSG,
- lpdwNumberOfBytesRecv: ?*u32,
- lpOverlapped: ?*OVERLAPPED,
- lpCompletionRoutine: ?LPWSAOVERLAPPED_COMPLETION_ROUTINE,
-) callconv(.winapi) i32;
-
pub extern "ws2_32" fn WSASendDisconnect(
s: SOCKET,
lpOutboundDisconnectData: ?*WSABUF,
lib/std/os/freebsd.zig
@@ -0,0 +1,49 @@
+const std = @import("../std.zig");
+const fd_t = std.c.fd_t;
+const off_t = std.c.off_t;
+const unexpectedErrno = std.posix.unexpectedErrno;
+const errno = std.posix.errno;
+
+pub const CopyFileRangeError = error{
+ /// If infd is not open for reading or outfd is not open for writing, or
+ /// opened for writing with O_APPEND, or if infd and outfd refer to the
+ /// same file.
+ BadFileFlags,
+ /// If the copy exceeds the process's file size limit or the maximum
+ /// file size for the file system outfd resides on.
+ FileTooBig,
+ /// A signal interrupted the system call before it could be completed.
+ /// This may happen for files on some NFS mounts. When this happens,
+ /// the values pointed to by inoffp and outoffp are reset to the
+ /// initial values for the system call.
+ Interrupted,
+ /// One of:
+ /// * infd and outfd refer to the same file and the byte ranges overlap.
+ /// * The flags argument is not zero.
+ /// * Either infd or outfd refers to a file object that is not a regular file.
+ InvalidArguments,
+ /// An I/O error occurred while reading/writing the files.
+ InputOutput,
+ /// Corrupted data was detected while reading from a file system.
+ CorruptedData,
+ /// Either infd or outfd refers to a directory.
+ IsDir,
+ /// File system that stores outfd is full.
+ NoSpaceLeft,
+};
+
+pub fn copy_file_range(fd_in: fd_t, off_in: ?*i64, fd_out: fd_t, off_out: ?*i64, len: usize, flags: u32) CopyFileRangeError!usize {
+ const rc = std.c.copy_file_range(fd_in, off_in, fd_out, off_out, len, flags);
+ switch (errno(rc)) {
+ .SUCCESS => return @intCast(rc),
+ .BADF => return error.BadFileFlags,
+ .FBIG => return error.FileTooBig,
+ .INTR => return error.Interrupted,
+ .INVAL => return error.InvalidArguments,
+ .IO => return error.InputOutput,
+ .INTEGRITY => return error.CorruptedData,
+ .ISDIR => return error.IsDir,
+ .NOSPC => return error.NoSpaceLeft,
+ else => |err| return unexpectedErrno(err),
+ }
+}
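
A hedged usage sketch of the new wrapper, assuming the file is exposed as `std.os.freebsd` and the descriptors come from ordinary `std.fs.File` handles; `copyAll` is an illustrative helper, and passing `null` offsets lets the kernel advance both file positions:

    const std = @import("std");

    // Copy the remaining contents of `in` into `out` via copy_file_range(2).
    fn copyAll(in: std.fs.File, out: std.fs.File) !void {
        while (true) {
            const n = try std.os.freebsd.copy_file_range(in.handle, null, out.handle, null, std.math.maxInt(u32), 0);
            if (n == 0) return; // reached end of input
        }
    }
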
lib/std/os/linux.zig
@@ -9420,4 +9420,132 @@ pub const msghdr_const = extern struct {
control: ?*const anyopaque,
controllen: usize,
flags: u32,
-};
\ No newline at end of file
+};
+
+/// The syscalls, but with Zig error sets, going through libc if linking libc,
+/// and with some footguns eliminated.
+pub const wrapped = struct {
+ pub const lfs64_abi = builtin.link_libc and (builtin.abi.isGnu() or builtin.abi.isAndroid());
+ const system = if (builtin.link_libc) std.c else std.os.linux;
+
+ pub const SendfileError = std.posix.UnexpectedError || error{
+ /// `out_fd` is an unconnected socket, or out_fd closed its read end.
+ BrokenPipe,
+ /// Descriptor is not valid or locked, or an mmap(2)-like operation is not available for in_fd.
+ UnsupportedOperation,
+ /// Nonblocking I/O has been selected but the write would block.
+ WouldBlock,
+ /// Unspecified error while reading from in_fd.
+ InputOutput,
+ /// Insufficient kernel memory to read from in_fd.
+ SystemResources,
+ /// `in_offset` is not `null` but the input file is not seekable.
+ Unseekable,
+ };
+
+ pub fn sendfile(
+ out_fd: fd_t,
+ in_fd: fd_t,
+ in_offset: ?*off_t,
+ in_len: usize,
+ ) SendfileError!usize {
+ const adjusted_len = @min(in_len, 0x7ffff000); // Prevents EOVERFLOW.
+ const sendfileSymbol = if (lfs64_abi) system.sendfile64 else system.sendfile;
+ const rc = sendfileSymbol(out_fd, in_fd, in_offset, adjusted_len);
+ switch (errno(rc)) {
+ .SUCCESS => return @intCast(rc),
+ .BADF => return invalidApiUsage(), // Always a race condition.
+ .FAULT => return invalidApiUsage(), // Segmentation fault.
+ .OVERFLOW => return unexpectedErrno(.OVERFLOW), // We avoid passing too large of a `count`.
+ .NOTCONN => return error.BrokenPipe, // `out_fd` is an unconnected socket
+ .INVAL => return error.UnsupportedOperation,
+ .AGAIN => return error.WouldBlock,
+ .IO => return error.InputOutput,
+ .PIPE => return error.BrokenPipe,
+ .NOMEM => return error.SystemResources,
+ .NXIO => return error.Unseekable,
+ .SPIPE => return error.Unseekable,
+ else => |err| return unexpectedErrno(err),
+ }
+ }
+
+ pub const CopyFileRangeError = std.posix.UnexpectedError || error{
+ /// One of:
+ /// * One or more file descriptors are not valid.
+ /// * fd_in is not open for reading; or fd_out is not open for writing.
+ /// * The O_APPEND flag is set for the open file description referred
+ /// to by the file descriptor fd_out.
+ BadFileFlags,
+ /// One of:
+ /// * An attempt was made to write at a position past the maximum file
+ /// offset the kernel supports.
+ /// * An attempt was made to write a range that exceeds the allowed
+ /// maximum file size. The maximum file size differs between
+ /// filesystem implementations and can be different from the maximum
+ /// allowed file offset.
+ /// * An attempt was made to write beyond the process's file size
+ /// resource limit. This may also result in the process receiving a
+ /// SIGXFSZ signal.
+ FileTooBig,
+ /// One of:
+ /// * either fd_in or fd_out is not a regular file
+ /// * flags argument is not zero
+ /// * fd_in and fd_out refer to the same file and the source and target ranges overlap.
+ InvalidArguments,
+ /// A low-level I/O error occurred while copying.
+ InputOutput,
+ /// Either fd_in or fd_out refers to a directory.
+ IsDir,
+ OutOfMemory,
+ /// There is not enough space on the target filesystem to complete the copy.
+ NoSpaceLeft,
+ /// (since Linux 5.19) the filesystem does not support this operation.
+ OperationNotSupported,
+ /// The requested source or destination range is too large to represent
+ /// in the specified data types.
+ Overflow,
+ /// fd_out refers to an immutable file.
+ PermissionDenied,
+ /// Either fd_in or fd_out refers to an active swap file.
+ SwapFile,
+ /// The files referred to by fd_in and fd_out are not on the same
+ /// filesystem, and the source and target filesystems are not of the
+ /// same type, or do not support cross-filesystem copy.
+ NotSameFileSystem,
+ };
+
+ pub fn copy_file_range(fd_in: fd_t, off_in: ?*i64, fd_out: fd_t, off_out: ?*i64, len: usize, flags: u32) CopyFileRangeError!usize {
+ const rc = system.copy_file_range(fd_in, off_in, fd_out, off_out, len, flags);
+ switch (errno(rc)) {
+ .SUCCESS => return @intCast(rc),
+ .BADF => return error.BadFileFlags,
+ .FBIG => return error.FileTooBig,
+ .INVAL => return error.InvalidArguments,
+ .IO => return error.InputOutput,
+ .ISDIR => return error.IsDir,
+ .NOMEM => return error.OutOfMemory,
+ .NOSPC => return error.NoSpaceLeft,
+ .OPNOTSUPP => return error.OperationNotSupported,
+ .OVERFLOW => return error.Overflow,
+ .PERM => return error.PermissionDenied,
+ .TXTBSY => return error.SwapFile,
+ .XDEV => return error.NotSameFileSystem,
+ else => |err| return unexpectedErrno(err),
+ }
+ }
+
+ const unexpectedErrno = std.posix.unexpectedErrno;
+
+ fn invalidApiUsage() error{Unexpected} {
+ if (builtin.mode == .Debug) @panic("invalid API usage");
+ return error.Unexpected;
+ }
+
+ fn errno(rc: anytype) E {
+ if (builtin.link_libc) {
+ return if (rc == -1) @enumFromInt(std.c._errno().*) else .SUCCESS;
+ } else {
+ return errnoFromSyscall(rc);
+ }
+ }
+};
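
A hedged usage sketch of the new `wrapped` namespace, assuming it is reachable as `std.os.linux.wrapped`; the wrapper already clamps the length to avoid EOVERFLOW, so a caller like the illustrative `sendChunk` below only handles the declared error set:

    const std = @import("std");

    // Send up to `len` bytes of `in` to a socket or pipe, treating a full
    // nonblocking socket as "sent nothing this round".
    fn sendChunk(out_fd: std.posix.fd_t, in: std.fs.File, len: usize) !usize {
        return std.os.linux.wrapped.sendfile(out_fd, in.handle, null, len) catch |err| switch (err) {
            error.WouldBlock => 0,
            else => |e| return e,
        };
    }
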
lib/std/os/uefi.zig
@@ -1,4 +1,5 @@
const std = @import("../std.zig");
+const assert = std.debug.assert;
/// A protocol is an interface identified by a GUID.
pub const protocol = @import("uefi/protocol.zig");
@@ -59,31 +60,21 @@ pub const Guid = extern struct {
node: [6]u8,
/// Format GUID into hexadecimal lowercase xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx format
- pub fn format(
- self: @This(),
- comptime f: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- if (f.len == 0) {
- const fmt = std.fmt.fmtSliceHexLower;
-
- const time_low = @byteSwap(self.time_low);
- const time_mid = @byteSwap(self.time_mid);
- const time_high_and_version = @byteSwap(self.time_high_and_version);
-
- return std.fmt.format(writer, "{:0>8}-{:0>4}-{:0>4}-{:0>2}{:0>2}-{:0>12}", .{
- fmt(std.mem.asBytes(&time_low)),
- fmt(std.mem.asBytes(&time_mid)),
- fmt(std.mem.asBytes(&time_high_and_version)),
- fmt(std.mem.asBytes(&self.clock_seq_high_and_reserved)),
- fmt(std.mem.asBytes(&self.clock_seq_low)),
- fmt(std.mem.asBytes(&self.node)),
- });
- } else {
- std.fmt.invalidFmtError(f, self);
- }
+ pub fn format(self: @This(), writer: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ comptime assert(f.len == 0);
+
+ const time_low = @byteSwap(self.time_low);
+ const time_mid = @byteSwap(self.time_mid);
+ const time_high_and_version = @byteSwap(self.time_high_and_version);
+
+ return std.fmt.format(writer, "{x:0>8}-{x:0>4}-{x:0>4}-{x:0>2}{x:0>2}-{x:0>12}", .{
+ std.mem.asBytes(&time_low),
+ std.mem.asBytes(&time_mid),
+ std.mem.asBytes(&time_high_and_version),
+ std.mem.asBytes(&self.clock_seq_high_and_reserved),
+ std.mem.asBytes(&self.clock_seq_low),
+ std.mem.asBytes(&self.node),
+ });
}
pub fn eql(a: std.os.uefi.Guid, b: std.os.uefi.Guid) bool {
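
The rewritten `Guid.format` relies on `{x}` accepting a byte slice directly (padded via the usual fill/width syntax), in place of the `std.fmt.fmtSliceHexLower` wrapper used before. A minimal sketch under that assumption:

    const std = @import("std");

    test "hex-format a byte slice" {
        const bytes: []const u8 = &.{ 0xde, 0xad, 0xbe, 0xef };
        // {x} on a byte slice prints each byte as lowercase hex.
        try std.testing.expectFmt("deadbeef", "{x}", .{bytes});
    }
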
lib/std/os/windows.zig
@@ -1690,40 +1690,6 @@ pub fn getpeername(s: ws2_32.SOCKET, name: *ws2_32.sockaddr, namelen: *ws2_32.so
return ws2_32.getpeername(s, name, @as(*i32, @ptrCast(namelen)));
}
-pub fn sendmsg(
- s: ws2_32.SOCKET,
- msg: *ws2_32.WSAMSG_const,
- flags: u32,
-) i32 {
- var bytes_send: DWORD = undefined;
- if (ws2_32.WSASendMsg(s, msg, flags, &bytes_send, null, null) == ws2_32.SOCKET_ERROR) {
- return ws2_32.SOCKET_ERROR;
- } else {
- return @as(i32, @as(u31, @intCast(bytes_send)));
- }
-}
-
-pub fn sendto(s: ws2_32.SOCKET, buf: [*]const u8, len: usize, flags: u32, to: ?*const ws2_32.sockaddr, to_len: ws2_32.socklen_t) i32 {
- var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = @constCast(buf) };
- var bytes_send: DWORD = undefined;
- if (ws2_32.WSASendTo(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_send, flags, to, @as(i32, @intCast(to_len)), null, null) == ws2_32.SOCKET_ERROR) {
- return ws2_32.SOCKET_ERROR;
- } else {
- return @as(i32, @as(u31, @intCast(bytes_send)));
- }
-}
-
-pub fn recvfrom(s: ws2_32.SOCKET, buf: [*]u8, len: usize, flags: u32, from: ?*ws2_32.sockaddr, from_len: ?*ws2_32.socklen_t) i32 {
- var buffer = ws2_32.WSABUF{ .len = @as(u31, @truncate(len)), .buf = buf };
- var bytes_received: DWORD = undefined;
- var flags_inout = flags;
- if (ws2_32.WSARecvFrom(s, @as([*]ws2_32.WSABUF, @ptrCast(&buffer)), 1, &bytes_received, &flags_inout, from, @as(?*i32, @ptrCast(from_len)), null, null) == ws2_32.SOCKET_ERROR) {
- return ws2_32.SOCKET_ERROR;
- } else {
- return @as(i32, @as(u31, @intCast(bytes_received)));
- }
-}
-
pub fn poll(fds: [*]ws2_32.pollfd, n: c_ulong, timeout: i32) i32 {
return ws2_32.WSAPoll(fds, n, timeout);
}
lib/std/posix/test.zig
@@ -667,7 +667,7 @@ test "mmap" {
const file = try tmp.dir.createFile(test_out_file, .{});
defer file.close();
- const stream = file.writer();
+ const stream = file.deprecatedWriter();
var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
lib/std/process/Child.zig
@@ -1004,12 +1004,12 @@ fn forkChildErrReport(fd: i32, err: ChildProcess.SpawnError) noreturn {
fn writeIntFd(fd: i32, value: ErrInt) !void {
const file: File = .{ .handle = fd };
- file.writer().writeInt(u64, @intCast(value), .little) catch return error.SystemResources;
+ file.deprecatedWriter().writeInt(u64, @intCast(value), .little) catch return error.SystemResources;
}
fn readIntFd(fd: i32) !ErrInt {
const file: File = .{ .handle = fd };
- return @intCast(file.reader().readInt(u64, .little) catch return error.SystemResources);
+ return @intCast(file.deprecatedReader().readInt(u64, .little) catch return error.SystemResources);
}
const ErrInt = std.meta.Int(.unsigned, @sizeOf(anyerror) * 8);
lib/std/Random/benchmark.zig
@@ -122,7 +122,7 @@ fn mode(comptime x: comptime_int) comptime_int {
}
pub fn main() !void {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);
lib/std/Target/Query.zig
@@ -394,25 +394,24 @@ pub fn canDetectLibC(self: Query) bool {
/// Formats a version with the patch component omitted if it is zero,
/// unlike SemanticVersion.format which formats all its version components regardless.
-fn formatVersion(version: SemanticVersion, writer: anytype) !void {
+fn formatVersion(version: SemanticVersion, gpa: Allocator, list: *std.ArrayListUnmanaged(u8)) !void {
if (version.patch == 0) {
- try writer.print("{d}.{d}", .{ version.major, version.minor });
+ try list.print(gpa, "{d}.{d}", .{ version.major, version.minor });
} else {
- try writer.print("{d}.{d}.{d}", .{ version.major, version.minor, version.patch });
+ try list.print(gpa, "{d}.{d}.{d}", .{ version.major, version.minor, version.patch });
}
}
-pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
- if (self.isNativeTriple())
- return allocator.dupe(u8, "native");
+pub fn zigTriple(self: Query, gpa: Allocator) Allocator.Error![]u8 {
+ if (self.isNativeTriple()) return gpa.dupe(u8, "native");
const arch_name = if (self.cpu_arch) |arch| @tagName(arch) else "native";
const os_name = if (self.os_tag) |os_tag| @tagName(os_tag) else "native";
- var result = std.ArrayList(u8).init(allocator);
- defer result.deinit();
+ var result: std.ArrayListUnmanaged(u8) = .empty;
+ defer result.deinit(gpa);
- try result.writer().print("{s}-{s}", .{ arch_name, os_name });
+ try result.print(gpa, "{s}-{s}", .{ arch_name, os_name });
// The zig target syntax does not allow specifying a max os version with no min, so
// if either are present, we need the min.
@@ -420,11 +419,11 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
switch (min) {
.none => {},
.semver => |v| {
- try result.writer().writeAll(".");
- try formatVersion(v, result.writer());
+ try result.appendSlice(gpa, ".");
+ try formatVersion(v, gpa, &result);
},
.windows => |v| {
- try result.writer().print("{s}", .{v});
+ try result.print(gpa, "{d}", .{v});
},
}
}
@@ -432,39 +431,39 @@ pub fn zigTriple(self: Query, allocator: Allocator) Allocator.Error![]u8 {
switch (max) {
.none => {},
.semver => |v| {
- try result.writer().writeAll("...");
- try formatVersion(v, result.writer());
+ try result.appendSlice(gpa, "...");
+ try formatVersion(v, gpa, &result);
},
.windows => |v| {
// This is counting on a custom format() function defined on `WindowsVersion`
// to add a prefix '.' and make there be a total of three dots.
- try result.writer().print("..{s}", .{v});
+ try result.print(gpa, "..{d}", .{v});
},
}
}
if (self.glibc_version) |v| {
const name = if (self.abi) |abi| @tagName(abi) else "gnu";
- try result.ensureUnusedCapacity(name.len + 2);
+ try result.ensureUnusedCapacity(gpa, name.len + 2);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
result.appendAssumeCapacity('.');
- try formatVersion(v, result.writer());
+ try formatVersion(v, gpa, &result);
} else if (self.android_api_level) |lvl| {
const name = if (self.abi) |abi| @tagName(abi) else "android";
- try result.ensureUnusedCapacity(name.len + 2);
+ try result.ensureUnusedCapacity(gpa, name.len + 2);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
result.appendAssumeCapacity('.');
- try result.writer().print("{d}", .{lvl});
+ try result.print(gpa, "{d}", .{lvl});
} else if (self.abi) |abi| {
const name = @tagName(abi);
- try result.ensureUnusedCapacity(name.len + 1);
+ try result.ensureUnusedCapacity(gpa, name.len + 1);
result.appendAssumeCapacity('-');
result.appendSliceAssumeCapacity(name);
}
- return result.toOwnedSlice();
+ return result.toOwnedSlice(gpa);
}
/// Renders the query into a textual representation that can be parsed via the
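
The Query rewrite threads the allocator through an unmanaged list instead of going via `result.writer()`. A minimal sketch of that pattern, assuming `std.ArrayListUnmanaged(u8)` exposes `print(gpa, fmt, args)` as the hunk above uses it:

    const std = @import("std");

    test "unmanaged list printing" {
        const gpa = std.testing.allocator;
        var result: std.ArrayListUnmanaged(u8) = .empty;
        defer result.deinit(gpa);
        // Every growing operation takes the allocator explicitly.
        try result.print(gpa, "{s}-{s}", .{ "x86_64", "linux" });
        try result.appendSlice(gpa, "-gnu");
        try std.testing.expectEqualStrings("x86_64-linux-gnu", result.items);
    }
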
lib/std/unicode/throughput_test.zig
@@ -39,7 +39,7 @@ fn benchmarkCodepointCount(buf: []const u8) !ResultCount {
}
pub fn main() !void {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.print("short ASCII strings\n", .{});
{
lib/std/zig/llvm/Builder.zig
@@ -1,3 +1,14 @@
+const std = @import("../../std.zig");
+const Allocator = std.mem.Allocator;
+const assert = std.debug.assert;
+const bitcode_writer = @import("bitcode_writer.zig");
+const Builder = @This();
+const builtin = @import("builtin");
+const DW = std.dwarf;
+const ir = @import("ir.zig");
+const log = std.log.scoped(.llvm);
+const Writer = std.io.Writer;
+
gpa: Allocator,
strip: bool,
@@ -90,31 +101,25 @@ pub const String = enum(u32) {
const FormatData = struct {
string: String,
builder: *const Builder,
+ quote_behavior: ?QuoteBehavior,
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (comptime std.mem.indexOfNone(u8, fmt_str, "\"r")) |_|
- @compileError("invalid format string: '" ++ fmt_str ++ "'");
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
assert(data.string != .none);
const string_slice = data.string.slice(data.builder) orelse
- return writer.print("{d}", .{@intFromEnum(data.string)});
- if (comptime std.mem.indexOfScalar(u8, fmt_str, 'r')) |_|
- return writer.writeAll(string_slice);
- try printEscapedString(
- string_slice,
- if (comptime std.mem.indexOfScalar(u8, fmt_str, '"')) |_|
- .always_quote
- else
- .quote_unless_valid_identifier,
- writer,
- );
+ return w.print("{d}", .{@intFromEnum(data.string)});
+ const quote_behavior = data.quote_behavior orelse return w.writeAll(string_slice);
+ return printEscapedString(string_slice, quote_behavior, w);
}
- pub fn fmt(self: String, builder: *const Builder) std.fmt.Formatter(format) {
- return .{ .data = .{ .string = self, .builder = builder } };
+ pub fn fmt(
+ self: String,
+ builder: *const Builder,
+ quote_behavior: ?QuoteBehavior,
+ ) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{
+ .string = self,
+ .builder = builder,
+ .quote_behavior = quote_behavior,
+ } };
}
fn fromIndex(index: ?usize) String {
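
The same refactor recurs throughout this file: options that used to be smuggled through the comptime format string (the `r` and `"` flags here) become ordinary fields of the formatter's data, and `std.fmt.Formatter` now names the data type explicitly. A self-contained sketch of that pattern; `PathFormat` and `fmtPath` are illustrative only:

    const std = @import("std");

    const PathFormat = struct {
        path: []const u8,
        quote: bool, // replaces a '"' flag in the comptime format string

        fn render(data: PathFormat, w: *std.io.Writer) std.io.Writer.Error!void {
            if (data.quote) try w.writeByte('"');
            try w.writeAll(data.path);
            if (data.quote) try w.writeByte('"');
        }
    };

    fn fmtPath(path: []const u8, quote: bool) std.fmt.Formatter(PathFormat, PathFormat.render) {
        return .{ .data = .{ .path = path, .quote = quote } };
    }

    test "formatter data instead of format-string flags" {
        try std.testing.expectFmt("\"a/b\"", "{f}", .{fmtPath("a/b", true)});
        try std.testing.expectFmt("a/b", "{f}", .{fmtPath("a/b", false)});
    }
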
@@ -228,7 +233,7 @@ pub const Type = enum(u32) {
_,
pub const ptr_amdgpu_constant =
- @field(Type, std.fmt.comptimePrint("ptr{ }", .{AddrSpace.amdgpu.constant}));
+ @field(Type, std.fmt.comptimePrint("ptr{f }", .{AddrSpace.amdgpu.constant}));
pub const Tag = enum(u4) {
simple,
@@ -653,18 +658,16 @@ pub const Type = enum(u32) {
const FormatData = struct {
type: Type,
builder: *const Builder,
+ mode: Mode,
+
+ const Mode = enum { default, m, lt, gt, percent };
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- fmt_opts: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
assert(data.type != .none);
- if (comptime std.mem.eql(u8, fmt_str, "m")) {
+ if (data.mode == .m) {
const item = data.builder.type_items.items[@intFromEnum(data.type)];
switch (item.tag) {
- .simple => try writer.writeAll(switch (@as(Simple, @enumFromInt(item.data))) {
+ .simple => try w.writeAll(switch (@as(Simple, @enumFromInt(item.data))) {
.void => "isVoid",
.half => "f16",
.bfloat => "bf16",
@@ -681,29 +684,29 @@ pub const Type = enum(u32) {
.function, .vararg_function => |kind| {
var extra = data.builder.typeExtraDataTrail(Type.Function, item.data);
const params = extra.trail.next(extra.data.params_len, Type, data.builder);
- try writer.print("f_{m}", .{extra.data.ret.fmt(data.builder)});
- for (params) |param| try writer.print("{m}", .{param.fmt(data.builder)});
+ try w.print("f_{fm}", .{extra.data.ret.fmt(data.builder)});
+ for (params) |param| try w.print("{fm}", .{param.fmt(data.builder)});
switch (kind) {
.function => {},
- .vararg_function => try writer.writeAll("vararg"),
+ .vararg_function => try w.writeAll("vararg"),
else => unreachable,
}
- try writer.writeByte('f');
+ try w.writeByte('f');
},
- .integer => try writer.print("i{d}", .{item.data}),
- .pointer => try writer.print("p{d}", .{item.data}),
+ .integer => try w.print("i{d}", .{item.data}),
+ .pointer => try w.print("p{d}", .{item.data}),
.target => {
var extra = data.builder.typeExtraDataTrail(Type.Target, item.data);
const types = extra.trail.next(extra.data.types_len, Type, data.builder);
const ints = extra.trail.next(extra.data.ints_len, u32, data.builder);
- try writer.print("t{s}", .{extra.data.name.slice(data.builder).?});
- for (types) |ty| try writer.print("_{m}", .{ty.fmt(data.builder)});
- for (ints) |int| try writer.print("_{d}", .{int});
- try writer.writeByte('t');
+ try w.print("t{s}", .{extra.data.name.slice(data.builder).?});
+ for (types) |ty| try w.print("_{fm}", .{ty.fmt(data.builder)});
+ for (ints) |int| try w.print("_{d}", .{int});
+ try w.writeByte('t');
},
.vector, .scalable_vector => |kind| {
const extra = data.builder.typeExtraData(Type.Vector, item.data);
- try writer.print("{s}v{d}{m}", .{
+ try w.print("{s}v{d}{fm}", .{
switch (kind) {
.vector => "",
.scalable_vector => "nx",
@@ -719,65 +722,65 @@ pub const Type = enum(u32) {
.array => Type.Array,
else => unreachable,
}, item.data);
- try writer.print("a{d}{m}", .{ extra.length(), extra.child.fmt(data.builder) });
+ try w.print("a{d}{fm}", .{ extra.length(), extra.child.fmt(data.builder) });
},
.structure, .packed_structure => {
var extra = data.builder.typeExtraDataTrail(Type.Structure, item.data);
const fields = extra.trail.next(extra.data.fields_len, Type, data.builder);
- try writer.writeAll("sl_");
- for (fields) |field| try writer.print("{m}", .{field.fmt(data.builder)});
- try writer.writeByte('s');
+ try w.writeAll("sl_");
+ for (fields) |field| try w.print("{fm}", .{field.fmt(data.builder)});
+ try w.writeByte('s');
},
.named_structure => {
const extra = data.builder.typeExtraData(Type.NamedStructure, item.data);
- try writer.writeAll("s_");
- if (extra.id.slice(data.builder)) |id| try writer.writeAll(id);
+ try w.writeAll("s_");
+ if (extra.id.slice(data.builder)) |id| try w.writeAll(id);
},
}
return;
}
- if (std.enums.tagName(Type, data.type)) |name| return writer.writeAll(name);
+ if (std.enums.tagName(Type, data.type)) |name| return w.writeAll(name);
const item = data.builder.type_items.items[@intFromEnum(data.type)];
switch (item.tag) {
.simple => unreachable,
.function, .vararg_function => |kind| {
var extra = data.builder.typeExtraDataTrail(Type.Function, item.data);
const params = extra.trail.next(extra.data.params_len, Type, data.builder);
- if (!comptime std.mem.eql(u8, fmt_str, ">"))
- try writer.print("{%} ", .{extra.data.ret.fmt(data.builder)});
- if (!comptime std.mem.eql(u8, fmt_str, "<")) {
- try writer.writeByte('(');
+ if (data.mode != .gt)
+ try w.print("{f%} ", .{extra.data.ret.fmt(data.builder)});
+ if (data.mode != .lt) {
+ try w.writeByte('(');
for (params, 0..) |param, index| {
- if (index > 0) try writer.writeAll(", ");
- try writer.print("{%}", .{param.fmt(data.builder)});
+ if (index > 0) try w.writeAll(", ");
+ try w.print("{f%}", .{param.fmt(data.builder)});
}
switch (kind) {
.function => {},
.vararg_function => {
- if (params.len > 0) try writer.writeAll(", ");
- try writer.writeAll("...");
+ if (params.len > 0) try w.writeAll(", ");
+ try w.writeAll("...");
},
else => unreachable,
}
- try writer.writeByte(')');
+ try w.writeByte(')');
}
},
- .integer => try writer.print("i{d}", .{item.data}),
- .pointer => try writer.print("ptr{ }", .{@as(AddrSpace, @enumFromInt(item.data))}),
+ .integer => try w.print("i{d}", .{item.data}),
+ .pointer => try w.print("ptr{f }", .{@as(AddrSpace, @enumFromInt(item.data))}),
.target => {
var extra = data.builder.typeExtraDataTrail(Type.Target, item.data);
const types = extra.trail.next(extra.data.types_len, Type, data.builder);
const ints = extra.trail.next(extra.data.ints_len, u32, data.builder);
- try writer.print(
- \\target({"}
+ try w.print(
+ \\target({f"}
, .{extra.data.name.fmt(data.builder)});
- for (types) |ty| try writer.print(", {%}", .{ty.fmt(data.builder)});
- for (ints) |int| try writer.print(", {d}", .{int});
- try writer.writeByte(')');
+ for (types) |ty| try w.print(", {f%}", .{ty.fmt(data.builder)});
+ for (ints) |int| try w.print(", {d}", .{int});
+ try w.writeByte(')');
},
.vector, .scalable_vector => |kind| {
const extra = data.builder.typeExtraData(Type.Vector, item.data);
- try writer.print("<{s}{d} x {%}>", .{
+ try w.print("<{s}{d} x {f%}>", .{
switch (kind) {
.vector => "",
.scalable_vector => "vscale x ",
@@ -793,44 +796,45 @@ pub const Type = enum(u32) {
.array => Type.Array,
else => unreachable,
}, item.data);
- try writer.print("[{d} x {%}]", .{ extra.length(), extra.child.fmt(data.builder) });
+ try w.print("[{d} x {f%}]", .{ extra.length(), extra.child.fmt(data.builder) });
},
.structure, .packed_structure => |kind| {
var extra = data.builder.typeExtraDataTrail(Type.Structure, item.data);
const fields = extra.trail.next(extra.data.fields_len, Type, data.builder);
switch (kind) {
.structure => {},
- .packed_structure => try writer.writeByte('<'),
+ .packed_structure => try w.writeByte('<'),
else => unreachable,
}
- try writer.writeAll("{ ");
+ try w.writeAll("{ ");
for (fields, 0..) |field, index| {
- if (index > 0) try writer.writeAll(", ");
- try writer.print("{%}", .{field.fmt(data.builder)});
+ if (index > 0) try w.writeAll(", ");
+ try w.print("{f%}", .{field.fmt(data.builder)});
}
- try writer.writeAll(" }");
+ try w.writeAll(" }");
switch (kind) {
.structure => {},
- .packed_structure => try writer.writeByte('>'),
+ .packed_structure => try w.writeByte('>'),
else => unreachable,
}
},
.named_structure => {
const extra = data.builder.typeExtraData(Type.NamedStructure, item.data);
- if (comptime std.mem.eql(u8, fmt_str, "%")) try writer.print("%{}", .{
+ if (data.mode == .percent) try w.print("%{f}", .{
extra.id.fmt(data.builder),
}) else switch (extra.body) {
- .none => try writer.writeAll("opaque"),
+ .none => try w.writeAll("opaque"),
else => try format(.{
.type = extra.body,
.builder = data.builder,
- }, fmt_str, fmt_opts, writer),
+ .mode = data.mode,
+ }, w),
}
},
}
}
- pub fn fmt(self: Type, builder: *const Builder) std.fmt.Formatter(format) {
- return .{ .data = .{ .type = self, .builder = builder } };
+ pub fn fmt(self: Type, builder: *const Builder, mode: FormatData.Mode) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{ .type = self, .builder = builder, .mode = mode } };
}
const IsSizedVisited = std.AutoHashMapUnmanaged(Type, void);
@@ -1138,15 +1142,10 @@ pub const Attribute = union(Kind) {
const FormatData = struct {
attribute_index: Index,
builder: *const Builder,
+ mode: Mode,
+ const Mode = enum { default, quote, pound };
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (comptime std.mem.indexOfNone(u8, fmt_str, "\"#")) |_|
- @compileError("invalid format string: '" ++ fmt_str ++ "'");
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
const attribute = data.attribute_index.toAttribute(data.builder);
switch (attribute) {
.zeroext,
@@ -1219,97 +1218,94 @@ pub const Attribute = union(Kind) {
.no_sanitize_address,
.no_sanitize_hwaddress,
.sanitize_address_dyninit,
- => try writer.print(" {s}", .{@tagName(attribute)}),
+ => try w.print(" {s}", .{@tagName(attribute)}),
.byval,
.byref,
.preallocated,
.inalloca,
.sret,
.elementtype,
- => |ty| try writer.print(" {s}({%})", .{ @tagName(attribute), ty.fmt(data.builder) }),
- .@"align" => |alignment| try writer.print("{ }", .{alignment}),
+ => |ty| try w.print(" {s}({f%})", .{ @tagName(attribute), ty.fmt(data.builder) }),
+ .@"align" => |alignment| try w.print("{f }", .{alignment}),
.dereferenceable,
.dereferenceable_or_null,
- => |size| try writer.print(" {s}({d})", .{ @tagName(attribute), size }),
+ => |size| try w.print(" {s}({d})", .{ @tagName(attribute), size }),
.nofpclass => |fpclass| {
const Int = @typeInfo(FpClass).@"struct".backing_integer.?;
- try writer.print(" {s}(", .{@tagName(attribute)});
+ try w.print(" {s}(", .{@tagName(attribute)});
var any = false;
var remaining: Int = @bitCast(fpclass);
inline for (@typeInfo(FpClass).@"struct".decls) |decl| {
const pattern: Int = @bitCast(@field(FpClass, decl.name));
if (remaining & pattern == pattern) {
if (!any) {
- try writer.writeByte(' ');
+ try w.writeByte(' ');
any = true;
}
- try writer.writeAll(decl.name);
+ try w.writeAll(decl.name);
remaining &= ~pattern;
}
}
- try writer.writeByte(')');
+ try w.writeByte(')');
},
- .alignstack => |alignment| try writer.print(
- if (comptime std.mem.indexOfScalar(u8, fmt_str, '#') != null)
- " {s}={d}"
- else
- " {s}({d})",
+ .alignstack => |alignment| try w.print(
+ if (data.mode == .pound) " {s}={d}" else " {s}({d})",
.{ @tagName(attribute), alignment.toByteUnits() orelse return },
),
.allockind => |allockind| {
- try writer.print(" {s}(\"", .{@tagName(attribute)});
+ try w.print(" {s}(\"", .{@tagName(attribute)});
var any = false;
inline for (@typeInfo(AllocKind).@"struct".fields) |field| {
if (comptime std.mem.eql(u8, field.name, "_")) continue;
if (@field(allockind, field.name)) {
if (!any) {
- try writer.writeByte(',');
+ try w.writeByte(',');
any = true;
}
- try writer.writeAll(field.name);
+ try w.writeAll(field.name);
}
}
- try writer.writeAll("\")");
+ try w.writeAll("\")");
},
.allocsize => |allocsize| {
- try writer.print(" {s}({d}", .{ @tagName(attribute), allocsize.elem_size });
+ try w.print(" {s}({d}", .{ @tagName(attribute), allocsize.elem_size });
if (allocsize.num_elems != AllocSize.none)
- try writer.print(",{d}", .{allocsize.num_elems});
- try writer.writeByte(')');
+ try w.print(",{d}", .{allocsize.num_elems});
+ try w.writeByte(')');
},
.memory => |memory| {
- try writer.print(" {s}(", .{@tagName(attribute)});
+ try w.print(" {s}(", .{@tagName(attribute)});
var any = memory.other != .none or
(memory.argmem == .none and memory.inaccessiblemem == .none);
- if (any) try writer.writeAll(@tagName(memory.other));
+ if (any) try w.writeAll(@tagName(memory.other));
inline for (.{ "argmem", "inaccessiblemem" }) |kind| {
if (@field(memory, kind) != memory.other) {
- if (any) try writer.writeAll(", ");
- try writer.print("{s}: {s}", .{ kind, @tagName(@field(memory, kind)) });
+ if (any) try w.writeAll(", ");
+ try w.print("{s}: {s}", .{ kind, @tagName(@field(memory, kind)) });
any = true;
}
}
- try writer.writeByte(')');
+ try w.writeByte(')');
},
.uwtable => |uwtable| if (uwtable != .none) {
- try writer.print(" {s}", .{@tagName(attribute)});
- if (uwtable != UwTable.default) try writer.print("({s})", .{@tagName(uwtable)});
+ try w.print(" {s}", .{@tagName(attribute)});
+ if (uwtable != UwTable.default) try w.print("({s})", .{@tagName(uwtable)});
},
- .vscale_range => |vscale_range| try writer.print(" {s}({d},{d})", .{
+ .vscale_range => |vscale_range| try w.print(" {s}({d},{d})", .{
@tagName(attribute),
vscale_range.min.toByteUnits().?,
vscale_range.max.toByteUnits() orelse 0,
}),
- .string => |string_attr| if (comptime std.mem.indexOfScalar(u8, fmt_str, '"') != null) {
- try writer.print(" {\"}", .{string_attr.kind.fmt(data.builder)});
+ .string => |string_attr| if (data.mode == .quote) {
+ try w.print(" {f\"}", .{string_attr.kind.fmt(data.builder)});
if (string_attr.value != .empty)
- try writer.print("={\"}", .{string_attr.value.fmt(data.builder)});
+ try w.print("={f\"}", .{string_attr.value.fmt(data.builder)});
},
.none => unreachable,
}
}
- pub fn fmt(self: Index, builder: *const Builder) std.fmt.Formatter(format) {
- return .{ .data = .{ .attribute_index = self, .builder = builder } };
+ pub fn fmt(self: Index, builder: *const Builder, mode: FormatData.Mode) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{ .attribute_index = self, .builder = builder, .mode = mode } };
}
fn toStorage(self: Index, builder: *const Builder) Storage {
@@ -1583,18 +1579,13 @@ pub const Attributes = enum(u32) {
attributes: Attributes,
builder: *const Builder,
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- fmt_opts: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
for (data.attributes.slice(data.builder)) |attribute_index| try Attribute.Index.format(.{
.attribute_index = attribute_index,
.builder = data.builder,
- }, fmt_str, fmt_opts, writer);
+ }, w);
}
- pub fn fmt(self: Attributes, builder: *const Builder) std.fmt.Formatter(format) {
+ pub fn fmt(self: Attributes, builder: *const Builder) std.fmt.Formatter(FormatData, format) {
return .{ .data = .{ .attributes = self, .builder = builder } };
}
};
@@ -1781,24 +1772,15 @@ pub const Linkage = enum(u4) {
extern_weak = 7,
external = 0,
- pub fn format(
- self: Linkage,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .external) try writer.print(" {s}", .{@tagName(self)});
+ pub fn format(self: Linkage, w: *Writer, comptime f: []const u8) Writer.Error!void {
+ comptime assert(f.len == 0);
+ if (self != .external) try w.print(" {s}", .{@tagName(self)});
}
- fn formatOptional(
- data: ?Linkage,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (data) |linkage| try writer.print(" {s}", .{@tagName(linkage)});
+ fn formatOptional(data: ?Linkage, w: *Writer) Writer.Error!void {
+ if (data) |linkage| try w.print(" {s}", .{@tagName(linkage)});
}
- pub fn fmtOptional(self: ?Linkage) std.fmt.Formatter(formatOptional) {
+ pub fn fmtOptional(self: ?Linkage) std.fmt.Formatter(?Linkage, formatOptional) {
return .{ .data = self };
}
};
@@ -1808,13 +1790,8 @@ pub const Preemption = enum {
dso_local,
implicit_dso_local,
- pub fn format(
- self: Preemption,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self == .dso_local) try writer.print(" {s}", .{@tagName(self)});
+ pub fn format(self: Preemption, w: *Writer, comptime _: []const u8) Writer.Error!void {
+ if (self == .dso_local) try w.print(" {s}", .{@tagName(self)});
}
};
@@ -1831,12 +1808,8 @@ pub const Visibility = enum(u2) {
};
}
- pub fn format(
- self: Visibility,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(self: Visibility, writer: *Writer, comptime format_string: []const u8) Writer.Error!void {
+ comptime assert(format_string.len == 0);
if (self != .default) try writer.print(" {s}", .{@tagName(self)});
}
};
@@ -1846,13 +1819,8 @@ pub const DllStorageClass = enum(u2) {
dllimport = 1,
dllexport = 2,
- pub fn format(
- self: DllStorageClass,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .default) try writer.print(" {s}", .{@tagName(self)});
+ pub fn format(self: DllStorageClass, w: *Writer, comptime _: []const u8) Writer.Error!void {
+ if (self != .default) try w.print(" {s}", .{@tagName(self)});
}
};
@@ -1863,15 +1831,10 @@ pub const ThreadLocal = enum(u3) {
initialexec = 3,
localexec = 4,
- pub fn format(
- self: ThreadLocal,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(self: ThreadLocal, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
if (self == .default) return;
- try writer.print("{s}thread_local", .{prefix});
- if (self != .generaldynamic) try writer.print("({s})", .{@tagName(self)});
+ try w.print("{s}thread_local", .{prefix});
+ if (self != .generaldynamic) try w.print("({s})", .{@tagName(self)});
}
};
@@ -1882,13 +1845,8 @@ pub const UnnamedAddr = enum(u2) {
unnamed_addr = 1,
local_unnamed_addr = 2,
- pub fn format(
- self: UnnamedAddr,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .default) try writer.print(" {s}", .{@tagName(self)});
+ pub fn format(self: UnnamedAddr, w: *Writer, comptime _: []const u8) Writer.Error!void {
+ if (self != .default) try w.print(" {s}", .{@tagName(self)});
}
};
@@ -1981,13 +1939,8 @@ pub const AddrSpace = enum(u24) {
pub const funcref: AddrSpace = @enumFromInt(20);
};
- pub fn format(
- self: AddrSpace,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .default) try writer.print("{s}addrspace({d})", .{ prefix, @intFromEnum(self) });
+ pub fn format(self: AddrSpace, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
+ if (self != .default) try w.print("{s}addrspace({d})", .{ prefix, @intFromEnum(self) });
}
};
@@ -1995,15 +1948,8 @@ pub const ExternallyInitialized = enum {
default,
externally_initialized,
- pub fn format(
- self: ExternallyInitialized,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self == .default) return;
- try writer.writeByte(' ');
- try writer.writeAll(@tagName(self));
+ pub fn format(self: ExternallyInitialized, w: *Writer, comptime _: []const u8) Writer.Error!void {
+ if (self != .default) try w.print(" {s}", .{@tagName(self)});
}
};
@@ -2026,13 +1972,8 @@ pub const Alignment = enum(u6) {
return if (self == .default) 0 else (@intFromEnum(self) + 1);
}
- pub fn format(
- self: Alignment,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- try writer.print("{s}align {d}", .{ prefix, self.toByteUnits() orelse return });
+ pub fn format(self: Alignment, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
+ try w.print("{s}align {d}", .{ prefix, self.toByteUnits() orelse return });
}
};
@@ -2105,12 +2046,7 @@ pub const CallConv = enum(u10) {
pub const default = CallConv.ccc;
- pub fn format(
- self: CallConv,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(self: CallConv, w: *Writer, comptime _: []const u8) Writer.Error!void {
switch (self) {
default => {},
.fastcc,
@@ -2164,8 +2100,8 @@ pub const CallConv = enum(u10) {
.aarch64_sme_preservemost_from_x2,
.m68k_rtdcc,
.riscv_vectorcallcc,
- => try writer.print(" {s}", .{@tagName(self)}),
- _ => try writer.print(" cc{d}", .{@intFromEnum(self)}),
+ => try w.print(" {s}", .{@tagName(self)}),
+ _ => try w.print(" cc{d}", .{@intFromEnum(self)}),
}
}
};
@@ -2190,31 +2126,25 @@ pub const StrtabString = enum(u32) {
const FormatData = struct {
string: StrtabString,
builder: *const Builder,
+ quote_behavior: ?QuoteBehavior,
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (comptime std.mem.indexOfNone(u8, fmt_str, "\"r")) |_|
- @compileError("invalid format string: '" ++ fmt_str ++ "'");
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
assert(data.string != .none);
const string_slice = data.string.slice(data.builder) orelse
- return writer.print("{d}", .{@intFromEnum(data.string)});
- if (comptime std.mem.indexOfScalar(u8, fmt_str, 'r')) |_|
- return writer.writeAll(string_slice);
- try printEscapedString(
- string_slice,
- if (comptime std.mem.indexOfScalar(u8, fmt_str, '"')) |_|
- .always_quote
- else
- .quote_unless_valid_identifier,
- writer,
- );
+ return w.print("{d}", .{@intFromEnum(data.string)});
+ const quote_behavior = data.quote_behavior orelse return w.writeAll(string_slice);
+ return printEscapedString(string_slice, quote_behavior, w);
}
- pub fn fmt(self: StrtabString, builder: *const Builder) std.fmt.Formatter(format) {
- return .{ .data = .{ .string = self, .builder = builder } };
+ pub fn fmt(
+ self: StrtabString,
+ builder: *const Builder,
+ quote_behavior: ?QuoteBehavior,
+ ) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{
+ .string = self,
+ .builder = builder,
+ .quote_behavior = quote_behavior,
+ } };
}
fn fromIndex(index: ?usize) StrtabString {
@@ -2264,7 +2194,7 @@ pub fn strtabStringFmt(self: *Builder, comptime fmt_str: []const u8, fmt_args: a
}
pub fn strtabStringFmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) StrtabString {
- self.strtab_string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable;
+ self.strtab_string_bytes.printAssumeCapacity(fmt_str, fmt_args);
return self.trailingStrtabStringAssumeCapacity();
}
@@ -2383,17 +2313,12 @@ pub const Global = struct {
global: Index,
builder: *const Builder,
};
- fn format(
- data: FormatData,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- try writer.print("@{}", .{
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
+ try w.print("@{f}", .{
data.global.unwrap(data.builder).name(data.builder).fmt(data.builder),
});
}
- pub fn fmt(self: Index, builder: *const Builder) std.fmt.Formatter(format) {
+ pub fn fmt(self: Index, builder: *const Builder) std.fmt.Formatter(FormatData, format) {
return .{ .data = .{ .global = self, .builder = builder } };
}
@@ -4833,29 +4758,28 @@ pub const Function = struct {
instruction: Instruction.Index,
function: Function.Index,
builder: *Builder,
+ flags: Flags,
+ const Flags = struct {
+ comma: bool = false,
+ space: bool = false,
+ percent: bool = false,
+ };
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (comptime std.mem.indexOfNone(u8, fmt_str, ", %")) |_|
- @compileError("invalid format string: '" ++ fmt_str ++ "'");
- if (comptime std.mem.indexOfScalar(u8, fmt_str, ',') != null) {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
+ if (data.flags.comma) {
if (data.instruction == .none) return;
- try writer.writeByte(',');
+ try w.writeByte(',');
}
- if (comptime std.mem.indexOfScalar(u8, fmt_str, ' ') != null) {
+ if (data.flags.space) {
if (data.instruction == .none) return;
- try writer.writeByte(' ');
+ try w.writeByte(' ');
}
- if (comptime std.mem.indexOfScalar(u8, fmt_str, '%') != null) try writer.print(
- "{%} ",
+ if (data.flags.percent) try w.print(
+ "{f%} ",
.{data.instruction.typeOf(data.function, data.builder).fmt(data.builder)},
);
assert(data.instruction != .none);
- try writer.print("%{}", .{
+ try w.print("%{f}", .{
data.instruction.name(data.function.ptrConst(data.builder)).fmt(data.builder),
});
}
@@ -4863,8 +4787,14 @@ pub const Function = struct {
self: Instruction.Index,
function: Function.Index,
builder: *Builder,
- ) std.fmt.Formatter(format) {
- return .{ .data = .{ .instruction = self, .function = function, .builder = builder } };
+ flags: FormatData.Flags,
+ ) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{
+ .instruction = self,
+ .function = function,
+ .builder = builder,
+ .flags = flags,
+ } };
}
};
@@ -6361,7 +6291,7 @@ pub const WipFunction = struct {
while (true) {
gop.value_ptr.* = @enumFromInt(@intFromEnum(gop.value_ptr.*) + 1);
- const unique_name = try wip_name.builder.fmt("{r}{s}{r}", .{
+ const unique_name = try wip_name.builder.fmt("{fr}{s}{fr}", .{
name.fmt(wip_name.builder),
sep,
gop.value_ptr.fmt(wip_name.builder),
@@ -7031,13 +6961,8 @@ pub const MemoryAccessKind = enum(u1) {
normal,
@"volatile",
- pub fn format(
- self: MemoryAccessKind,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .normal) try writer.print("{s}{s}", .{ prefix, @tagName(self) });
+ pub fn format(self: MemoryAccessKind, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
+ if (self != .normal) try w.print("{s}{s}", .{ prefix, @tagName(self) });
}
};
@@ -7045,13 +6970,8 @@ pub const SyncScope = enum(u1) {
singlethread,
system,
- pub fn format(
- self: SyncScope,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .system) try writer.print(
+ pub fn format(self: SyncScope, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
+ if (self != .system) try w.print(
\\{s}syncscope("{s}")
, .{ prefix, @tagName(self) });
}
@@ -7066,13 +6986,8 @@ pub const AtomicOrdering = enum(u3) {
acq_rel = 5,
seq_cst = 6,
- pub fn format(
- self: AtomicOrdering,
- comptime prefix: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (self != .none) try writer.print("{s}{s}", .{ prefix, @tagName(self) });
+ pub fn format(self: AtomicOrdering, w: *Writer, comptime prefix: []const u8) Writer.Error!void {
+ if (self != .none) try w.print("{s}{s}", .{ prefix, @tagName(self) });
}
};
@@ -7486,27 +7401,26 @@ pub const Constant = enum(u32) {
const FormatData = struct {
constant: Constant,
builder: *Builder,
+ flags: Flags,
+ const Flags = struct {
+ comma: bool = false,
+ space: bool = false,
+ percent: bool = false,
+ };
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- if (comptime std.mem.indexOfNone(u8, fmt_str, ", %")) |_|
- @compileError("invalid format string: '" ++ fmt_str ++ "'");
- if (comptime std.mem.indexOfScalar(u8, fmt_str, ',') != null) {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
+ if (data.flags.comma) {
if (data.constant == .no_init) return;
- try writer.writeByte(',');
+ try w.writeByte(',');
}
- if (comptime std.mem.indexOfScalar(u8, fmt_str, ' ') != null) {
+ if (data.flags.space) {
if (data.constant == .no_init) return;
- try writer.writeByte(' ');
+ try w.writeByte(' ');
}
- if (comptime std.mem.indexOfScalar(u8, fmt_str, '%') != null)
- try writer.print("{%} ", .{data.constant.typeOf(data.builder).fmt(data.builder)});
+ if (data.flags.percent)
+ try w.print("{f%} ", .{data.constant.typeOf(data.builder).fmt(data.builder)});
assert(data.constant != .no_init);
- if (std.enums.tagName(Constant, data.constant)) |name| return writer.writeAll(name);
+ if (std.enums.tagName(Constant, data.constant)) |name| return w.writeAll(name);
switch (data.constant.unwrap()) {
.constant => |constant| {
const item = data.builder.constant_items.get(constant);
@@ -7545,11 +7459,11 @@ pub const Constant = enum(u32) {
const allocator = stack.get();
const str = try bigint.toStringAlloc(allocator, 10, undefined);
defer allocator.free(str);
- try writer.writeAll(str);
+ try w.writeAll(str);
},
.half,
.bfloat,
- => |tag| try writer.print("0x{c}{X:0>4}", .{ @as(u8, switch (tag) {
+ => |tag| try w.print("0x{c}{X:0>4}", .{ @as(u8, switch (tag) {
.half => 'H',
.bfloat => 'R',
else => unreachable,
@@ -7580,7 +7494,7 @@ pub const Constant = enum(u32) {
) + 1,
else => 0,
};
- try writer.print("0x{X:0>16}", .{@as(u64, @bitCast(Float.Repr(f64){
+ try w.print("0x{X:0>16}", .{@as(u64, @bitCast(Float.Repr(f64){
.mantissa = std.math.shl(
Mantissa64,
repr.mantissa,
@@ -7602,13 +7516,13 @@ pub const Constant = enum(u32) {
},
.double => {
const extra = data.builder.constantExtraData(Double, item.data);
- try writer.print("0x{X:0>8}{X:0>8}", .{ extra.hi, extra.lo });
+ try w.print("0x{X:0>8}{X:0>8}", .{ extra.hi, extra.lo });
},
.fp128,
.ppc_fp128,
=> |tag| {
const extra = data.builder.constantExtraData(Fp128, item.data);
- try writer.print("0x{c}{X:0>8}{X:0>8}{X:0>8}{X:0>8}", .{
+ try w.print("0x{c}{X:0>8}{X:0>8}{X:0>8}{X:0>8}", .{
@as(u8, switch (tag) {
.fp128 => 'L',
.ppc_fp128 => 'M',
@@ -7622,7 +7536,7 @@ pub const Constant = enum(u32) {
},
.x86_fp80 => {
const extra = data.builder.constantExtraData(Fp80, item.data);
- try writer.print("0xK{X:0>4}{X:0>8}{X:0>8}", .{
+ try w.print("0xK{X:0>4}{X:0>8}{X:0>8}", .{
extra.hi, extra.lo_hi, extra.lo_lo,
});
},
@@ -7631,7 +7545,7 @@ pub const Constant = enum(u32) {
.zeroinitializer,
.undef,
.poison,
- => |tag| try writer.writeAll(@tagName(tag)),
+ => |tag| try w.writeAll(@tagName(tag)),
.structure,
.packed_structure,
.array,
@@ -7640,7 +7554,7 @@ pub const Constant = enum(u32) {
var extra = data.builder.constantExtraDataTrail(Aggregate, item.data);
const len: u32 = @intCast(extra.data.type.aggregateLen(data.builder));
const vals = extra.trail.next(len, Constant, data.builder);
- try writer.writeAll(switch (tag) {
+ try w.writeAll(switch (tag) {
.structure => "{ ",
.packed_structure => "<{ ",
.array => "[",
@@ -7648,10 +7562,10 @@ pub const Constant = enum(u32) {
else => unreachable,
});
for (vals, 0..) |val, index| {
- if (index > 0) try writer.writeAll(", ");
- try writer.print("{%}", .{val.fmt(data.builder)});
+ if (index > 0) try w.writeAll(", ");
+ try w.print("{f%}", .{val.fmt(data.builder)});
}
- try writer.writeAll(switch (tag) {
+ try w.writeAll(switch (tag) {
.structure => " }",
.packed_structure => " }>",
.array => "]",
@@ -7662,20 +7576,20 @@ pub const Constant = enum(u32) {
.splat => {
const extra = data.builder.constantExtraData(Splat, item.data);
const len = extra.type.vectorLen(data.builder);
- try writer.writeByte('<');
+ try w.writeByte('<');
for (0..len) |index| {
- if (index > 0) try writer.writeAll(", ");
- try writer.print("{%}", .{extra.value.fmt(data.builder)});
+ if (index > 0) try w.writeAll(", ");
+ try w.print("{f%}", .{extra.value.fmt(data.builder)});
}
- try writer.writeByte('>');
+ try w.writeByte('>');
},
- .string => try writer.print("c{\"}", .{
+ .string => try w.print("c{f\"}", .{
@as(String, @enumFromInt(item.data)).fmt(data.builder),
}),
.blockaddress => |tag| {
const extra = data.builder.constantExtraData(BlockAddress, item.data);
const function = extra.function.ptrConst(data.builder);
- try writer.print("{s}({}, {})", .{
+ try w.print("{s}({f}, {f})", .{
@tagName(tag),
function.global.fmt(data.builder),
extra.block.toInst(function).fmt(extra.function, data.builder),
@@ -7685,7 +7599,7 @@ pub const Constant = enum(u32) {
.no_cfi,
=> |tag| {
const function: Function.Index = @enumFromInt(item.data);
- try writer.print("{s} {}", .{
+ try w.print("{s} {f}", .{
@tagName(tag),
function.ptrConst(data.builder).global.fmt(data.builder),
});
@@ -7697,7 +7611,7 @@ pub const Constant = enum(u32) {
.addrspacecast,
=> |tag| {
const extra = data.builder.constantExtraData(Cast, item.data);
- try writer.print("{s} ({%} to {%})", .{
+ try w.print("{s} ({f%} to {f%})", .{
@tagName(tag),
extra.val.fmt(data.builder),
extra.type.fmt(data.builder),
@@ -7709,13 +7623,13 @@ pub const Constant = enum(u32) {
var extra = data.builder.constantExtraDataTrail(GetElementPtr, item.data);
const indices =
extra.trail.next(extra.data.info.indices_len, Constant, data.builder);
- try writer.print("{s} ({%}, {%}", .{
+ try w.print("{s} ({f%}, {f%}", .{
@tagName(tag),
extra.data.type.fmt(data.builder),
extra.data.base.fmt(data.builder),
});
- for (indices) |index| try writer.print(", {%}", .{index.fmt(data.builder)});
- try writer.writeByte(')');
+ for (indices) |index| try w.print(", {f%}", .{index.fmt(data.builder)});
+ try w.writeByte(')');
},
.add,
.@"add nsw",
@@ -7727,7 +7641,7 @@ pub const Constant = enum(u32) {
.xor,
=> |tag| {
const extra = data.builder.constantExtraData(Binary, item.data);
- try writer.print("{s} ({%}, {%})", .{
+ try w.print("{s} ({f%}, {f%})", .{
@tagName(tag),
extra.lhs.fmt(data.builder),
extra.rhs.fmt(data.builder),
@@ -7751,7 +7665,7 @@ pub const Constant = enum(u32) {
.@"asm sideeffect alignstack inteldialect unwind",
=> |tag| {
const extra = data.builder.constantExtraData(Assembly, item.data);
- try writer.print("{s} {\"}, {\"}", .{
+ try w.print("{s} {f\"}, {f\"}", .{
@tagName(tag),
extra.assembly.fmt(data.builder),
extra.constraints.fmt(data.builder),
@@ -7759,11 +7673,15 @@ pub const Constant = enum(u32) {
},
}
},
- .global => |global| try writer.print("{}", .{global.fmt(data.builder)}),
+ .global => |global| try w.print("{f}", .{global.fmt(data.builder)}),
}
}
- pub fn fmt(self: Constant, builder: *Builder) std.fmt.Formatter(format) {
- return .{ .data = .{ .constant = self, .builder = builder } };
+ pub fn fmt(self: Constant, builder: *Builder, flags: FormatData.Flags) std.fmt.Formatter(FormatData, format) {
+ return .{ .data = .{
+ .constant = self,
+ .builder = builder,
+ .flags = flags,
+ } };
}
};
@@ -7819,26 +7737,21 @@ pub const Value = enum(u32) {
function: Function.Index,
builder: *Builder,
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- fmt_opts: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
switch (data.value.unwrap()) {
.instruction => |instruction| try Function.Instruction.Index.format(.{
.instruction = instruction,
.function = data.function,
.builder = data.builder,
- }, fmt_str, fmt_opts, writer),
+ }, w),
.constant => |constant| try Constant.format(.{
.constant = constant,
.builder = data.builder,
- }, fmt_str, fmt_opts, writer),
+ }, w),
.metadata => unreachable,
}
}
- pub fn fmt(self: Value, function: Function.Index, builder: *Builder) std.fmt.Formatter(format) {
+ pub fn fmt(self: Value, function: Function.Index, builder: *Builder) std.fmt.Formatter(FormatData, format) {
return .{ .data = .{ .value = self, .function = function, .builder = builder } };
}
};
@@ -7869,15 +7782,10 @@ pub const MetadataString = enum(u32) {
metadata_string: MetadataString,
builder: *const Builder,
};
- fn format(
- data: FormatData,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- try printEscapedString(data.metadata_string.slice(data.builder), .always_quote, writer);
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
+ try printEscapedString(data.metadata_string.slice(data.builder), .always_quote, w);
}
- fn fmt(self: MetadataString, builder: *const Builder) std.fmt.Formatter(format) {
+ fn fmt(self: MetadataString, builder: *const Builder) std.fmt.Formatter(FormatData, format) {
return .{ .data = .{ .metadata_string = self, .builder = builder } };
}
};
@@ -8039,29 +7947,24 @@ pub const Metadata = enum(u32) {
AllCallsDescribed: bool = false,
Unused: u2 = 0,
- pub fn format(
- self: DIFlags,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(self: DIFlags, w: *Writer, comptime _: []const u8) Writer.Error!void {
var need_pipe = false;
inline for (@typeInfo(DIFlags).@"struct".fields) |field| {
switch (@typeInfo(field.type)) {
.bool => if (@field(self, field.name)) {
- if (need_pipe) try writer.writeAll(" | ") else need_pipe = true;
- try writer.print("DIFlag{s}", .{field.name});
+ if (need_pipe) try w.writeAll(" | ") else need_pipe = true;
+ try w.print("DIFlag{s}", .{field.name});
},
.@"enum" => if (@field(self, field.name) != .Zero) {
- if (need_pipe) try writer.writeAll(" | ") else need_pipe = true;
- try writer.print("DIFlag{s}", .{@tagName(@field(self, field.name))});
+ if (need_pipe) try w.writeAll(" | ") else need_pipe = true;
+ try w.print("DIFlag{s}", .{@tagName(@field(self, field.name))});
},
.int => assert(@field(self, field.name) == 0),
else => @compileError("bad field type: " ++ field.name ++ ": " ++
@typeName(field.type)),
}
}
- if (!need_pipe) try writer.writeByte('0');
+ if (!need_pipe) try w.writeByte('0');
}
};
@@ -8101,29 +8004,24 @@ pub const Metadata = enum(u32) {
ObjCDirect: bool = false,
Unused: u20 = 0,
- pub fn format(
- self: DISPFlags,
- comptime _: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(self: DISPFlags, w: *Writer, comptime _: []const u8) Writer.Error!void {
var need_pipe = false;
inline for (@typeInfo(DISPFlags).@"struct".fields) |field| {
switch (@typeInfo(field.type)) {
.bool => if (@field(self, field.name)) {
- if (need_pipe) try writer.writeAll(" | ") else need_pipe = true;
- try writer.print("DISPFlag{s}", .{field.name});
+ if (need_pipe) try w.writeAll(" | ") else need_pipe = true;
+ try w.print("DISPFlag{s}", .{field.name});
},
.@"enum" => if (@field(self, field.name) != .Zero) {
- if (need_pipe) try writer.writeAll(" | ") else need_pipe = true;
- try writer.print("DISPFlag{s}", .{@tagName(@field(self, field.name))});
+ if (need_pipe) try w.writeAll(" | ") else need_pipe = true;
+ try w.print("DISPFlag{s}", .{@tagName(@field(self, field.name))});
},
.int => assert(@field(self, field.name) == 0),
else => @compileError("bad field type: " ++ field.name ++ ": " ++
@typeName(field.type)),
}
}
- if (!need_pipe) try writer.writeByte('0');
+ if (!need_pipe) try w.writeByte('0');
}
};
@@ -8298,6 +8196,9 @@ pub const Metadata = enum(u32) {
formatter: *Formatter,
prefix: []const u8 = "",
node: Node,
+ specialized: ?TODO,
+
+ const TODO = opaque {};
const Node = union(enum) {
none,
@@ -8323,20 +8224,15 @@ pub const Metadata = enum(u32) {
};
};
};
- fn format(
- data: FormatData,
- comptime fmt_str: []const u8,
- fmt_opts: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ fn format(data: FormatData, w: *Writer) Writer.Error!void {
if (data.node == .none) return;
- const is_specialized = fmt_str.len > 0 and fmt_str[0] == 'S';
- const recurse_fmt_str = if (is_specialized) fmt_str[1..] else fmt_str;
+ const is_specialized = data.specialized != null;
+ const recurse_fmt_str = data.specialized orelse {};
- if (data.formatter.need_comma) try writer.writeAll(", ");
+ if (data.formatter.need_comma) try w.writeAll(", ");
defer data.formatter.need_comma = true;
- try writer.writeAll(data.prefix);
+ try w.writeAll(data.prefix);
const builder = data.formatter.builder;
switch (data.node) {
@@ -8351,54 +8247,50 @@ pub const Metadata = enum(u32) {
.expression => {
var extra = builder.metadataExtraDataTrail(Expression, item.data);
const elements = extra.trail.next(extra.data.elements_len, u32, builder);
- try writer.writeAll("!DIExpression(");
+ try w.writeAll("!DIExpression(");
for (elements) |element| try format(.{
.formatter = data.formatter,
.node = .{ .u64 = element },
- }, "%", fmt_opts, writer);
- try writer.writeByte(')');
+ }, w, "%");
+ try w.writeByte(')');
},
.constant => try Constant.format(.{
.constant = @enumFromInt(item.data),
.builder = builder,
- }, recurse_fmt_str, fmt_opts, writer),
+ }, w, recurse_fmt_str),
else => unreachable,
}
},
- .index => |node| try writer.print("!{d}", .{node}),
+ .index => |node| try w.print("!{d}", .{node}),
inline .local_value, .local_metadata => |node, tag| try Value.format(.{
.value = node.value,
.function = node.function,
.builder = builder,
- }, switch (tag) {
+ }, w, switch (tag) {
.local_value => recurse_fmt_str,
.local_metadata => "%",
else => unreachable,
- }, fmt_opts, writer),
+ }),
inline .local_inline, .local_index => |node, tag| {
if (comptime std.mem.eql(u8, recurse_fmt_str, "%"))
- try writer.print("{%} ", .{Type.metadata.fmt(builder)});
+ try w.print("{f%} ", .{Type.metadata.fmt(builder)});
try format(.{
.formatter = data.formatter,
.node = @unionInit(FormatData.Node, @tagName(tag)["local_".len..], node),
- }, "%", fmt_opts, writer);
+ }, w, "%");
},
- .string => |node| try writer.print((if (is_specialized) "" else "!") ++ "{}", .{
+ .string => |node| try w.print((if (is_specialized) "" else "!") ++ "{f}", .{
node.fmt(builder),
}),
- inline .bool,
- .u32,
- .u64,
- .di_flags,
- .sp_flags,
- => |node| try writer.print("{}", .{node}),
- .raw => |node| try writer.writeAll(node),
+ inline .bool, .u32, .u64 => |node| try w.print("{}", .{node}),
+ inline .di_flags, .sp_flags => |node| try w.print("{f}", .{node}),
+ .raw => |node| try w.writeAll(node),
}
}
inline fn fmt(formatter: *Formatter, prefix: []const u8, node: anytype) switch (@TypeOf(node)) {
Metadata => Allocator.Error,
else => error{},
- }!std.fmt.Formatter(format) {
+ }!std.fmt.Formatter(FormatData, format) {
const Node = @TypeOf(node);
const MaybeNode = switch (@typeInfo(Node)) {
.optional => Node,
@@ -8442,7 +8334,7 @@ pub const Metadata = enum(u32) {
prefix: []const u8,
value: Value,
function: Function.Index,
- ) Allocator.Error!std.fmt.Formatter(format) {
+ ) Allocator.Error!std.fmt.Formatter(FormatData, format) {
return .{ .data = .{
.formatter = formatter,
.prefix = prefix,
@@ -8506,7 +8398,7 @@ pub const Metadata = enum(u32) {
DIGlobalVariableExpression,
},
nodes: anytype,
- writer: anytype,
+ w: *Writer,
) !void {
comptime var fmt_str: []const u8 = "";
const names = comptime std.meta.fieldNames(@TypeOf(nodes));
@@ -8523,10 +8415,10 @@ pub const Metadata = enum(u32) {
}
fmt_str = fmt_str ++ "(";
inline for (fields[2..], names) |*field, name| {
- fmt_str = fmt_str ++ "{[" ++ name ++ "]S}";
+ fmt_str = fmt_str ++ "{[" ++ name ++ "]fS}";
field.* = .{
.name = name,
- .type = std.fmt.Formatter(format),
+ .type = std.fmt.Formatter(FormatData, format),
.default_value_ptr = null,
.is_comptime = false,
.alignment = 0,
@@ -8546,7 +8438,7 @@ pub const Metadata = enum(u32) {
name ++ ": ",
@field(nodes, name),
);
- try writer.print(fmt_str, fmt_args);
+ try w.print(fmt_str, fmt_args);
}
};
};
@@ -8636,7 +8528,7 @@ pub fn init(options: Options) Allocator.Error!Builder {
inline for (.{ 0, 4 }) |addr_space_index| {
const addr_space: AddrSpace = @enumFromInt(addr_space_index);
assert(self.ptrTypeAssumeCapacity(addr_space) ==
- @field(Type, std.fmt.comptimePrint("ptr{ }", .{addr_space})));
+ @field(Type, std.fmt.comptimePrint("ptr{f }", .{addr_space})));
}
}
@@ -8759,16 +8651,8 @@ pub fn deinit(self: *Builder) void {
self.* = undefined;
}
-pub fn setModuleAsm(self: *Builder) std.ArrayListUnmanaged(u8).Writer {
- self.module_asm.clearRetainingCapacity();
- return self.appendModuleAsm();
-}
-
-pub fn appendModuleAsm(self: *Builder) std.ArrayListUnmanaged(u8).Writer {
- return self.module_asm.writer(self.gpa);
-}
-
-pub fn finishModuleAsm(self: *Builder) Allocator.Error!void {
+pub fn finishModuleAsm(self: *Builder, aw: *Writer.Allocating) Allocator.Error!void {
+ self.module_asm = aw.toArrayList();
if (self.module_asm.getLastOrNull()) |last| if (last != '\n')
try self.module_asm.append(self.gpa, '\n');
}
@@ -8804,7 +8688,7 @@ pub fn fmt(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) Allo
}
pub fn fmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) String {
- self.string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable;
+ self.string_bytes.printAssumeCapacity(fmt_str, fmt_args);
return self.trailingStringAssumeCapacity();
}
@@ -9076,9 +8960,13 @@ pub fn getIntrinsic(
const allocator = stack.get();
const name = name: {
- const writer = self.strtab_string_bytes.writer(self.gpa);
- try writer.print("llvm.{s}", .{@tagName(id)});
- for (overload) |ty| try writer.print(".{m}", .{ty.fmt(self)});
+ {
+ var aw: Writer.Allocating = .fromArrayList(self.gpa, &self.strtab_string_bytes);
+ const w = &aw.interface;
+ defer self.strtab_string_bytes = aw.toArrayList();
+ w.print("llvm.{s}", .{@tagName(id)}) catch return error.OutOfMemory;
+ for (overload) |ty| w.print(".{fm}", .{ty.fmt(self)}) catch return error.OutOfMemory;
+ }
break :name try self.trailingStrtabString();
};
if (self.getGlobal(name)) |global| return global.ptrConst(self).kind.function;
@@ -9492,110 +9380,74 @@ pub fn asmValue(
return (try self.asmConst(ty, info, assembly, constraints)).toValue();
}
-pub fn dump(self: *Builder) void {
+pub fn dump(b: *Builder) void {
+ var buffer: [4000]u8 = undefined;
const stderr: std.fs.File = .stderr();
- self.print(stderr.writer()) catch {};
+ b.printToFile(stderr, &buffer) catch {};
}
-pub fn printToFile(self: *Builder, path: []const u8) Allocator.Error!bool {
- var file = std.fs.cwd().createFile(path, .{}) catch |err| {
- log.err("failed printing LLVM module to \"{s}\": {s}", .{ path, @errorName(err) });
- return false;
- };
+pub fn printToFilePath(b: *Builder, dir: std.fs.Dir, path: []const u8) !void {
+ var buffer: [4000]u8 = undefined;
+ const file = try dir.createFile(path, .{});
defer file.close();
- self.print(file.writer()) catch |err| {
- log.err("failed printing LLVM module to \"{s}\": {s}", .{ path, @errorName(err) });
- return false;
- };
- return true;
+ try b.printToFile(file, &buffer);
}
-pub fn print(self: *Builder, writer: anytype) (@TypeOf(writer).Error || Allocator.Error)!void {
- var bw = std.io.bufferedWriter(writer);
- try self.printUnbuffered(bw.writer());
- try bw.flush();
-}
-
-fn WriterWithErrors(comptime BackingWriter: type, comptime ExtraErrors: type) type {
- return struct {
- backing_writer: BackingWriter,
-
- pub const Error = BackingWriter.Error || ExtraErrors;
- pub const Writer = std.io.GenericWriter(*const Self, Error, write);
-
- const Self = @This();
-
- pub fn writer(self: *const Self) Writer {
- return .{ .context = self };
- }
-
- pub fn write(self: *const Self, bytes: []const u8) Error!usize {
- return self.backing_writer.write(bytes);
- }
- };
+pub fn printToFile(b: *Builder, file: std.fs.File, buffer: []u8) !void {
+ var fw = file.writer(buffer);
+ try print(b, &fw.interface);
+ try fw.interface.flush();
}
-fn writerWithErrors(
- backing_writer: anytype,
- comptime ExtraErrors: type,
-) WriterWithErrors(@TypeOf(backing_writer), ExtraErrors) {
- return .{ .backing_writer = backing_writer };
-}
-
-pub fn printUnbuffered(
- self: *Builder,
- backing_writer: anytype,
-) (@TypeOf(backing_writer).Error || Allocator.Error)!void {
- const writer_with_errors = writerWithErrors(backing_writer, Allocator.Error);
- const writer = writer_with_errors.writer();
+pub fn print(self: *Builder, w: *Writer) Writer.Error!void {
var need_newline = false;
var metadata_formatter: Metadata.Formatter = .{ .builder = self, .need_comma = undefined };
defer metadata_formatter.map.deinit(self.gpa);
if (self.source_filename != .none or self.data_layout != .none or self.target_triple != .none) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
- if (self.source_filename != .none) try writer.print(
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
+ if (self.source_filename != .none) try w.print(
\\; ModuleID = '{s}'
- \\source_filename = {"}
+ \\source_filename = {f"}
\\
, .{ self.source_filename.slice(self).?, self.source_filename.fmt(self) });
- if (self.data_layout != .none) try writer.print(
- \\target datalayout = {"}
+ if (self.data_layout != .none) try w.print(
+ \\target datalayout = {f"}
\\
, .{self.data_layout.fmt(self)});
- if (self.target_triple != .none) try writer.print(
- \\target triple = {"}
+ if (self.target_triple != .none) try w.print(
+ \\target triple = {f"}
\\
, .{self.target_triple.fmt(self)});
}
if (self.module_asm.items.len > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
var line_it = std.mem.tokenizeScalar(u8, self.module_asm.items, '\n');
while (line_it.next()) |line| {
- try writer.writeAll("module asm ");
- try printEscapedString(line, .always_quote, writer);
- try writer.writeByte('\n');
+ try w.writeAll("module asm ");
+ try printEscapedString(line, .always_quote, w);
+ try w.writeByte('\n');
}
}
if (self.types.count() > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
- for (self.types.keys(), self.types.values()) |id, ty| try writer.print(
- \\%{} = type {}
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
+ for (self.types.keys(), self.types.values()) |id, ty| try w.print(
+ \\%{f} = type {f}
\\
, .{ id.fmt(self), ty.fmt(self) });
}
if (self.variables.items.len > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
for (self.variables.items) |variable| {
if (variable.global.getReplacement(self) != .none) continue;
const global = variable.global.ptrConst(self);
metadata_formatter.need_comma = true;
defer metadata_formatter.need_comma = undefined;
- try writer.print(
- \\{} ={}{}{}{}{ }{}{ }{} {s} {%}{ }{, }{}
+ try w.print(
+ \\{f} ={f}{f}{f}{f}{f }{f}{f }{f} {s} {f%}{f }{f, }{f}
\\
, .{
variable.global.fmt(self),
@@ -9618,14 +9470,14 @@ pub fn printUnbuffered(
}
if (self.aliases.items.len > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
for (self.aliases.items) |alias| {
if (alias.global.getReplacement(self) != .none) continue;
const global = alias.global.ptrConst(self);
metadata_formatter.need_comma = true;
defer metadata_formatter.need_comma = undefined;
- try writer.print(
- \\{} ={}{}{}{}{ }{} alias {%}, {%}{}
+ try w.print(
+ \\{f} ={f}{f}{f}{f}{f }{f} alias {f%}, {f%}{f}
\\
, .{
alias.global.fmt(self),
@@ -9647,17 +9499,17 @@ pub fn printUnbuffered(
for (0.., self.functions.items) |function_i, function| {
if (function.global.getReplacement(self) != .none) continue;
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
const function_index: Function.Index = @enumFromInt(function_i);
const global = function.global.ptrConst(self);
const params_len = global.type.functionParameters(self).len;
const function_attributes = function.attributes.func(self);
- if (function_attributes != .none) try writer.print(
- \\; Function Attrs:{}
+ if (function_attributes != .none) try w.print(
+ \\; Function Attrs:{f}
\\
, .{function_attributes.fmt(self)});
- try writer.print(
- \\{s}{}{}{}{}{}{"} {%} {}(
+ try w.print(
+ \\{s}{f}{f}{f}{f}{f}{f"} {f%} {f}(
, .{
if (function.instructions.len > 0) "define" else "declare",
global.linkage,
@@ -9670,40 +9522,40 @@ pub fn printUnbuffered(
function.global.fmt(self),
});
for (0..params_len) |arg| {
- if (arg > 0) try writer.writeAll(", ");
- try writer.print(
- \\{%}{"}
+ if (arg > 0) try w.writeAll(", ");
+ try w.print(
+ \\{f%}{f"}
, .{
global.type.functionParameters(self)[arg].fmt(self),
function.attributes.param(arg, self).fmt(self),
});
if (function.instructions.len > 0)
- try writer.print(" {}", .{function.arg(@intCast(arg)).fmt(function_index, self)})
+ try w.print(" {f}", .{function.arg(@intCast(arg)).fmt(function_index, self)})
else
- try writer.print(" %{d}", .{arg});
+ try w.print(" %{d}", .{arg});
}
switch (global.type.functionKind(self)) {
.normal => {},
.vararg => {
- if (params_len > 0) try writer.writeAll(", ");
- try writer.writeAll("...");
+ if (params_len > 0) try w.writeAll(", ");
+ try w.writeAll("...");
},
}
- try writer.print("){}{ }", .{ global.unnamed_addr, global.addr_space });
- if (function_attributes != .none) try writer.print(" #{d}", .{
+ try w.print("){f}{f }", .{ global.unnamed_addr, global.addr_space });
+ if (function_attributes != .none) try w.print(" #{d}", .{
(try attribute_groups.getOrPutValue(self.gpa, function_attributes, {})).index,
});
{
metadata_formatter.need_comma = false;
defer metadata_formatter.need_comma = undefined;
- try writer.print("{ }{}", .{
+ try w.print("{f }{f}", .{
function.alignment,
try metadata_formatter.fmt(" !dbg ", global.dbg),
});
}
if (function.instructions.len > 0) {
var block_incoming_len: u32 = undefined;
- try writer.writeAll(" {\n");
+ try w.writeAll(" {\n");
var maybe_dbg_index: ?u32 = null;
for (params_len..function.instructions.len) |instruction_i| {
const instruction_index: Function.Instruction.Index = @enumFromInt(instruction_i);
@@ -9801,7 +9653,7 @@ pub fn printUnbuffered(
.xor,
=> |tag| {
const extra = function.extraData(Function.Instruction.Binary, instruction.data);
- try writer.print(" %{} = {s} {%}, {}", .{
+ try w.print(" %{f} = {s} {f%}, {f}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.lhs.fmt(function_index, self),
@@ -9823,7 +9675,7 @@ pub fn printUnbuffered(
.zext,
=> |tag| {
const extra = function.extraData(Function.Instruction.Cast, instruction.data);
- try writer.print(" %{} = {s} {%} to {%}", .{
+ try w.print(" %{f} = {s} {f%} to {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.val.fmt(function_index, self),
@@ -9834,7 +9686,7 @@ pub fn printUnbuffered(
.@"alloca inalloca",
=> |tag| {
const extra = function.extraData(Function.Instruction.Alloca, instruction.data);
- try writer.print(" %{} = {s} {%}{,%}{, }{, }", .{
+ try w.print(" %{f} = {s} {f%}{f,%}{f, }{f, }", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.type.fmt(self),
@@ -9850,7 +9702,7 @@ pub fn printUnbuffered(
.atomicrmw => |tag| {
const extra =
function.extraData(Function.Instruction.AtomicRmw, instruction.data);
- try writer.print(" %{} = {s}{ } {s} {%}, {%}{ }{ }{, }", .{
+ try w.print(" %{f} = {s}{f } {s} {f%}, {f%}{f }{f }{f, }", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.info.access_kind,
@@ -9866,19 +9718,19 @@ pub fn printUnbuffered(
block_incoming_len = instruction.data;
const name = instruction_index.name(&function);
if (@intFromEnum(instruction_index) > params_len)
- try writer.writeByte('\n');
- try writer.print("{}:\n", .{name.fmt(self)});
+ try w.writeByte('\n');
+ try w.print("{f}:\n", .{name.fmt(self)});
continue;
},
.br => |tag| {
const target: Function.Block.Index = @enumFromInt(instruction.data);
- try writer.print(" {s} {%}", .{
+ try w.print(" {s} {f%}", .{
@tagName(tag), target.toInst(&function).fmt(function_index, self),
});
},
.br_cond => {
const extra = function.extraData(Function.Instruction.BrCond, instruction.data);
- try writer.print(" br {%}, {%}, {%}", .{
+ try w.print(" br {f%}, {f%}, {f%}", .{
extra.cond.fmt(function_index, self),
extra.then.toInst(&function).fmt(function_index, self),
extra.@"else".toInst(&function).fmt(function_index, self),
@@ -9887,8 +9739,8 @@ pub fn printUnbuffered(
defer metadata_formatter.need_comma = undefined;
switch (extra.weights) {
.none => {},
- .unpredictable => try writer.writeAll("!unpredictable !{}"),
- _ => try writer.print("{}", .{
+ .unpredictable => try w.writeAll("!unpredictable !{}"),
+ _ => try w.print("{f}", .{
try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.weights)))),
}),
}
@@ -9905,16 +9757,16 @@ pub fn printUnbuffered(
var extra =
function.extraDataTrail(Function.Instruction.Call, instruction.data);
const args = extra.trail.next(extra.data.args_len, Value, &function);
- try writer.writeAll(" ");
+ try w.writeAll(" ");
const ret_ty = extra.data.ty.functionReturn(self);
switch (ret_ty) {
.void => {},
- else => try writer.print("%{} = ", .{
+ else => try w.print("%{f} = ", .{
instruction_index.name(&function).fmt(self),
}),
.none => unreachable,
}
- try writer.print("{s}{}{}{} {%} {}(", .{
+ try w.print("{s}{f}{f}{f} {f%} {f}(", .{
@tagName(tag),
extra.data.info.call_conv,
extra.data.attributes.ret(self).fmt(self),
@@ -9926,21 +9778,21 @@ pub fn printUnbuffered(
extra.data.callee.fmt(function_index, self),
});
for (0.., args) |arg_index, arg| {
- if (arg_index > 0) try writer.writeAll(", ");
+ if (arg_index > 0) try w.writeAll(", ");
metadata_formatter.need_comma = false;
defer metadata_formatter.need_comma = undefined;
- try writer.print("{%}{}{}", .{
+ try w.print("{f%}{f}{f}", .{
arg.typeOf(function_index, self).fmt(self),
extra.data.attributes.param(arg_index, self).fmt(self),
try metadata_formatter.fmtLocal(" ", arg, function_index),
});
}
- try writer.writeByte(')');
+ try w.writeByte(')');
if (extra.data.info.has_op_bundle_cold) {
- try writer.writeAll(" [ \"cold\"() ]");
+ try w.writeAll(" [ \"cold\"() ]");
}
const call_function_attributes = extra.data.attributes.func(self);
- if (call_function_attributes != .none) try writer.print(" #{d}", .{
+ if (call_function_attributes != .none) try w.print(" #{d}", .{
(try attribute_groups.getOrPutValue(
self.gpa,
call_function_attributes,
@@ -9953,7 +9805,7 @@ pub fn printUnbuffered(
=> |tag| {
const extra =
function.extraData(Function.Instruction.CmpXchg, instruction.data);
- try writer.print(" %{} = {s}{ } {%}, {%}, {%}{ }{ }{ }{, }", .{
+ try w.print(" %{f} = {s}{f } {f%}, {f%}, {f%}{f }{f }{f }{f, }", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.info.access_kind,
@@ -9969,7 +9821,7 @@ pub fn printUnbuffered(
.extractelement => |tag| {
const extra =
function.extraData(Function.Instruction.ExtractElement, instruction.data);
- try writer.print(" %{} = {s} {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.val.fmt(function_index, self),
@@ -9982,16 +9834,16 @@ pub fn printUnbuffered(
instruction.data,
);
const indices = extra.trail.next(extra.data.indices_len, u32, &function);
- try writer.print(" %{} = {s} {%}", .{
+ try w.print(" %{f} = {s} {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.data.val.fmt(function_index, self),
});
- for (indices) |index| try writer.print(", {d}", .{index});
+ for (indices) |index| try w.print(", {d}", .{index});
},
.fence => |tag| {
const info: MemoryAccessInfo = @bitCast(instruction.data);
- try writer.print(" {s}{ }{ }", .{
+ try w.print(" {s}{f }{f }", .{
@tagName(tag),
info.sync_scope,
info.success_ordering,
@@ -10001,7 +9853,7 @@ pub fn printUnbuffered(
.@"fneg fast",
=> |tag| {
const val: Value = @enumFromInt(instruction.data);
- try writer.print(" %{} = {s} {%}", .{
+ try w.print(" %{f} = {s} {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
val.fmt(function_index, self),
@@ -10015,13 +9867,13 @@ pub fn printUnbuffered(
instruction.data,
);
const indices = extra.trail.next(extra.data.indices_len, Value, &function);
- try writer.print(" %{} = {s} {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.data.type.fmt(self),
extra.data.base.fmt(function_index, self),
});
- for (indices) |index| try writer.print(", {%}", .{
+ for (indices) |index| try w.print(", {f%}", .{
index.fmt(function_index, self),
});
},
@@ -10030,22 +9882,22 @@ pub fn printUnbuffered(
function.extraDataTrail(Function.Instruction.IndirectBr, instruction.data);
const targets =
extra.trail.next(extra.data.targets_len, Function.Block.Index, &function);
- try writer.print(" {s} {%}, [", .{
+ try w.print(" {s} {f%}, [", .{
@tagName(tag),
extra.data.addr.fmt(function_index, self),
});
for (0.., targets) |target_index, target| {
- if (target_index > 0) try writer.writeAll(", ");
- try writer.print("{%}", .{
+ if (target_index > 0) try w.writeAll(", ");
+ try w.print("{f%}", .{
target.toInst(&function).fmt(function_index, self),
});
}
- try writer.writeByte(']');
+ try w.writeByte(']');
},
.insertelement => |tag| {
const extra =
function.extraData(Function.Instruction.InsertElement, instruction.data);
- try writer.print(" %{} = {s} {%}, {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.val.fmt(function_index, self),
@@ -10057,19 +9909,19 @@ pub fn printUnbuffered(
var extra =
function.extraDataTrail(Function.Instruction.InsertValue, instruction.data);
const indices = extra.trail.next(extra.data.indices_len, u32, &function);
- try writer.print(" %{} = {s} {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.data.val.fmt(function_index, self),
extra.data.elem.fmt(function_index, self),
});
- for (indices) |index| try writer.print(", {d}", .{index});
+ for (indices) |index| try w.print(", {d}", .{index});
},
.load,
.@"load atomic",
=> |tag| {
const extra = function.extraData(Function.Instruction.Load, instruction.data);
- try writer.print(" %{} = {s}{ } {%}, {%}{ }{ }{, }", .{
+ try w.print(" %{f} = {s}{f } {f%}, {f%}{f }{f }{f, }", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.info.access_kind,
@@ -10087,14 +9939,14 @@ pub fn printUnbuffered(
const vals = extra.trail.next(block_incoming_len, Value, &function);
const blocks =
extra.trail.next(block_incoming_len, Function.Block.Index, &function);
- try writer.print(" %{} = {s} {%} ", .{
+ try w.print(" %{f} = {s} {f%} ", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
vals[0].typeOf(function_index, self).fmt(self),
});
for (0.., vals, blocks) |incoming_index, incoming_val, incoming_block| {
- if (incoming_index > 0) try writer.writeAll(", ");
- try writer.print("[ {}, {} ]", .{
+ if (incoming_index > 0) try w.writeAll(", ");
+ try w.print("[ {f}, {f} ]", .{
incoming_val.fmt(function_index, self),
incoming_block.toInst(&function).fmt(function_index, self),
});
@@ -10102,19 +9954,19 @@ pub fn printUnbuffered(
},
.ret => |tag| {
const val: Value = @enumFromInt(instruction.data);
- try writer.print(" {s} {%}", .{
+ try w.print(" {s} {f%}", .{
@tagName(tag),
val.fmt(function_index, self),
});
},
.@"ret void",
.@"unreachable",
- => |tag| try writer.print(" {s}", .{@tagName(tag)}),
+ => |tag| try w.print(" {s}", .{@tagName(tag)}),
.select,
.@"select fast",
=> |tag| {
const extra = function.extraData(Function.Instruction.Select, instruction.data);
- try writer.print(" %{} = {s} {%}, {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.cond.fmt(function_index, self),
@@ -10125,7 +9977,7 @@ pub fn printUnbuffered(
.shufflevector => |tag| {
const extra =
function.extraData(Function.Instruction.ShuffleVector, instruction.data);
- try writer.print(" %{} = {s} {%}, {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.lhs.fmt(function_index, self),
@@ -10137,7 +9989,7 @@ pub fn printUnbuffered(
.@"store atomic",
=> |tag| {
const extra = function.extraData(Function.Instruction.Store, instruction.data);
- try writer.print(" {s}{ } {%}, {%}{ }{ }{, }", .{
+ try w.print(" {s}{f } {f%}, {f%}{f }{f }{f, }", .{
@tagName(tag),
extra.info.access_kind,
extra.val.fmt(function_index, self),
@@ -10153,32 +10005,32 @@ pub fn printUnbuffered(
const vals = extra.trail.next(extra.data.cases_len, Constant, &function);
const blocks =
extra.trail.next(extra.data.cases_len, Function.Block.Index, &function);
- try writer.print(" {s} {%}, {%} [\n", .{
+ try w.print(" {s} {f%}, {f%} [\n", .{
@tagName(tag),
extra.data.val.fmt(function_index, self),
extra.data.default.toInst(&function).fmt(function_index, self),
});
- for (vals, blocks) |case_val, case_block| try writer.print(
- " {%}, {%}\n",
+ for (vals, blocks) |case_val, case_block| try w.print(
+ " {f%}, {f%}\n",
.{
case_val.fmt(self),
case_block.toInst(&function).fmt(function_index, self),
},
);
- try writer.writeAll(" ]");
+ try w.writeAll(" ]");
metadata_formatter.need_comma = true;
defer metadata_formatter.need_comma = undefined;
switch (extra.data.weights) {
.none => {},
- .unpredictable => try writer.writeAll("!unpredictable !{}"),
- _ => try writer.print("{}", .{
+ .unpredictable => try w.writeAll("!unpredictable !{}"),
+ _ => try w.print("{f}", .{
try metadata_formatter.fmt("!prof ", @as(Metadata, @enumFromInt(@intFromEnum(extra.data.weights)))),
}),
}
},
.va_arg => |tag| {
const extra = function.extraData(Function.Instruction.VaArg, instruction.data);
- try writer.print(" %{} = {s} {%}, {%}", .{
+ try w.print(" %{f} = {s} {f%}, {f%}", .{
instruction_index.name(&function).fmt(self),
@tagName(tag),
extra.list.fmt(function_index, self),
@@ -10188,45 +10040,45 @@ pub fn printUnbuffered(
}
if (maybe_dbg_index) |dbg_index| {
- try writer.print(", !dbg !{}", .{dbg_index});
+ try w.print(", !dbg !{d}", .{dbg_index});
}
- try writer.writeByte('\n');
+ try w.writeByte('\n');
}
- try writer.writeByte('}');
+ try w.writeByte('}');
}
- try writer.writeByte('\n');
+ try w.writeByte('\n');
}
if (attribute_groups.count() > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
for (0.., attribute_groups.keys()) |attribute_group_index, attribute_group|
- try writer.print(
- \\attributes #{d} = {{{#"} }}
+ try w.print(
+ \\attributes #{d} = {{{f#"} }}
\\
, .{ attribute_group_index, attribute_group.fmt(self) });
}
if (self.metadata_named.count() > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
for (self.metadata_named.keys(), self.metadata_named.values()) |name, data| {
const elements: []const Metadata =
@ptrCast(self.metadata_extra.items[data.index..][0..data.len]);
- try writer.writeByte('!');
- try printEscapedString(name.slice(self), .quote_unless_valid_identifier, writer);
- try writer.writeAll(" = !{");
+ try w.writeByte('!');
+ try printEscapedString(name.slice(self), .quote_unless_valid_identifier, w);
+ try w.writeAll(" = !{");
metadata_formatter.need_comma = false;
defer metadata_formatter.need_comma = undefined;
- for (elements) |element| try writer.print("{}", .{try metadata_formatter.fmt("", element)});
- try writer.writeAll("}\n");
+ for (elements) |element| try w.print("{f}", .{try metadata_formatter.fmt("", element)});
+ try w.writeAll("}\n");
}
}
if (metadata_formatter.map.count() > 0) {
- if (need_newline) try writer.writeByte('\n') else need_newline = true;
+ if (need_newline) try w.writeByte('\n') else need_newline = true;
var metadata_index: usize = 0;
while (metadata_index < metadata_formatter.map.count()) : (metadata_index += 1) {
@setEvalBranchQuota(10_000);
- try writer.print("!{} = ", .{metadata_index});
+ try w.print("!{d} = ", .{metadata_index});
metadata_formatter.need_comma = false;
defer metadata_formatter.need_comma = undefined;
@@ -10239,7 +10091,7 @@ pub fn printUnbuffered(
.scope = location.scope,
.inlinedAt = location.inlined_at,
.isImplicitCode = false,
- }, writer);
+ }, w);
continue;
},
.metadata => |metadata| self.metadata_items.get(@intFromEnum(metadata)),
@@ -10255,7 +10107,7 @@ pub fn printUnbuffered(
.checksumkind = null,
.checksum = null,
.source = null,
- }, writer);
+ }, w);
},
.compile_unit,
.@"compile_unit optimized",
@@ -10286,7 +10138,7 @@ pub fn printUnbuffered(
.rangesBaseAddress = null,
.sysroot = null,
.sdk = null,
- }, writer);
+ }, w);
},
.subprogram,
.@"subprogram local",
@@ -10320,7 +10172,7 @@ pub fn printUnbuffered(
.thrownTypes = null,
.annotations = null,
.targetFuncName = null,
- }, writer);
+ }, w);
},
.lexical_block => {
const extra = self.metadataExtraData(Metadata.LexicalBlock, metadata_item.data);
@@ -10329,7 +10181,7 @@ pub fn printUnbuffered(
.file = extra.file,
.line = extra.line,
.column = extra.column,
- }, writer);
+ }, w);
},
.location => {
const extra = self.metadataExtraData(Metadata.Location, metadata_item.data);
@@ -10339,7 +10191,7 @@ pub fn printUnbuffered(
.scope = extra.scope,
.inlinedAt = extra.inlined_at,
.isImplicitCode = false,
- }, writer);
+ }, w);
},
.basic_bool_type,
.basic_unsigned_type,
@@ -10368,7 +10220,7 @@ pub fn printUnbuffered(
else => unreachable,
}),
.flags = null,
- }, writer);
+ }, w);
},
.composite_struct_type,
.composite_union_type,
@@ -10413,7 +10265,7 @@ pub fn printUnbuffered(
.allocated = null,
.rank = null,
.annotations = null,
- }, writer);
+ }, w);
},
.derived_pointer_type,
.derived_member_type,
@@ -10446,7 +10298,7 @@ pub fn printUnbuffered(
.extraData = null,
.dwarfAddressSpace = null,
.annotations = null,
- }, writer);
+ }, w);
},
.subroutine_type => {
const extra = self.metadataExtraData(Metadata.SubroutineType, metadata_item.data);
@@ -10454,7 +10306,7 @@ pub fn printUnbuffered(
.flags = null,
.cc = null,
.types = extra.types_tuple,
- }, writer);
+ }, w);
},
.enumerator_unsigned,
.enumerator_signed_positive,
@@ -10504,7 +10356,7 @@ pub fn printUnbuffered(
=> false,
else => unreachable,
},
- }, writer);
+ }, w);
},
.subrange => {
const extra = self.metadataExtraData(Metadata.Subrange, metadata_item.data);
@@ -10513,31 +10365,31 @@ pub fn printUnbuffered(
.lowerBound = extra.lower_bound,
.upperBound = null,
.stride = null,
- }, writer);
+ }, w);
},
.tuple => {
var extra = self.metadataExtraDataTrail(Metadata.Tuple, metadata_item.data);
const elements = extra.trail.next(extra.data.elements_len, Metadata, self);
- try writer.writeAll("!{");
- for (elements) |element| try writer.print("{[element]%}", .{
+ try w.writeAll("!{");
+ for (elements) |element| try w.print("{[element]f%}", .{
.element = try metadata_formatter.fmt("", element),
});
- try writer.writeAll("}\n");
+ try w.writeAll("}\n");
},
.str_tuple => {
var extra = self.metadataExtraDataTrail(Metadata.StrTuple, metadata_item.data);
const elements = extra.trail.next(extra.data.elements_len, Metadata, self);
- try writer.print("!{{{[str]%}", .{
+ try w.print("!{{{[str]f%}", .{
.str = try metadata_formatter.fmt("", extra.data.str),
});
- for (elements) |element| try writer.print("{[element]%}", .{
+ for (elements) |element| try w.print("{[element]f%}", .{
.element = try metadata_formatter.fmt("", element),
});
- try writer.writeAll("}\n");
+ try w.writeAll("}\n");
},
.module_flag => {
const extra = self.metadataExtraData(Metadata.ModuleFlag, metadata_item.data);
- try writer.print("!{{{[behavior]%}{[name]%}{[constant]%}}}\n", .{
+ try w.print("!{{{[behavior]f%}{[name]f%}{[constant]f%}}}\n", .{
.behavior = try metadata_formatter.fmt("", extra.behavior),
.name = try metadata_formatter.fmt("", extra.name),
.constant = try metadata_formatter.fmt("", extra.constant),
@@ -10555,7 +10407,7 @@ pub fn printUnbuffered(
.flags = null,
.@"align" = null,
.annotations = null,
- }, writer);
+ }, w);
},
.parameter => {
const extra = self.metadataExtraData(Metadata.Parameter, metadata_item.data);
@@ -10569,7 +10421,7 @@ pub fn printUnbuffered(
.flags = null,
.@"align" = null,
.annotations = null,
- }, writer);
+ }, w);
},
.global_var,
.@"global_var local",
@@ -10592,7 +10444,7 @@ pub fn printUnbuffered(
.templateParams = null,
.@"align" = null,
.annotations = null,
- }, writer);
+ }, w);
},
.global_var_expression => {
const extra =
@@ -10600,7 +10452,7 @@ pub fn printUnbuffered(
try metadata_formatter.specialized(.@"!", .DIGlobalVariableExpression, .{
.@"var" = extra.variable,
.expr = extra.expression,
- }, writer);
+ }, w);
},
}
}
@@ -10619,22 +10471,18 @@ fn isValidIdentifier(id: []const u8) bool {
}
const QuoteBehavior = enum { always_quote, quote_unless_valid_identifier };
-fn printEscapedString(
- slice: []const u8,
- quotes: QuoteBehavior,
- writer: anytype,
-) @TypeOf(writer).Error!void {
+fn printEscapedString(slice: []const u8, quotes: QuoteBehavior, w: *Writer) Writer.Error!void {
const need_quotes = switch (quotes) {
.always_quote => true,
.quote_unless_valid_identifier => !isValidIdentifier(slice),
};
- if (need_quotes) try writer.writeByte('"');
+ if (need_quotes) try w.writeByte('"');
for (slice) |byte| switch (byte) {
- '\\' => try writer.writeAll("\\\\"),
- ' '...'"' - 1, '"' + 1...'\\' - 1, '\\' + 1...'~' => try writer.writeByte(byte),
- else => try writer.print("\\{X:0>2}", .{byte}),
+ '\\' => try w.writeAll("\\\\"),
+ ' '...'"' - 1, '"' + 1...'\\' - 1, '\\' + 1...'~' => try w.writeByte(byte),
+ else => try w.print("\\{X:0>2}", .{byte}),
};
- if (need_quotes) try writer.writeByte('"');
+ if (need_quotes) try w.writeByte('"');
}
fn ensureUnusedGlobalCapacity(self: *Builder, name: StrtabString) Allocator.Error!void {
@@ -12019,7 +11867,7 @@ pub fn metadataStringFmt(self: *Builder, comptime fmt_str: []const u8, fmt_args:
}
pub fn metadataStringFmtAssumeCapacity(self: *Builder, comptime fmt_str: []const u8, fmt_args: anytype) MetadataString {
- self.metadata_string_bytes.writer(undefined).print(fmt_str, fmt_args) catch unreachable;
+ self.metadata_string_bytes.printAssumeCapacity(fmt_str, fmt_args);
return self.trailingMetadataStringAssumeCapacity();
}
@@ -15261,13 +15109,3 @@ pub fn toBitcode(self: *Builder, allocator: Allocator, producer: Producer) bitco
return bitcode.toOwnedSlice();
}
-
-const Allocator = std.mem.Allocator;
-const assert = std.debug.assert;
-const bitcode_writer = @import("bitcode_writer.zig");
-const Builder = @This();
-const builtin = @import("builtin");
-const DW = std.dwarf;
-const ir = @import("ir.zig");
-const log = std.log.scoped(.llvm);
-const std = @import("../../std.zig");
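Editor's note: the bulk of this file's change is mechanical — formatter callbacks drop the `anytype` writer and comptime format string in favor of a concrete `*std.io.Writer`, per-call options move out of the format string into the formatter's data, `std.fmt.Formatter` gains an explicit data-type parameter, and call sites switch from `{}` to `{f}`. A minimal sketch of the resulting shape, using a hypothetical `Temperature` type that is not part of this commit:

    const std = @import("std");
    const Writer = std.io.Writer;

    const Temperature = struct {
        celsius: f64,

        const Unit = enum { celsius, fahrenheit };

        const FormatData = struct {
            temp: Temperature,
            // Options the old API encoded in the format string ("{%}", "{,}", ...)
            // now travel as plain data alongside the value.
            unit: Unit,
        };

        // New-style callback: concrete writer type, fixed error set.
        fn format(data: FormatData, w: *Writer) Writer.Error!void {
            switch (data.unit) {
                .celsius => try w.print("{d:.1}C", .{data.temp.celsius}),
                .fahrenheit => try w.print("{d:.1}F", .{data.temp.celsius * 9.0 / 5.0 + 32.0}),
            }
        }

        // std.fmt.Formatter now names the data type as well as the callback.
        fn fmt(t: Temperature, unit: Unit) std.fmt.Formatter(FormatData, format) {
            return .{ .data = .{ .temp = t, .unit = unit } };
        }
    };

    test "formatter sketch" {
        // Values produced by such a Formatter are rendered with the new {f} specifier.
        try std.testing.expectFmt("21.5C", "{f}", .{Temperature.fmt(.{ .celsius = 21.5 }, .celsius)});
    }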
lib/std/zig/Ast.zig
@@ -565,14 +565,14 @@ pub fn renderError(tree: Ast, parse_error: Error, stream: anytype) !void {
.invalid_byte => {
const tok_slice = tree.source[tree.tokens.items(.start)[parse_error.token]..];
- return stream.print("{s} contains invalid byte: '{'}'", .{
+ return stream.print("{s} contains invalid byte: '{f}'", .{
switch (tok_slice[0]) {
'\'' => "character literal",
'"', '\\' => "string literal",
'/' => "comment",
else => unreachable,
},
- std.zig.fmtEscapes(tok_slice[parse_error.extra.offset..][0..1]),
+ std.zig.fmtChar(tok_slice[parse_error.extra.offset..][0..1]),
});
},
lib/std/zig/ErrorBundle.zig
@@ -165,7 +165,7 @@ pub fn renderToStdErr(eb: ErrorBundle, options: RenderOptions) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
const stderr: std.fs.File = .stderr();
- return renderToWriter(eb, options, stderr.writer()) catch return;
+ return renderToWriter(eb, options, stderr.deprecatedWriter()) catch return;
}
pub fn renderToWriter(eb: ErrorBundle, options: RenderOptions, writer: anytype) anyerror!void {
lib/std/zig/parser_test.zig
@@ -6324,7 +6324,7 @@ test "ampersand" {
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: [:0]const u8, allocator: mem.Allocator, anything_changed: *bool) ![]u8 {
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
var tree = try std.zig.Ast.parse(allocator, source, .zig);
defer tree.deinit(allocator);
lib/std/zig/perf_test.zig
@@ -23,7 +23,7 @@ pub fn main() !void {
const bytes_per_sec = @as(u64, @intFromFloat(@floor(bytes_per_sec_float)));
var stdout_file: std.fs.File = .stdout();
- const stdout = stdout_file.writer();
+ const stdout = stdout_file.deprecatedWriter();
try stdout.print("parsing speed: {:.2}/s, {:.2} used \n", .{
fmtIntSizeBin(bytes_per_sec),
fmtIntSizeBin(memory_used),
lib/std/zig/render.zig
@@ -1564,7 +1564,7 @@ fn renderBuiltinCall(
defer r.gpa.free(new_string);
try renderToken(r, builtin_token + 1, .none); // (
- try ais.writer().print("\"{}\"", .{std.zig.fmtEscapes(new_string)});
+ try ais.writer().print("\"{f}\"", .{std.zig.fmtString(new_string)});
return renderToken(r, str_lit_token + 1, space); // )
}
}
@@ -2872,7 +2872,7 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
.success => |codepoint| {
if (codepoint <= 0x7f) {
const buf = [1]u8{@as(u8, @intCast(codepoint))};
- try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)});
+ try std.fmt.deprecatedFormat(writer, "{f}", .{std.zig.fmtString(&buf)});
} else {
try writer.writeAll(escape_sequence);
}
@@ -2884,7 +2884,7 @@ fn renderIdentifierContents(writer: anytype, bytes: []const u8) !void {
},
0x00...('\\' - 1), ('\\' + 1)...0x7f => {
const buf = [1]u8{byte};
- try std.fmt.format(writer, "{}", .{std.zig.fmtEscapes(&buf)});
+ try std.fmt.deprecatedFormat(writer, "{f}", .{std.zig.fmtString(&buf)});
pos += 1;
},
0x80...0xff => {
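Editor's note: alongside the writer change, `std.zig.fmtEscapes` is replaced by `std.zig.fmtString` (and `fmtChar` for single characters, as in the Ast.zig hunk above), and both are printed with `{f}`. A small sketch of the escaping helper, assuming it keeps the old `fmtEscapes` output:

    const std = @import("std");

    test "fmtString escapes for a double-quoted literal" {
        // An input containing a real newline renders as backslash-n in the output.
        try std.testing.expectFmt("a\\nb", "{f}", .{std.zig.fmtString("a\nb")});
    }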
lib/std/zig/string_literal.zig
@@ -44,14 +44,7 @@ pub const Error = union(enum) {
raw_string: []const u8,
};
- fn formatMessage(
- self: FormatMessage,
- comptime f: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = f;
- _ = options;
+ fn formatMessage(self: FormatMessage, writer: *std.io.Writer) std.io.Writer.Error!void {
switch (self.err) {
.invalid_escape_character => |bad_index| try writer.print(
"invalid escape character: '{c}'",
@@ -93,7 +86,7 @@ pub const Error = union(enum) {
}
}
- pub fn fmt(self: @This(), raw_string: []const u8) std.fmt.Formatter(formatMessage) {
+ pub fn fmt(self: @This(), raw_string: []const u8) std.fmt.Formatter(FormatMessage, formatMessage) {
return .{ .data = .{
.err = self,
.raw_string = raw_string,
lib/std/zig/ZonGen.zig
@@ -756,13 +756,7 @@ fn lowerStrLitError(
raw_string: []const u8,
offset: u32,
) Allocator.Error!void {
- return ZonGen.addErrorTokOff(
- zg,
- token,
- @intCast(offset + err.offset()),
- "{}",
- .{err.fmt(raw_string)},
- );
+ return ZonGen.addErrorTokOff(zg, token, @intCast(offset + err.offset()), "{f}", .{err.fmt(raw_string)});
}
fn lowerNumberError(zg: *ZonGen, err: std.zig.number_literal.Error, token: Ast.TokenIndex, bytes: []const u8) Allocator.Error!void {
lib/std/zip/test.zig
@@ -33,7 +33,7 @@ pub fn expectFiles(
var file = try dir.openFile(normalized_sub_path, .{});
defer file.close();
var content_buf: [4096]u8 = undefined;
- const n = try file.reader().readAll(&content_buf);
+ const n = try file.deprecatedReader().readAll(&content_buf);
try testing.expectEqualStrings(test_file.content, content_buf[0..n]);
}
}
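Editor's note: call sites that still use the old streaming interfaces are not rewritten in this commit; they only pick up the accessor renames `reader()` -> `deprecatedReader()` and `writer()` -> `deprecatedWriter()`. A sketch of such an unported helper (hypothetical, not part of this commit):

    const std = @import("std");

    // Still uses the old streaming API; only the accessor names change here.
    fn copyGreeting(dir: std.fs.Dir) !void {
        var in = try dir.openFile("greeting.txt", .{});
        defer in.close();
        var out = try dir.createFile("greeting.out", .{});
        defer out.close();

        var buf: [4096]u8 = undefined;
        const n = try in.deprecatedReader().readAll(&buf);
        try out.deprecatedWriter().writeAll(buf[0..n]);
    }

    test "copy via deprecated accessors" {
        var tmp = std.testing.tmpDir(.{});
        defer tmp.cleanup();
        try tmp.dir.writeFile(.{ .sub_path = "greeting.txt", .data = "hello\n" });
        try copyGreeting(tmp.dir);
    }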
lib/std/zon/parse.zig
@@ -64,22 +64,14 @@ pub const Error = union(enum) {
}
};
- fn formatMessage(
- self: []const u8,
- comptime f: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = f;
- _ = options;
-
+ fn formatMessage(self: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
// Just writes the string for now, but we're keeping this behind a formatter so we have
// the option to extend it in the future to print more advanced messages (like `Error`
// does) without breaking the API.
- try writer.writeAll(self);
+ try w.writeAll(self);
}
- pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter(Note.formatMessage) {
+ pub fn fmtMessage(self: Note, diag: *const Diagnostics) std.fmt.Formatter([]const u8, Note.formatMessage) {
return .{ .data = switch (self) {
.zoir => |note| note.msg.get(diag.zoir),
.type_check => |note| note.msg,
@@ -155,21 +147,14 @@ pub const Error = union(enum) {
diag: *const Diagnostics,
};
- fn formatMessage(
- self: FormatMessage,
- comptime f: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = f;
- _ = options;
+ fn formatMessage(self: FormatMessage, w: *std.io.Writer) std.io.Writer.Error!void {
switch (self.err) {
- .zoir => |err| try writer.writeAll(err.msg.get(self.diag.zoir)),
- .type_check => |tc| try writer.writeAll(tc.message),
+ .zoir => |err| try w.writeAll(err.msg.get(self.diag.zoir)),
+ .type_check => |tc| try w.writeAll(tc.message),
}
}
- pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(formatMessage) {
+ pub fn fmtMessage(self: @This(), diag: *const Diagnostics) std.fmt.Formatter(FormatMessage, formatMessage) {
return .{ .data = .{
.err = self,
.diag = diag,
@@ -241,25 +226,19 @@ pub const Diagnostics = struct {
return .{ .diag = self };
}
- pub fn format(
- self: *const @This(),
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt;
- _ = options;
+ pub fn format(self: *const @This(), w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
var errors = self.iterateErrors();
while (errors.next()) |err| {
const loc = err.getLocation(self);
const msg = err.fmtMessage(self);
- try writer.print("{}:{}: error: {}\n", .{ loc.line + 1, loc.column + 1, msg });
+ try w.print("{d}:{d}: error: {f}\n", .{ loc.line + 1, loc.column + 1, msg });
var notes = err.iterateNotes(self);
while (notes.next()) |note| {
const note_loc = note.getLocation(self);
const note_msg = note.fmtMessage(self);
- try writer.print("{}:{}: note: {s}\n", .{
+ try w.print("{d}:{d}: note: {f}\n", .{
note_loc.line + 1,
note_loc.column + 1,
note_msg,
@@ -646,7 +625,7 @@ const Parser = struct {
.failure => |err| {
const token = self.ast.nodeMainToken(ast_node);
const raw_string = self.ast.tokenSlice(token);
- return self.failTokenFmt(token, @intCast(err.offset()), "{s}", .{err.fmt(raw_string)});
+ return self.failTokenFmt(token, @intCast(err.offset()), "{f}", .{err.fmt(raw_string)});
},
}
@@ -1087,7 +1066,10 @@ const Parser = struct {
try writer.writeAll(msg);
inline for (info.fields, 0..) |field_info, i| {
if (i != 0) try writer.writeAll(", ");
- try writer.print("'{p_}'", .{std.zig.fmtId(field_info.name)});
+ try writer.print("'{f}'", .{std.zig.fmtIdFlags(field_info.name, .{
+ .allow_primitive = true,
+ .allow_underscore = true,
+ })});
}
break :b .{
.token = token,
@@ -1298,7 +1280,7 @@ test "std.zon ast errors" {
error.ParseZon,
fromSlice(struct {}, gpa, ".{.x = 1 .y = 2}", &diag, .{}),
);
- try std.testing.expectFmt("1:13: error: expected ',' after initializer\n", "{}", .{diag});
+ try std.testing.expectFmt("1:13: error: expected ',' after initializer\n", "{f}", .{diag});
}
test "std.zon comments" {
@@ -1320,7 +1302,7 @@ test "std.zon comments" {
, &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected expression, found 'a document comment'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1341,7 +1323,7 @@ test "std.zon failure/oom formatting" {
&diag,
.{},
));
- try std.testing.expectFmt("", "{}", .{diag});
+ try std.testing.expectFmt("", "{f}", .{diag});
}
test "std.zon fromSlice syntax error" {
@@ -1421,7 +1403,7 @@ test "std.zon unions" {
\\1:4: note: supported: 'x', 'y'
\\
,
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1435,7 +1417,7 @@ test "std.zon unions" {
error.ParseZon,
fromSlice(Union, gpa, ".{.x=1}", &diag, .{}),
);
- try std.testing.expectFmt("1:6: error: expected type 'void'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:6: error: expected type 'void'\n", "{f}", .{diag});
}
// Extra field
@@ -1447,7 +1429,7 @@ test "std.zon unions" {
error.ParseZon,
fromSlice(Union, gpa, ".{.x = 1.5, .y = true}", &diag, .{}),
);
- try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag});
}
// No fields
@@ -1459,7 +1441,7 @@ test "std.zon unions" {
error.ParseZon,
fromSlice(Union, gpa, ".{}", &diag, .{}),
);
- try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag});
}
// Enum literals cannot coerce into untagged unions
@@ -1468,7 +1450,7 @@ test "std.zon unions" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(Union, gpa, ".x", &diag, .{}));
- try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag});
}
// Unknown field for enum literal coercion
@@ -1482,7 +1464,7 @@ test "std.zon unions" {
\\1:2: note: supported: 'x'
\\
,
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1493,7 +1475,7 @@ test "std.zon unions" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(Union, gpa, ".x", &diag, .{}));
- try std.testing.expectFmt("1:2: error: expected union\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected union\n", "{f}", .{diag});
}
}
@@ -1549,7 +1531,7 @@ test "std.zon structs" {
\\1:12: note: supported: 'x', 'y'
\\
,
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1567,7 +1549,7 @@ test "std.zon structs" {
\\1:4: error: duplicate struct field name
\\1:12: note: duplicate name here
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Ignore unknown fields
@@ -1592,7 +1574,7 @@ test "std.zon structs" {
\\1:4: error: unexpected field 'x'
\\1:4: note: none expected
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Missing field
@@ -1604,7 +1586,7 @@ test "std.zon structs" {
error.ParseZon,
fromSlice(Vec2, gpa, ".{.x=1.5}", &diag, .{}),
);
- try std.testing.expectFmt("1:2: error: missing required field y\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: missing required field y\n", "{f}", .{diag});
}
// Default field
@@ -1631,7 +1613,7 @@ test "std.zon structs" {
try std.testing.expectFmt(
\\1:18: error: cannot initialize comptime field
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Enum field (regression test, we were previously getting the field name in an
@@ -1661,7 +1643,7 @@ test "std.zon structs" {
\\1:1: error: types are not available in ZON
\\1:1: note: replace the type with '.'
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Arrays
@@ -1674,7 +1656,7 @@ test "std.zon structs" {
\\1:1: error: types are not available in ZON
\\1:1: note: replace the type with '.'
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Slices
@@ -1687,7 +1669,7 @@ test "std.zon structs" {
\\1:1: error: types are not available in ZON
\\1:1: note: replace the type with '.'
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Tuples
@@ -1706,7 +1688,7 @@ test "std.zon structs" {
\\1:1: error: types are not available in ZON
\\1:1: note: replace the type with '.'
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Nested
@@ -1719,7 +1701,7 @@ test "std.zon structs" {
\\1:9: error: types are not available in ZON
\\1:9: note: replace the type with '.'
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
}
}
@@ -1764,7 +1746,7 @@ test "std.zon tuples" {
error.ParseZon,
fromSlice(Tuple, gpa, ".{0.5, true, 123}", &diag, .{}),
);
- try std.testing.expectFmt("1:14: error: index 2 outside of tuple length 2\n", "{}", .{diag});
+ try std.testing.expectFmt("1:14: error: index 2 outside of tuple length 2\n", "{f}", .{diag});
}
// Extra field
@@ -1778,7 +1760,7 @@ test "std.zon tuples" {
);
try std.testing.expectFmt(
"1:2: error: missing tuple field with index 1\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1792,7 +1774,7 @@ test "std.zon tuples" {
error.ParseZon,
fromSlice(Tuple, gpa, ".{.foo = 10.0}", &diag, .{}),
);
- try std.testing.expectFmt("1:2: error: expected tuple\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected tuple\n", "{f}", .{diag});
}
// Struct with missing field names
@@ -1804,7 +1786,7 @@ test "std.zon tuples" {
error.ParseZon,
fromSlice(Struct, gpa, ".{10.0}", &diag, .{}),
);
- try std.testing.expectFmt("1:2: error: expected struct\n", "{}", .{diag});
+ try std.testing.expectFmt("1:2: error: expected struct\n", "{f}", .{diag});
}
// Comptime field
@@ -1824,7 +1806,7 @@ test "std.zon tuples" {
try std.testing.expectFmt(
\\1:9: error: cannot initialize comptime field
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
}
@@ -1936,7 +1918,7 @@ test "std.zon arrays and slices" {
);
try std.testing.expectFmt(
"1:3: error: index 0 outside of array of length 0\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1951,7 +1933,7 @@ test "std.zon arrays and slices" {
);
try std.testing.expectFmt(
"1:8: error: index 1 outside of array of length 1\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1966,7 +1948,7 @@ test "std.zon arrays and slices" {
);
try std.testing.expectFmt(
"1:2: error: expected 2 array elements; found 1\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1981,7 +1963,7 @@ test "std.zon arrays and slices" {
);
try std.testing.expectFmt(
"1:2: error: expected 3 array elements; found 0\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -1996,7 +1978,7 @@ test "std.zon arrays and slices" {
error.ParseZon,
fromSlice([3]bool, gpa, ".{'a', 'b', 'c'}", &diag, .{}),
);
- try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{f}", .{diag});
}
// Slice
@@ -2007,7 +1989,7 @@ test "std.zon arrays and slices" {
error.ParseZon,
fromSlice([]bool, gpa, ".{'a', 'b', 'c'}", &diag, .{}),
);
- try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:3: error: expected type 'bool'\n", "{f}", .{diag});
}
}
@@ -2021,7 +2003,7 @@ test "std.zon arrays and slices" {
error.ParseZon,
fromSlice([3]u8, gpa, "'a'", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
// Slice
@@ -2032,7 +2014,7 @@ test "std.zon arrays and slices" {
error.ParseZon,
fromSlice([]u8, gpa, "'a'", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2046,7 +2028,7 @@ test "std.zon arrays and slices" {
);
try std.testing.expectFmt(
"1:3: error: pointers are not available in ZON\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2085,7 +2067,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]u8, gpa, "\"abcd\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
{
@@ -2095,7 +2077,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]u8, gpa, "\\\\abcd", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2112,7 +2094,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([4:0]u8, gpa, "\"abcd\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
{
@@ -2122,7 +2104,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([4:0]u8, gpa, "\\\\abcd", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2164,7 +2146,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([:1]const u8, gpa, "\"foo\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
{
@@ -2174,7 +2156,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([:1]const u8, gpa, "\\\\foo", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2186,7 +2168,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]const u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected string\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected string\n", "{f}", .{diag});
}
// Expecting string literal, getting an incompatible tuple
@@ -2197,7 +2179,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]const u8, gpa, ".{false}", &diag, .{}),
);
- try std.testing.expectFmt("1:3: error: expected type 'u8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:3: error: expected type 'u8'\n", "{f}", .{diag});
}
// Invalid string literal
@@ -2208,7 +2190,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]const i8, gpa, "\"\\a\"", &diag, .{}),
);
- try std.testing.expectFmt("1:3: error: invalid escape character: 'a'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:3: error: invalid escape character: 'a'\n", "{f}", .{diag});
}
// Slice wrong child type
@@ -2220,7 +2202,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]const i8, gpa, "\"a\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
{
@@ -2230,7 +2212,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]const i8, gpa, "\\\\a", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2243,7 +2225,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]align(2) const u8, gpa, "\"abc\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
{
@@ -2253,7 +2235,7 @@ test "std.zon string literal" {
error.ParseZon,
fromSlice([]align(2) const u8, gpa, "\\\\abc", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected array\n", "{f}", .{diag});
}
}
@@ -2327,7 +2309,7 @@ test "std.zon enum literals" {
\\1:2: note: supported: 'foo', 'bar', 'baz', '@"ab\nc"'
\\
,
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2345,7 +2327,7 @@ test "std.zon enum literals" {
\\1:2: note: supported: 'foo', 'bar', 'baz', '@"ab\nc"'
\\
,
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2358,7 +2340,7 @@ test "std.zon enum literals" {
error.ParseZon,
fromSlice(Enum, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected enum literal\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected enum literal\n", "{f}", .{diag});
}
// Test embedded nulls in an identifier
@@ -2371,7 +2353,7 @@ test "std.zon enum literals" {
);
try std.testing.expectFmt(
"1:2: error: identifier cannot contain null bytes\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2397,13 +2379,13 @@ test "std.zon parse bool" {
\\1:2: note: ZON allows identifiers 'true', 'false', 'null', 'inf', and 'nan'
\\1:2: note: precede identifier with '.' for an enum literal
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
{
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(bool, gpa, "123", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'bool'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'bool'\n", "{f}", .{diag});
}
}
@@ -2476,7 +2458,7 @@ test "std.zon parse int" {
));
try std.testing.expectFmt(
"1:1: error: type 'i66' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2492,7 +2474,7 @@ test "std.zon parse int" {
));
try std.testing.expectFmt(
"1:1: error: type 'i66' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2581,7 +2563,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "32a32", &diag, .{}));
try std.testing.expectFmt(
"1:3: error: invalid digit 'a' for decimal base\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2591,7 +2573,7 @@ test "std.zon parse int" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "true", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'u8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'u8'\n", "{f}", .{diag});
}
// Failing because an int is out of range
@@ -2601,7 +2583,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "256", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: type 'u8' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2613,7 +2595,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "-129", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: type 'i8' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2625,7 +2607,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "-1", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: type 'u8' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2637,7 +2619,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "1.5", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: type 'u8' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2649,7 +2631,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "-1.0", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: type 'u8' cannot represent value\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2664,7 +2646,7 @@ test "std.zon parse int" {
\\1:2: note: use '0' for an integer zero
\\1:2: note: use '-0.0' for a floating-point signed zero
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Negative integer zero casted to float
@@ -2677,7 +2659,7 @@ test "std.zon parse int" {
\\1:2: note: use '0' for an integer zero
\\1:2: note: use '-0.0' for a floating-point signed zero
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
// Negative float 0 is allowed
@@ -2693,7 +2675,7 @@ test "std.zon parse int" {
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "--2", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2707,7 +2689,7 @@ test "std.zon parse int" {
);
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2717,7 +2699,7 @@ test "std.zon parse int" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(u8, gpa, "0xg", &diag, .{}));
- try std.testing.expectFmt("1:3: error: invalid digit 'g' for hex base\n", "{}", .{diag});
+ try std.testing.expectFmt("1:3: error: invalid digit 'g' for hex base\n", "{f}", .{diag});
}
// Notes on invalid int literal
@@ -2729,7 +2711,7 @@ test "std.zon parse int" {
\\1:1: error: number '0123' has leading zero
\\1:1: note: use '0o' prefix for octal literals
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
}
@@ -2742,7 +2724,7 @@ test "std.zon negative char" {
try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-'a'", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2752,7 +2734,7 @@ test "std.zon negative char" {
try std.testing.expectError(error.ParseZon, fromSlice(i16, gpa, "-'a'", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2839,7 +2821,7 @@ test "std.zon parse float" {
try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-nan", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2849,7 +2831,7 @@ test "std.zon parse float" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "nan", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag});
}
// nan as int not allowed
@@ -2857,7 +2839,7 @@ test "std.zon parse float" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "nan", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag});
}
// inf as int not allowed
@@ -2865,7 +2847,7 @@ test "std.zon parse float" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "inf", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag});
}
// -inf as int not allowed
@@ -2873,7 +2855,7 @@ test "std.zon parse float" {
var diag: Diagnostics = .{};
defer diag.deinit(gpa);
try std.testing.expectError(error.ParseZon, fromSlice(i8, gpa, "-inf", &diag, .{}));
- try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'i8'\n", "{f}", .{diag});
}
// Bad identifier as float
@@ -2886,7 +2868,7 @@ test "std.zon parse float" {
\\1:1: note: ZON allows identifiers 'true', 'false', 'null', 'inf', and 'nan'
\\1:1: note: precede identifier with '.' for an enum literal
\\
- , "{}", .{diag});
+ , "{f}", .{diag});
}
{
@@ -2895,7 +2877,7 @@ test "std.zon parse float" {
try std.testing.expectError(error.ParseZon, fromSlice(f32, gpa, "-foo", &diag, .{}));
try std.testing.expectFmt(
"1:1: error: expected number or 'inf' after '-'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -2908,7 +2890,7 @@ test "std.zon parse float" {
error.ParseZon,
fromSlice(f32, gpa, "\"foo\"", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type 'f32'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type 'f32'\n", "{f}", .{diag});
}
}
@@ -3152,7 +3134,7 @@ test "std.zon vector" {
);
try std.testing.expectFmt(
"1:2: error: expected 2 vector elements; found 1\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -3167,7 +3149,7 @@ test "std.zon vector" {
);
try std.testing.expectFmt(
"1:2: error: expected 2 vector elements; found 3\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -3182,7 +3164,7 @@ test "std.zon vector" {
);
try std.testing.expectFmt(
"1:8: error: expected type 'f32'\n",
- "{}",
+ "{f}",
.{diag},
);
}
@@ -3195,7 +3177,7 @@ test "std.zon vector" {
error.ParseZon,
fromSlice(@Vector(3, u8), gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type '@Vector(3, u8)'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type '@Vector(3, u8)'\n", "{f}", .{diag});
}
// Elements should get freed on error
@@ -3206,7 +3188,7 @@ test "std.zon vector" {
error.ParseZon,
fromSlice(@Vector(3, *u8), gpa, ".{1, true, 3}", &diag, .{}),
);
- try std.testing.expectFmt("1:6: error: expected type 'u8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:6: error: expected type 'u8'\n", "{f}", .{diag});
}
}
@@ -3330,7 +3312,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type '?u8'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type '?u8'\n", "{f}", .{diag});
}
{
@@ -3340,7 +3322,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const f32, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type '?f32'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type '?f32'\n", "{f}", .{diag});
}
{
@@ -3350,7 +3332,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const @Vector(3, u8), gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type '?@Vector(3, u8)'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type '?@Vector(3, u8)'\n", "{f}", .{diag});
}
{
@@ -3360,7 +3342,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const bool, gpa, "10", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected type '?bool'\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected type '?bool'\n", "{f}", .{diag});
}
{
@@ -3370,7 +3352,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const struct { a: i32 }, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional struct\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional struct\n", "{f}", .{diag});
}
{
@@ -3380,7 +3362,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const struct { i32 }, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional tuple\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional tuple\n", "{f}", .{diag});
}
{
@@ -3390,7 +3372,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const union { x: void }, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional union\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional union\n", "{f}", .{diag});
}
{
@@ -3400,7 +3382,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const [3]u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag});
}
{
@@ -3410,7 +3392,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(?[3]u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag});
}
{
@@ -3420,7 +3402,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const []u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag});
}
{
@@ -3430,7 +3412,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(?[]u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional array\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional array\n", "{f}", .{diag});
}
{
@@ -3440,7 +3422,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const []const u8, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional string\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional string\n", "{f}", .{diag});
}
{
@@ -3450,7 +3432,7 @@ test "std.zon add pointers" {
error.ParseZon,
fromSlice(*const ?*const enum { foo }, gpa, "true", &diag, .{}),
);
- try std.testing.expectFmt("1:1: error: expected optional enum literal\n", "{}", .{diag});
+ try std.testing.expectFmt("1:1: error: expected optional enum literal\n", "{f}", .{diag});
}
}
lib/std/zon/stringify.zig
@@ -501,7 +501,7 @@ pub fn Serializer(Writer: type) type {
try self.int(val);
},
.float, .comptime_float => try self.float(val),
- .bool, .null => try std.fmt.format(self.writer, "{}", .{val}),
+ .bool, .null => try std.fmt.deprecatedFormat(self.writer, "{}", .{val}),
.enum_literal => try self.ident(@tagName(val)),
.@"enum" => try self.ident(@tagName(val)),
.pointer => |pointer| {
@@ -615,7 +615,8 @@ pub fn Serializer(Writer: type) type {
/// Serialize an integer.
pub fn int(self: *Self, val: anytype) Writer.Error!void {
- try std.fmt.formatInt(val, 10, .lower, .{}, self.writer);
+ //try self.writer.printIntOptions(val, 10, .lower, .{});
+ try std.fmt.deprecatedFormat(self.writer, "{d}", .{val});
}
/// Serialize a float.
@@ -630,12 +631,12 @@ pub fn Serializer(Writer: type) type {
} else if (std.math.isNegativeZero(val)) {
return self.writer.writeAll("-0.0");
} else {
- try std.fmt.format(self.writer, "{d}", .{val});
+ try std.fmt.deprecatedFormat(self.writer, "{d}", .{val});
},
.comptime_float => if (val == 0) {
return self.writer.writeAll("0");
} else {
- try std.fmt.format(self.writer, "{d}", .{val});
+ try std.fmt.deprecatedFormat(self.writer, "{d}", .{val});
},
else => comptime unreachable,
}
@@ -645,7 +646,7 @@ pub fn Serializer(Writer: type) type {
///
/// Escapes the identifier if necessary.
pub fn ident(self: *Self, name: []const u8) Writer.Error!void {
- try self.writer.print(".{p_}", .{std.zig.fmtId(name)});
+ try self.writer.print(".{f}", .{std.zig.fmtIdPU(name)});
}
/// Serialize `val` as a Unicode codepoint.
@@ -658,7 +659,7 @@ pub fn Serializer(Writer: type) type {
var buf: [8]u8 = undefined;
const len = std.unicode.utf8Encode(val, &buf) catch return error.InvalidCodepoint;
const str = buf[0..len];
- try std.fmt.format(self.writer, "'{'}'", .{std.zig.fmtEscapes(str)});
+ try std.fmt.deprecatedFormat(self.writer, "'{f}'", .{std.zig.fmtChar(str)});
}
/// Like `value`, but always serializes `val` as a tuple.
@@ -716,7 +717,7 @@ pub fn Serializer(Writer: type) type {
/// Like `value`, but always serializes `val` as a string.
pub fn string(self: *Self, val: []const u8) Writer.Error!void {
- try std.fmt.format(self.writer, "\"{}\"", .{std.zig.fmtEscapes(val)});
+ try std.fmt.deprecatedFormat(self.writer, "\"{f}\"", .{std.zig.fmtString(val)});
}
/// Options for formatting multiline strings.
lib/std/ascii.zig
@@ -435,3 +435,44 @@ pub fn orderIgnoreCase(lhs: []const u8, rhs: []const u8) std.math.Order {
pub fn lessThanIgnoreCase(lhs: []const u8, rhs: []const u8) bool {
return orderIgnoreCase(lhs, rhs) == .lt;
}
+
+pub const HexEscape = struct {
+ bytes: []const u8,
+ charset: *const [16]u8,
+
+ pub const upper_charset = "0123456789ABCDEF";
+ pub const lower_charset = "0123456789abcdef";
+
+ pub fn format(se: HexEscape, w: *std.io.Writer) std.io.Writer.Error!void {
+ const charset = se.charset;
+
+ var buf: [4]u8 = undefined;
+ buf[0] = '\\';
+ buf[1] = 'x';
+
+ for (se.bytes) |c| {
+ if (std.ascii.isPrint(c)) {
+ try w.writeByte(c);
+ } else {
+ buf[2] = charset[c >> 4];
+ buf[3] = charset[c & 15];
+ try w.writeAll(&buf);
+ }
+ }
+ }
+};
+
+/// Replaces non-ASCII bytes with hex escapes.
+pub fn hexEscape(bytes: []const u8, case: std.fmt.Case) std.fmt.Formatter(HexEscape, HexEscape.format) {
+ return .{ .data = .{ .bytes = bytes, .charset = switch (case) {
+ .lower => HexEscape.lower_charset,
+ .upper => HexEscape.upper_charset,
+ } } };
+}
+
+test hexEscape {
+ try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .lower)});
+ try std.testing.expectFmt("ab\\xffc", "{f}", .{hexEscape("ab\xffc", .lower)});
+ try std.testing.expectFmt("abc 123", "{f}", .{hexEscape("abc 123", .upper)});
+ try std.testing.expectFmt("ab\\xFFc", "{f}", .{hexEscape("ab\xffc", .upper)});
+}
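A hedged usage sketch for the new std.ascii.hexEscape helper added above, outside its own tests; the logRawPacket helper and message text are illustrative only.
const std = @import("std");
// Non-printable bytes render as \xNN escapes, keeping terminal output readable.
fn logRawPacket(packet: []const u8) void {
    std.debug.print("received: {f}\n", .{std.ascii.hexEscape(packet, .lower)});
}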
lib/std/Build.zig
@@ -1745,7 +1745,7 @@ pub fn addUserInputOption(b: *Build, name_raw: []const u8, value_raw: []const u8
return true;
},
.lazy_path, .lazy_path_list => {
- log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) });
+ log.warn("the lazy path value type isn't added from the CLI, but somehow '{s}' is a .{f}", .{ name, std.zig.fmtId(@tagName(gop.value_ptr.value)) });
return true;
},
}
@@ -2059,7 +2059,7 @@ pub fn runAllowFail(
try Step.handleVerbose2(b, null, child.env_map, argv);
try child.spawn();
- const stdout = child.stdout.?.reader().readAllAlloc(b.allocator, max_output_size) catch {
+ const stdout = child.stdout.?.deprecatedReader().readAllAlloc(b.allocator, max_output_size) catch {
return error.ReadFailure;
};
errdefer b.allocator.free(stdout);
@@ -2770,7 +2770,7 @@ fn dumpBadDirnameHelp(
defer debug.unlockStdErr();
const stderr: fs.File = .stderr();
- const w = stderr.writer();
+ const w = stderr.deprecatedWriter();
try w.print(msg, args);
const tty_config = std.io.tty.detectConfig(stderr);
@@ -2785,7 +2785,7 @@ fn dumpBadDirnameHelp(
if (asking_step) |as| {
tty_config.setColor(w, .red) catch {};
- try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
+ try stderr.deprecatedWriter().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
tty_config.setColor(w, .reset) catch {};
as.dump(stderr);
@@ -2803,7 +2803,7 @@ pub fn dumpBadGetPathHelp(
src_builder: *Build,
asking_step: ?*Step,
) anyerror!void {
- const w = stderr.writer();
+ const w = stderr.deprecatedWriter();
try w.print(
\\getPath() was called on a GeneratedFile that wasn't built yet.
\\ source package path: {s}
@@ -2822,7 +2822,7 @@ pub fn dumpBadGetPathHelp(
s.dump(stderr);
if (asking_step) |as| {
tty_config.setColor(w, .red) catch {};
- try stderr.writer().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
+ try stderr.deprecatedWriter().print(" The step '{s}' that is missing a dependency on the above step was created by this stack trace:\n", .{as.name});
tty_config.setColor(w, .reset) catch {};
as.dump(stderr);
lib/std/builtin.zig
@@ -34,20 +34,14 @@ pub const StackTrace = struct {
index: usize,
instruction_addresses: []usize,
- pub fn format(
- self: StackTrace,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
+ pub fn format(self: StackTrace, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ if (fmt.len != 0) unreachable;
// TODO: re-evaluate whether to use format() methods at all.
// Until then, avoid an error when using GeneralPurposeAllocator with WebAssembly
// where it tries to call detectTTYConfig here.
if (builtin.os.tag == .freestanding) return;
- _ = options;
const debug_info = std.debug.getSelfDebugInfo() catch |err| {
return writer.print("\nUnable to print stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
};
lib/std/debug.zig
@@ -12,6 +12,7 @@ const windows = std.os.windows;
const native_arch = builtin.cpu.arch;
const native_os = builtin.os.tag;
const native_endian = native_arch.endian();
+const Writer = std.io.Writer;
pub const MemoryAccessor = @import("debug/MemoryAccessor.zig");
pub const FixedBufferReader = @import("debug/FixedBufferReader.zig");
@@ -204,13 +205,26 @@ pub fn unlockStdErr() void {
std.Progress.unlockStdErr();
}
+/// Allows the caller to freely write to stderr until `unlockStdErr` is called.
+///
+/// During the lock, any `std.Progress` information is cleared from the terminal.
+///
+/// Returns a `Writer` with an empty buffer, meaning that it is
+/// effectively unbuffered and does not need to be flushed.
+pub fn lockStderrWriter(buffer: []u8) *Writer {
+ return std.Progress.lockStderrWriter(buffer);
+}
+
+pub fn unlockStderrWriter() void {
+ std.Progress.unlockStderrWriter();
+}
+
/// Print to stderr, unbuffered, and silently returning on failure. Intended
-/// for use in "printf debugging." Use `std.log` functions for proper logging.
+/// for use in "printf debugging". Use `std.log` functions for proper logging.
pub fn print(comptime fmt: []const u8, args: anytype) void {
- lockStdErr();
- defer unlockStdErr();
- const stderr = fs.File.stderr().writer();
- nosuspend stderr.print(fmt, args) catch return;
+ const bw = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
+ nosuspend bw.print(fmt, args) catch return;
}
pub fn getStderrMutex() *std.Thread.Mutex {
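A minimal sketch of the locked-stderr writer pattern introduced above, assuming only the lockStderrWriter/unlockStderrWriter API from this commit; the reportProgress helper is illustrative.
const std = @import("std");
fn reportProgress(done: usize, total: usize) void {
    // An empty buffer yields an unbuffered writer, so no flush is required.
    const w = std.debug.lockStderrWriter(&.{});
    defer std.debug.unlockStderrWriter();
    w.print("{d}/{d} tasks complete\n", .{ done, total }) catch return;
}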
@@ -232,50 +246,44 @@ pub fn getSelfDebugInfo() !*SelfInfo {
/// Tries to print a hexadecimal view of the bytes, unbuffered, and ignores any error returned.
/// Obtains the stderr mutex while dumping.
pub fn dumpHex(bytes: []const u8) void {
- lockStdErr();
- defer unlockStdErr();
- dumpHexFallible(bytes) catch {};
-}
-
-/// Prints a hexadecimal view of the bytes, unbuffered, returning any error that occurs.
-pub fn dumpHexFallible(bytes: []const u8) !void {
- const stderr: fs.File = .stderr();
- const ttyconf = std.io.tty.detectConfig(stderr);
- const writer = stderr.writer();
- try dumpHexInternal(bytes, ttyconf, writer);
+ const bw = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
+ const ttyconf = std.io.tty.detectConfig(.stderr());
+ dumpHexFallible(bw, ttyconf, bytes) catch {};
}
-fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytype) !void {
+/// Prints a hexadecimal view of the bytes, returning any error that occurs.
+pub fn dumpHexFallible(bw: *Writer, ttyconf: std.io.tty.Config, bytes: []const u8) !void {
var chunks = mem.window(u8, bytes, 16, 16);
while (chunks.next()) |window| {
// 1. Print the address.
const address = (@intFromPtr(bytes.ptr) + 0x10 * (std.math.divCeil(usize, chunks.index orelse bytes.len, 16) catch unreachable)) - 0x10;
- try ttyconf.setColor(writer, .dim);
+ try ttyconf.setColor(bw, .dim);
// We print the address in lowercase and the bytes in uppercase hexadecimal to distinguish them more.
// Also, make sure all lines are aligned by padding the address.
- try writer.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 });
- try ttyconf.setColor(writer, .reset);
+ try bw.print("{x:0>[1]} ", .{ address, @sizeOf(usize) * 2 });
+ try ttyconf.setColor(bw, .reset);
// 2. Print the bytes.
for (window, 0..) |byte, index| {
- try writer.print("{X:0>2} ", .{byte});
- if (index == 7) try writer.writeByte(' ');
+ try bw.print("{X:0>2} ", .{byte});
+ if (index == 7) try bw.writeByte(' ');
}
- try writer.writeByte(' ');
+ try bw.writeByte(' ');
if (window.len < 16) {
var missing_columns = (16 - window.len) * 3;
if (window.len < 8) missing_columns += 1;
- try writer.writeByteNTimes(' ', missing_columns);
+ try bw.splatByteAll(' ', missing_columns);
}
// 3. Print the characters.
for (window) |byte| {
if (std.ascii.isPrint(byte)) {
- try writer.writeByte(byte);
+ try bw.writeByte(byte);
} else {
// Related: https://github.com/ziglang/zig/issues/7600
if (ttyconf == .windows_api) {
- try writer.writeByte('.');
+ try bw.writeByte('.');
continue;
}
@@ -283,22 +291,23 @@ fn dumpHexInternal(bytes: []const u8, ttyconf: std.io.tty.Config, writer: anytyp
// We don't want to do this for all control codes because most control codes apart from
// the ones that Zig has escape sequences for are likely not very useful to print as symbols.
switch (byte) {
- '\n' => try writer.writeAll("␊"),
- '\r' => try writer.writeAll("␍"),
- '\t' => try writer.writeAll("␉"),
- else => try writer.writeByte('.'),
+ '\n' => try bw.writeAll("␊"),
+ '\r' => try bw.writeAll("␍"),
+ '\t' => try bw.writeAll("␉"),
+ else => try bw.writeByte('.'),
}
}
}
- try writer.writeByte('\n');
+ try bw.writeByte('\n');
}
}
-test dumpHexInternal {
+test dumpHexFallible {
const bytes: []const u8 = &.{ 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, 0x01, 0x12, 0x13 };
- var output = std.ArrayList(u8).init(std.testing.allocator);
- defer output.deinit();
- try dumpHexInternal(bytes, .no_color, output.writer());
+ var aw: std.io.Writer.Allocating = .init(std.testing.allocator);
+ defer aw.deinit();
+
+ try dumpHexFallible(&aw.interface, .no_color, bytes);
const expected = try std.fmt.allocPrint(std.testing.allocator,
\\{x:0>[2]} 00 11 22 33 44 55 66 77 88 99 AA BB CC DD EE FF .."3DUfw........
\\{x:0>[2]} 01 12 13 ...
@@ -309,34 +318,36 @@ test dumpHexInternal {
@sizeOf(usize) * 2,
});
defer std.testing.allocator.free(expected);
- try std.testing.expectEqualStrings(expected, output.items);
+ try std.testing.expectEqualStrings(expected, aw.getWritten());
}
/// Tries to print the current stack trace to stderr, unbuffered, and ignores any error returned.
-/// TODO multithreaded awareness
pub fn dumpCurrentStackTrace(start_addr: ?usize) void {
- nosuspend {
- if (builtin.target.cpu.arch.isWasm()) {
- if (native_os == .wasi) {
- const stderr = fs.File.stderr().writer();
- stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
- }
- return;
- }
- const stderr = fs.File.stderr().writer();
- if (builtin.strip_debug_info) {
- stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
- return;
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
+ nosuspend dumpCurrentStackTraceToWriter(start_addr, stderr) catch return;
+}
+
+/// Prints the current stack trace to the provided writer.
+pub fn dumpCurrentStackTraceToWriter(start_addr: ?usize, writer: *Writer) !void {
+ if (builtin.target.cpu.arch.isWasm()) {
+ if (native_os == .wasi) {
+ try writer.writeAll("Unable to dump stack trace: not implemented for Wasm\n");
}
- const debug_info = getSelfDebugInfo() catch |err| {
- stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
- return;
- };
- writeCurrentStackTrace(stderr, debug_info, io.tty.detectConfig(fs.File.stderr()), start_addr) catch |err| {
- stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return;
- return;
- };
+ return;
}
+ if (builtin.strip_debug_info) {
+ try writer.writeAll("Unable to dump stack trace: debug info stripped\n");
+ return;
+ }
+ const debug_info = getSelfDebugInfo() catch |err| {
+ try writer.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)});
+ return;
+ };
+ writeCurrentStackTrace(writer, debug_info, io.tty.detectConfig(.stderr()), start_addr) catch |err| {
+ try writer.print("Unable to dump stack trace: {s}\n", .{@errorName(err)});
+ return;
+ };
}
pub const have_ucontext = posix.ucontext_t != void;
@@ -402,16 +413,14 @@ pub inline fn getContext(context: *ThreadContext) bool {
/// Tries to print the stack trace starting from the supplied base pointer to stderr,
/// unbuffered, and ignores any error returned.
/// TODO multithreaded awareness
-pub fn dumpStackTraceFromBase(context: *ThreadContext) void {
+pub fn dumpStackTraceFromBase(context: *ThreadContext, stderr: *Writer) void {
nosuspend {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
- const stderr = fs.File.stderr().writer();
stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
}
return;
}
- const stderr = fs.File.stderr().writer();
if (builtin.strip_debug_info) {
stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
return;
@@ -420,7 +429,7 @@ pub fn dumpStackTraceFromBase(context: *ThreadContext) void {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return;
};
- const tty_config = io.tty.detectConfig(fs.File.stderr());
+ const tty_config = io.tty.detectConfig(.stderr());
if (native_os == .windows) {
// On x86_64 and aarch64, the stack will be unwound using RtlVirtualUnwind using the context
// provided by the exception handler. On x86, RtlVirtualUnwind doesn't exist. Instead, a new backtrace
@@ -510,21 +519,23 @@ pub fn dumpStackTrace(stack_trace: std.builtin.StackTrace) void {
nosuspend {
if (builtin.target.cpu.arch.isWasm()) {
if (native_os == .wasi) {
- const stderr = fs.File.stderr().writer();
- stderr.print("Unable to dump stack trace: not implemented for Wasm\n", .{}) catch return;
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
+ stderr.writeAll("Unable to dump stack trace: not implemented for Wasm\n") catch return;
}
return;
}
- const stderr = fs.File.stderr().writer();
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
if (builtin.strip_debug_info) {
- stderr.print("Unable to dump stack trace: debug info stripped\n", .{}) catch return;
+ stderr.writeAll("Unable to dump stack trace: debug info stripped\n") catch return;
return;
}
const debug_info = getSelfDebugInfo() catch |err| {
stderr.print("Unable to dump stack trace: Unable to open debug info: {s}\n", .{@errorName(err)}) catch return;
return;
};
- writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(fs.File.stderr())) catch |err| {
+ writeStackTrace(stack_trace, stderr, debug_info, io.tty.detectConfig(.stderr())) catch |err| {
stderr.print("Unable to dump stack trace: {s}\n", .{@errorName(err)}) catch return;
return;
};
@@ -573,14 +584,13 @@ pub fn panicExtra(
const size = 0x1000;
const trunc_msg = "(msg truncated)";
var buf: [size + trunc_msg.len]u8 = undefined;
+ var bw: Writer = .fixed(buf[0..size]);
// a minor annoyance with this is that it will result in the NoSpaceLeft
// error being part of the @panic stack trace (but that error should
// only happen rarely)
- const msg = std.fmt.bufPrint(buf[0..size], format, args) catch |err| switch (err) {
- error.NoSpaceLeft => blk: {
- @memcpy(buf[size..], trunc_msg);
- break :blk &buf;
- },
+ const msg = if (bw.print(format, args)) |_| bw.buffered() else |_| blk: {
+ @memcpy(buf[size..], trunc_msg);
+ break :blk &buf;
};
std.builtin.panic.call(msg, ret_addr);
}
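A short sketch of the fixed-buffer Writer pattern that replaces std.fmt.bufPrint in panicExtra above; the test values are illustrative.
const std = @import("std");
test "format into a fixed buffer via std.io.Writer" {
    var buf: [32]u8 = undefined;
    var w: std.io.Writer = .fixed(&buf);
    try w.print("{d} + {d} = {d}", .{ 2, 2, 4 });
    // buffered() returns the bytes written so far, much like bufPrint's return slice.
    try std.testing.expectEqualStrings("2 + 2 = 4", w.buffered());
}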
@@ -675,10 +685,9 @@ pub fn defaultPanic(
_ = panicking.fetchAdd(1, .seq_cst);
{
- lockStdErr();
- defer unlockStdErr();
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
- const stderr = fs.File.stderr().writer();
if (builtin.single_threaded) {
stderr.print("panic: ", .{}) catch posix.abort();
} else {
@@ -688,7 +697,7 @@ pub fn defaultPanic(
stderr.print("{s}\n", .{msg}) catch posix.abort();
if (@errorReturnTrace()) |t| dumpStackTrace(t.*);
- dumpCurrentStackTrace(first_trace_addr orelse @returnAddress());
+ dumpCurrentStackTraceToWriter(first_trace_addr orelse @returnAddress(), stderr) catch {};
}
waitForOtherThreadToFinishPanicking();
@@ -723,7 +732,7 @@ fn waitForOtherThreadToFinishPanicking() void {
pub fn writeStackTrace(
stack_trace: std.builtin.StackTrace,
- out_stream: anytype,
+ writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
) !void {
@@ -736,15 +745,15 @@ pub fn writeStackTrace(
frame_index = (frame_index + 1) % stack_trace.instruction_addresses.len;
}) {
const return_address = stack_trace.instruction_addresses[frame_index];
- try printSourceAtAddress(debug_info, out_stream, return_address - 1, tty_config);
+ try printSourceAtAddress(debug_info, writer, return_address - 1, tty_config);
}
if (stack_trace.index > stack_trace.instruction_addresses.len) {
const dropped_frames = stack_trace.index - stack_trace.instruction_addresses.len;
- tty_config.setColor(out_stream, .bold) catch {};
- try out_stream.print("({d} additional stack frames skipped...)\n", .{dropped_frames});
- tty_config.setColor(out_stream, .reset) catch {};
+ tty_config.setColor(writer, .bold) catch {};
+ try writer.print("({d} additional stack frames skipped...)\n", .{dropped_frames});
+ tty_config.setColor(writer, .reset) catch {};
}
}
@@ -954,7 +963,7 @@ pub const StackIterator = struct {
};
pub fn writeCurrentStackTrace(
- out_stream: anytype,
+ writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
start_addr: ?usize,
@@ -962,7 +971,7 @@ pub fn writeCurrentStackTrace(
if (native_os == .windows) {
var context: ThreadContext = undefined;
assert(getContext(&context));
- return writeStackTraceWindows(out_stream, debug_info, tty_config, &context, start_addr);
+ return writeStackTraceWindows(writer, debug_info, tty_config, &context, start_addr);
}
var context: ThreadContext = undefined;
const has_context = getContext(&context);
@@ -973,7 +982,7 @@ pub fn writeCurrentStackTrace(
defer it.deinit();
while (it.next()) |return_address| {
- printLastUnwindError(&it, debug_info, out_stream, tty_config);
+ printLastUnwindError(&it, debug_info, writer, tty_config);
// On arm64 macOS, the address of the last frame is 0x0 rather than 0x1 as on x86_64 macOS,
// therefore, we do a check for `return_address == 0` before subtracting 1 from it to avoid
@@ -981,8 +990,8 @@ pub fn writeCurrentStackTrace(
// condition on the subsequent iteration and return `null` thus terminating the loop.
// same behaviour for x86-windows-msvc
const address = return_address -| 1;
- try printSourceAtAddress(debug_info, out_stream, address, tty_config);
- } else printLastUnwindError(&it, debug_info, out_stream, tty_config);
+ try printSourceAtAddress(debug_info, writer, address, tty_config);
+ } else printLastUnwindError(&it, debug_info, writer, tty_config);
}
pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const windows.CONTEXT) usize {
@@ -1042,7 +1051,7 @@ pub noinline fn walkStackWindows(addresses: []usize, existing_context: ?*const w
}
pub fn writeStackTraceWindows(
- out_stream: anytype,
+ writer: *Writer,
debug_info: *SelfInfo,
tty_config: io.tty.Config,
context: *const windows.CONTEXT,
@@ -1058,14 +1067,14 @@ pub fn writeStackTraceWindows(
return;
} else 0;
for (addrs[start_i..]) |addr| {
- try printSourceAtAddress(debug_info, out_stream, addr - 1, tty_config);
+ try printSourceAtAddress(debug_info, writer, addr - 1, tty_config);
}
}
-fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void {
+fn printUnknownSource(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address);
return printLineInfo(
- out_stream,
+ writer,
null,
address,
"???",
@@ -1075,38 +1084,38 @@ fn printUnknownSource(debug_info: *SelfInfo, out_stream: anytype, address: usize
);
}
-fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, out_stream: anytype, tty_config: io.tty.Config) void {
+fn printLastUnwindError(it: *StackIterator, debug_info: *SelfInfo, writer: *Writer, tty_config: io.tty.Config) void {
if (!have_ucontext) return;
if (it.getLastError()) |unwind_error| {
- printUnwindError(debug_info, out_stream, unwind_error.address, unwind_error.err, tty_config) catch {};
+ printUnwindError(debug_info, writer, unwind_error.address, unwind_error.err, tty_config) catch {};
}
}
-fn printUnwindError(debug_info: *SelfInfo, out_stream: anytype, address: usize, err: UnwindError, tty_config: io.tty.Config) !void {
+fn printUnwindError(debug_info: *SelfInfo, writer: *Writer, address: usize, err: UnwindError, tty_config: io.tty.Config) !void {
const module_name = debug_info.getModuleNameForAddress(address) orelse "???";
- try tty_config.setColor(out_stream, .dim);
+ try tty_config.setColor(writer, .dim);
if (err == error.MissingDebugInfo) {
- try out_stream.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address });
+ try writer.print("Unwind information for `{s}:0x{x}` was not available, trace may be incomplete\n\n", .{ module_name, address });
} else {
- try out_stream.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err });
+ try writer.print("Unwind error at address `{s}:0x{x}` ({}), trace may be incomplete\n\n", .{ module_name, address, err });
}
- try tty_config.setColor(out_stream, .reset);
+ try tty_config.setColor(writer, .reset);
}
-pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address: usize, tty_config: io.tty.Config) !void {
+pub fn printSourceAtAddress(debug_info: *SelfInfo, writer: *Writer, address: usize, tty_config: io.tty.Config) !void {
const module = debug_info.getModuleForAddress(address) catch |err| switch (err) {
- error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config),
+ error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config),
else => return err,
};
const symbol_info = module.getSymbolAtAddress(debug_info.allocator, address) catch |err| switch (err) {
- error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, out_stream, address, tty_config),
+ error.MissingDebugInfo, error.InvalidDebugInfo => return printUnknownSource(debug_info, writer, address, tty_config),
else => return err,
};
defer if (symbol_info.source_location) |sl| debug_info.allocator.free(sl.file_name);
return printLineInfo(
- out_stream,
+ writer,
symbol_info.source_location,
address,
symbol_info.name,
@@ -1117,7 +1126,7 @@ pub fn printSourceAtAddress(debug_info: *SelfInfo, out_stream: anytype, address:
}
fn printLineInfo(
- out_stream: anytype,
+ writer: *Writer,
source_location: ?SourceLocation,
address: usize,
symbol_name: []const u8,
@@ -1126,34 +1135,34 @@ fn printLineInfo(
comptime printLineFromFile: anytype,
) !void {
nosuspend {
- try tty_config.setColor(out_stream, .bold);
+ try tty_config.setColor(writer, .bold);
if (source_location) |*sl| {
- try out_stream.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column });
+ try writer.print("{s}:{d}:{d}", .{ sl.file_name, sl.line, sl.column });
} else {
- try out_stream.writeAll("???:?:?");
+ try writer.writeAll("???:?:?");
}
- try tty_config.setColor(out_stream, .reset);
- try out_stream.writeAll(": ");
- try tty_config.setColor(out_stream, .dim);
- try out_stream.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name });
- try tty_config.setColor(out_stream, .reset);
- try out_stream.writeAll("\n");
+ try tty_config.setColor(writer, .reset);
+ try writer.writeAll(": ");
+ try tty_config.setColor(writer, .dim);
+ try writer.print("0x{x} in {s} ({s})", .{ address, symbol_name, compile_unit_name });
+ try tty_config.setColor(writer, .reset);
+ try writer.writeAll("\n");
// Show the matching source code line if possible
if (source_location) |sl| {
- if (printLineFromFile(out_stream, sl)) {
+ if (printLineFromFile(writer, sl)) {
if (sl.column > 0) {
// The caret already takes one char
const space_needed = @as(usize, @intCast(sl.column - 1));
- try out_stream.writeByteNTimes(' ', space_needed);
- try tty_config.setColor(out_stream, .green);
- try out_stream.writeAll("^");
- try tty_config.setColor(out_stream, .reset);
+ try writer.splatByteAll(' ', space_needed);
+ try tty_config.setColor(writer, .green);
+ try writer.writeAll("^");
+ try tty_config.setColor(writer, .reset);
}
- try out_stream.writeAll("\n");
+ try writer.writeAll("\n");
} else |err| switch (err) {
error.EndOfFile, error.FileNotFound => {},
error.BadPathName => {},
@@ -1164,7 +1173,7 @@ fn printLineInfo(
}
}
-fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation) !void {
+fn printLineFromFileAnyOs(writer: *Writer, source_location: SourceLocation) !void {
// Need this to always block even in async I/O mode, because this could potentially
// be called from e.g. the event loop code crashing.
var f = try fs.cwd().openFile(source_location.file_name, .{});
@@ -1197,31 +1206,31 @@ fn printLineFromFileAnyOs(out_stream: anytype, source_location: SourceLocation)
if (mem.indexOfScalar(u8, slice, '\n')) |pos| {
const line = slice[0 .. pos + 1];
mem.replaceScalar(u8, line, '\t', ' ');
- return out_stream.writeAll(line);
+ return writer.writeAll(line);
} else { // Line is the last inside the buffer, and requires another read to find delimiter. Alternatively the file ends.
mem.replaceScalar(u8, slice, '\t', ' ');
- try out_stream.writeAll(slice);
+ try writer.writeAll(slice);
while (amt_read == buf.len) {
amt_read = try f.read(buf[0..]);
if (mem.indexOfScalar(u8, buf[0..amt_read], '\n')) |pos| {
const line = buf[0 .. pos + 1];
mem.replaceScalar(u8, line, '\t', ' ');
- return out_stream.writeAll(line);
+ return writer.writeAll(line);
} else {
const line = buf[0..amt_read];
mem.replaceScalar(u8, line, '\t', ' ');
- try out_stream.writeAll(line);
+ try writer.writeAll(line);
}
}
// Make sure printing last line of file inserts extra newline
- try out_stream.writeByte('\n');
+ try writer.writeByte('\n');
}
}
test printLineFromFileAnyOs {
- var output = std.ArrayList(u8).init(std.testing.allocator);
- defer output.deinit();
- const output_stream = output.writer();
+ var aw: Writer.Allocating = .init(std.testing.allocator);
+ defer aw.deinit();
+ const output_stream = &aw.interface;
const allocator = std.testing.allocator;
const join = std.fs.path.join;
@@ -1243,8 +1252,8 @@ test printLineFromFileAnyOs {
try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }));
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
- try expectEqualStrings("no new lines in this file, but one is printed anyway\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("no new lines in this file, but one is printed anyway\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
{
const path = try fs.path.join(allocator, &.{ test_dir_path, "three_lines.zig" });
@@ -1259,12 +1268,12 @@ test printLineFromFileAnyOs {
});
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
- try expectEqualStrings("1\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("1\n", aw.getWritten());
+ aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 3, .column = 0 });
- try expectEqualStrings("3\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("3\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("line_overlaps_page_boundary.zig", .{});
@@ -1273,14 +1282,15 @@ test printLineFromFileAnyOs {
defer allocator.free(path);
const overlap = 10;
- var writer = file.writer();
- try writer.writeByteNTimes('a', std.heap.page_size_min - overlap);
+ var file_writer = file.writer(&.{});
+ const writer = &file_writer.interface;
+ try writer.splatByteAll('a', std.heap.page_size_min - overlap);
try writer.writeByte('\n');
- try writer.writeByteNTimes('a', overlap);
+ try writer.splatByteAll('a', overlap);
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 });
- try expectEqualStrings(("a" ** overlap) ++ "\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings(("a" ** overlap) ++ "\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("file_ends_on_page_boundary.zig", .{});
@@ -1288,12 +1298,13 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "file_ends_on_page_boundary.zig" });
defer allocator.free(path);
- var writer = file.writer();
- try writer.writeByteNTimes('a', std.heap.page_size_max);
+ var file_writer = file.writer(&.{});
+ const writer = &file_writer.interface;
+ try writer.splatByteAll('a', std.heap.page_size_max);
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
- try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings(("a" ** std.heap.page_size_max) ++ "\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("very_long_first_line_spanning_multiple_pages.zig", .{});
@@ -1301,24 +1312,25 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "very_long_first_line_spanning_multiple_pages.zig" });
defer allocator.free(path);
- var writer = file.writer();
- try writer.writeByteNTimes('a', 3 * std.heap.page_size_max);
+ var file_writer = file.writer(&.{});
+ const writer = &file_writer.interface;
+ try writer.splatByteAll('a', 3 * std.heap.page_size_max);
try expectError(error.EndOfFile, printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 }));
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
- try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "\n", aw.getWritten());
+ aw.clearRetainingCapacity();
try writer.writeAll("a\na");
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 1, .column = 0 });
- try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings(("a" ** (3 * std.heap.page_size_max)) ++ "a\n", aw.getWritten());
+ aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = 2, .column = 0 });
- try expectEqualStrings("a\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("a\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
{
const file = try test_dir.dir.createFile("file_of_newlines.zig", .{});
@@ -1326,18 +1338,19 @@ test printLineFromFileAnyOs {
const path = try fs.path.join(allocator, &.{ test_dir_path, "file_of_newlines.zig" });
defer allocator.free(path);
- var writer = file.writer();
+ var file_writer = file.writer(&.{});
+ const writer = &file_writer.interface;
const real_file_start = 3 * std.heap.page_size_min;
- try writer.writeByteNTimes('\n', real_file_start);
+ try writer.splatByteAll('\n', real_file_start);
try writer.writeAll("abc\ndef");
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 1, .column = 0 });
- try expectEqualStrings("abc\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("abc\n", aw.getWritten());
+ aw.clearRetainingCapacity();
try printLineFromFileAnyOs(output_stream, .{ .file_name = path, .line = real_file_start + 2, .column = 0 });
- try expectEqualStrings("def\n", output.items);
- output.clearRetainingCapacity();
+ try expectEqualStrings("def\n", aw.getWritten());
+ aw.clearRetainingCapacity();
}
}
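The tests above replace the ArrayList(u8)-backed writer with an allocating Writer, and the old writeByteNTimes calls with splatByteAll. A minimal standalone sketch of that capture pattern, assuming Writer here is std.io.Writer as the imports elsewhere in this commit indicate:

const std = @import("std");

test "capture output with Writer.Allocating" {
    var aw: std.io.Writer.Allocating = .init(std.testing.allocator);
    defer aw.deinit();
    const w = &aw.interface;
    // writeAll is unchanged; splatByteAll replaces writeByteNTimes.
    try w.writeAll("abc");
    try w.splatByteAll('-', 3);
    try std.testing.expectEqualStrings("abc---", aw.getWritten());
    // Reuse the buffer between assertions, as the tests above do.
    aw.clearRetainingCapacity();
}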
@@ -1461,7 +1474,8 @@ fn handleSegfaultPosix(sig: i32, info: *const posix.siginfo_t, ctx_ptr: ?*anyopa
}
fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque) void {
- const stderr = fs.File.stderr().writer();
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
_ = switch (sig) {
posix.SIG.SEGV => if (native_arch == .x86_64 and native_os == .linux and code == 128) // SI_KERNEL
// x86_64 doesn't have a full 64-bit virtual address space.
@@ -1471,7 +1485,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque)
// but can also happen when no addressable memory is involved;
// for example when reading/writing model-specific registers
// by executing `rdmsr` or `wrmsr` in user-space (unprivileged mode).
- stderr.print("General protection exception (no address available)\n", .{})
+ stderr.writeAll("General protection exception (no address available)\n")
else
stderr.print("Segmentation fault at address 0x{x}\n", .{addr}),
posix.SIG.ILL => stderr.print("Illegal instruction at address 0x{x}\n", .{addr}),
@@ -1509,7 +1523,7 @@ fn dumpSegfaultInfoPosix(sig: i32, code: i32, addr: usize, ctx_ptr: ?*anyopaque)
}, @ptrCast(ctx)).__mcontext_data;
}
relocateContext(&new_ctx);
- dumpStackTraceFromBase(&new_ctx);
+ dumpStackTraceFromBase(&new_ctx, stderr);
},
else => {},
}
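The segfault reporting paths now take a locked stderr Writer instead of constructing a writer from fs.File.stderr() at each call site. A hedged sketch of that pattern, assuming the lock helpers are exposed as std.debug.lockStderrWriter and std.debug.unlockStderrWriter:

const std = @import("std");

// Hypothetical reporting helper mirroring the call sites above: acquire the
// locked stderr *Writer, print, and ignore write errors as the crash paths do.
fn reportFault(addr: usize) void {
    const stderr = std.debug.lockStderrWriter(&.{});
    defer std.debug.unlockStderrWriter();
    stderr.print("fault at address 0x{x}\n", .{addr}) catch {};
}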
@@ -1539,10 +1553,10 @@ fn handleSegfaultWindowsExtra(info: *windows.EXCEPTION_POINTERS, msg: u8, label:
_ = panicking.fetchAdd(1, .seq_cst);
{
- lockStdErr();
- defer unlockStdErr();
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
- dumpSegfaultInfoWindows(info, msg, label);
+ dumpSegfaultInfoWindows(info, msg, label, stderr);
}
waitForOtherThreadToFinishPanicking();
@@ -1556,8 +1570,7 @@ fn handleSegfaultWindowsExtra(info: *windows.EXCEPTION_POINTERS, msg: u8, label:
posix.abort();
}
-fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8) void {
- const stderr = fs.File.stderr().writer();
+fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[]const u8, stderr: *Writer) void {
_ = switch (msg) {
0 => stderr.print("{s}\n", .{label.?}),
1 => stderr.print("Segmentation fault at address 0x{x}\n", .{info.ExceptionRecord.ExceptionInformation[1]}),
@@ -1565,7 +1578,7 @@ fn dumpSegfaultInfoWindows(info: *windows.EXCEPTION_POINTERS, msg: u8, label: ?[
else => unreachable,
} catch posix.abort();
- dumpStackTraceFromBase(info.ContextRecord);
+ dumpStackTraceFromBase(info.ContextRecord, stderr);
}
pub fn dumpStackPointerAddr(prefix: []const u8) void {
@@ -1588,10 +1601,10 @@ test "manage resources correctly" {
// self-hosted debug info is still too buggy
if (builtin.zig_backend != .stage2_llvm) return error.SkipZigTest;
- const writer = std.io.null_writer;
+ var writer: std.io.Writer = .discarding(&.{});
var di = try SelfInfo.open(testing.allocator);
defer di.deinit();
- try printSourceAtAddress(&di, writer, showMyTrace(), io.tty.detectConfig(std.fs.File.stderr()));
+ try printSourceAtAddress(&di, &writer, showMyTrace(), io.tty.detectConfig(.stderr()));
}
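The test no longer uses std.io.null_writer; a discarding Writer takes its place. A small sketch of that substitution:

const std = @import("std");

test "discarding Writer as a null sink" {
    // .discarding accepts a scratch buffer; an empty one is fine, matching
    // the usage in the test above.
    var w: std.io.Writer = .discarding(&.{});
    try w.writeAll("ignored");
    try w.print("also ignored: {d}\n", .{42});
}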
noinline fn showMyTrace() usize {
@@ -1657,8 +1670,9 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
pub fn dump(t: @This()) void {
if (!enabled) return;
- const tty_config = io.tty.detectConfig(std.fs.File.stderr());
- const stderr = fs.File.stderr().writer();
+ const tty_config = io.tty.detectConfig(.stderr());
+ const stderr = lockStderrWriter(&.{});
+ defer unlockStderrWriter();
const end = @min(t.index, size);
const debug_info = getSelfDebugInfo() catch |err| {
stderr.print(
@@ -1688,7 +1702,7 @@ pub fn ConfigurableTrace(comptime size: usize, comptime stack_frame_count: usize
t: @This(),
comptime fmt: []const u8,
options: std.fmt.FormatOptions,
- writer: anytype,
+ writer: *Writer,
) !void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, t);
_ = options;
lib/std/fmt.zig
@@ -1,17 +1,20 @@
//! String formatting and parsing.
-const std = @import("std.zig");
const builtin = @import("builtin");
+const std = @import("std.zig");
const io = std.io;
const math = std.math;
const assert = std.debug.assert;
const mem = std.mem;
-const unicode = std.unicode;
const meta = std.meta;
const lossyCast = math.lossyCast;
const expectFmt = std.testing.expectFmt;
const testing = std.testing;
+const Allocator = std.mem.Allocator;
+const Writer = std.io.Writer;
+
+pub const float = @import("fmt/float.zig");
pub const default_max_depth = 3;
@@ -24,11 +27,14 @@ pub const Alignment = enum {
const default_alignment = .right;
const default_fill_char = ' ';
-pub const FormatOptions = struct {
+/// Deprecated in favor of `Options`.
+pub const FormatOptions = Options;
+
+pub const Options = struct {
precision: ?usize = null,
width: ?usize = null,
alignment: Alignment = default_alignment,
- fill: u21 = default_fill_char,
+ fill: u8 = default_fill_char,
};
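With fill narrowed to a single byte, padding behaves as before for the common ASCII case. A quick sketch, assuming bufPrint and the fill/alignment/width syntax documented below:

const std = @import("std");

test "byte fill with width and alignment" {
    var buf: [16]u8 = undefined;
    // '0' is the fill byte, '>' right-aligns, 6 is the total width.
    const s = try std.fmt.bufPrint(&buf, "{d:0>6}", .{@as(u32, 42)});
    try std.testing.expectEqualStrings("000042", s);
}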
/// Renders fmt string with args, calling `writer` with slices of bytes.
@@ -45,9 +51,10 @@ pub const FormatOptions = struct {
/// - when using a field name, you are required to enclose the field name (an identifier) in square
/// brackets, e.g. {[score]...} as opposed to the numeric index form which can be written e.g. {2...}
/// - *specifier* is a type-dependent formatting option that determines how a type should be formatted (see below)
-/// - *fill* is a single unicode codepoint which is used to pad the formatted text
+/// - *fill* is a single byte which is used to pad the formatted text
/// - *alignment* is one of the three bytes '<', '^', or '>' to make the text left-, center-, or right-aligned, respectively
-/// - *width* is the total width of the field in unicode codepoints
+/// - *width* is the total width of the field in bytes. This is generally only
+/// useful for ASCII text, such as numbers.
/// - *precision* specifies how many decimals a formatted number should have
///
/// Note that most of the parameters are optional and may be omitted. Also you can leave out separators like `:` and `.` when
@@ -56,16 +63,20 @@ pub const FormatOptions = struct {
/// one has to specify *alignment* as well, as otherwise the digit following `:` is interpreted as *width*, not *fill*.
///
/// The *specifier* has several options for types:
-/// - `x` and `X`: output numeric value in hexadecimal notation
+/// - `x` and `X`: output numeric value in hexadecimal notation, or string in hexadecimal bytes
/// - `s`:
/// - for pointer-to-many and C pointers of u8, print as a C-string using zero-termination
/// - for slices of u8, print the entire slice as a string without zero-termination
+/// - `b64`: output string as standard base64
/// - `e`: output floating point value in scientific notation
/// - `d`: output numeric value in decimal notation
/// - `b`: output integer value in binary notation
/// - `o`: output integer value in octal notation
/// - `c`: output integer as an ASCII character. Integer type must have at most 8 bits.
/// - `u`: output integer as a UTF-8 sequence. Integer type must have at most 21 bits.
+/// - `D`: output nanoseconds as duration
+/// - `B`: output bytes in SI units (decimal)
+/// - `Bi`: output bytes in IEC units (binary)
/// - `?`: output optional value as either the unwrapped value, or `null`; may be followed by a format specifier for the underlying value.
/// - `!`: output error union value as either the unwrapped value, or the formatted error value; may be followed by a format specifier for the underlying value.
/// - `*`: output the address of the value instead of the value itself.
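A few hedged illustrations of the specifier list above; the slice-as-hex behavior of `x` is the one this commit introduces:

const std = @import("std");

test "specifier examples" {
    var buf: [64]u8 = undefined;
    // 'x' on an integer: lowercase hexadecimal digits.
    try std.testing.expectEqualStrings("2a", try std.fmt.bufPrint(&buf, "{x}", .{@as(u8, 0x2a)}));
    // 'x' on a u8 slice: each byte rendered as two hex digits.
    const bytes: []const u8 = &.{ 0xde, 0xad };
    try std.testing.expectEqualStrings("dead", try std.fmt.bufPrint(&buf, "{x}", .{bytes}));
    // 'c' on a small integer: a single ASCII character.
    try std.testing.expectEqualStrings("A", try std.fmt.bufPrint(&buf, "{c}", .{@as(u8, 65)}));
}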
@@ -73,7 +84,7 @@ pub const FormatOptions = struct {
///
/// If a formatted user type contains a function of the type
/// ```
-/// pub fn format(value: ?, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void
+/// pub fn format(value: ?, comptime fmt: []const u8, options: std.fmt.Options, writer: anytype) !void
/// ```
/// with `?` being the type formatted, this function will be called instead of the default implementation.
/// This allows user types to be formatted in a logical manner instead of dumping all fields of the type.
@@ -81,11 +92,7 @@ pub const FormatOptions = struct {
/// A user type may be a `struct`, `vector`, `union` or `enum` type.
///
/// To print literal curly braces, escape them by writing them twice, e.g. `{{` or `}}`.
-pub fn format(
- writer: anytype,
- comptime fmt: []const u8,
- args: anytype,
-) !void {
+pub fn format(w: *Writer, comptime fmt: []const u8, args: anytype) Writer.Error!void {
const ArgsType = @TypeOf(args);
const args_type_info = @typeInfo(ArgsType);
if (args_type_info != .@"struct") {
@@ -97,7 +104,7 @@ pub fn format(
@compileError("32 arguments max are supported per format call");
}
- @setEvalBranchQuota(2000000);
+ @setEvalBranchQuota(fmt.len * 1000);
comptime var arg_state: ArgState = .{ .args_len = fields_info.len };
comptime var i = 0;
comptime var literal: []const u8 = "";
@@ -130,7 +137,7 @@ pub fn format(
// Write out the literal
if (literal.len != 0) {
- try writer.writeAll(literal);
+ try w.writeAll(literal);
literal = "";
}
@@ -157,7 +164,7 @@ pub fn format(
comptime assert(fmt[i] == '}');
i += 1;
- const placeholder = comptime Placeholder.parse(fmt[fmt_begin..fmt_end].*);
+ const placeholder = comptime Placeholder.parse(&(fmt[fmt_begin..fmt_end].*));
const arg_pos = comptime switch (placeholder.arg) {
.none => null,
.number => |pos| pos,
@@ -190,16 +197,15 @@ pub fn format(
const arg_to_print = comptime arg_state.nextArg(arg_pos) orelse
@compileError("too few arguments");
- try formatType(
- @field(args, fields_info[arg_to_print].name),
+ try w.printValue(
placeholder.specifier_arg,
- FormatOptions{
+ .{
.fill = placeholder.fill,
.alignment = placeholder.alignment,
.width = width,
.precision = precision,
},
- writer,
+ @field(args, fields_info[arg_to_print].name),
std.options.fmt_max_depth,
);
}
@@ -214,44 +220,41 @@ pub fn format(
}
}
+/// Deprecated in favor of `format`.
+pub fn deprecatedFormat(writer: anytype, comptime fmt: []const u8, args: anytype) !void {
+ var adapter = writer.adaptToNewApi();
+ return format(&adapter.new_interface, fmt, args) catch |err| switch (err) {
+ error.WriteFailed => return adapter.err.?,
+ };
+}
+
fn cacheString(str: anytype) []const u8 {
return &str;
}
pub const Placeholder = struct {
specifier_arg: []const u8,
- fill: u21,
+ fill: u8,
alignment: Alignment,
arg: Specifier,
width: Specifier,
precision: Specifier,
- pub fn parse(comptime str: anytype) Placeholder {
- const view = std.unicode.Utf8View.initComptime(&str);
- comptime var parser = Parser{
- .iter = view.iterator(),
- };
-
- // Parse the positional argument number
- const arg = comptime parser.specifier() catch |err|
- @compileError(@errorName(err));
-
- // Parse the format specifier
- const specifier_arg = comptime parser.until(':');
-
- // Skip the colon, if present
- if (comptime parser.char()) |ch| {
- if (ch != ':') {
- @compileError("expected : or }, found '" ++ unicode.utf8EncodeComptime(ch) ++ "'");
- }
+ pub fn parse(bytes: []const u8) Placeholder {
+ var parser: Parser = .{ .bytes = bytes, .i = 0 };
+ const arg = parser.specifier() catch |err| @compileError(@errorName(err));
+ const specifier_arg = parser.until(':');
+ if (parser.char()) |b| {
+ if (b != ':') @compileError("expected : or }, found '" ++ &[1]u8{b} ++ "'");
}
- // Parse the fill character, if present.
- // When the width field is also specified, the fill character must
+ // Parse the fill byte, if present.
+ //
+ // When the width field is also specified, the fill byte must
// be followed by an alignment specifier, unless it's '0' (zero)
- // (in which case it's handled as part of the width specifier)
- var fill: ?u21 = comptime if (parser.peek(1)) |ch|
- switch (ch) {
+ // (in which case it's handled as part of the width specifier).
+ var fill: ?u8 = if (parser.peek(1)) |b|
+ switch (b) {
'<', '^', '>' => parser.char(),
else => null,
}
@@ -259,8 +262,8 @@ pub const Placeholder = struct {
null;
// Parse the alignment parameter
- const alignment: ?Alignment = comptime if (parser.peek(0)) |ch| init: {
- switch (ch) {
+ const alignment: ?Alignment = if (parser.peek(0)) |b| init: {
+ switch (b) {
'<', '^', '>' => {
// consume the character
break :init switch (parser.char().?) {
@@ -276,29 +279,23 @@ pub const Placeholder = struct {
// When none of the fill character and the alignment specifier have
// been provided, check whether the width starts with a zero.
if (fill == null and alignment == null) {
- fill = comptime if (parser.peek(0) == '0') '0' else null;
+ fill = if (parser.peek(0) == '0') '0' else null;
}
// Parse the width parameter
- const width = comptime parser.specifier() catch |err|
- @compileError(@errorName(err));
+ const width = parser.specifier() catch |err| @compileError(@errorName(err));
// Skip the dot, if present
- if (comptime parser.char()) |ch| {
- if (ch != '.') {
- @compileError("expected . or }, found '" ++ unicode.utf8EncodeComptime(ch) ++ "'");
- }
+ if (parser.char()) |b| {
+ if (b != '.') @compileError("expected . or }, found '" ++ &[1]u8{b} ++ "'");
}
// Parse the precision parameter
- const precision = comptime parser.specifier() catch |err|
- @compileError(@errorName(err));
+ const precision = parser.specifier() catch |err| @compileError(@errorName(err));
- if (comptime parser.char()) |ch| {
- @compileError("extraneous trailing character '" ++ unicode.utf8EncodeComptime(ch) ++ "'");
- }
+ if (parser.char()) |b| @compileError("extraneous trailing character '" ++ &[1]u8{b} ++ "'");
- return Placeholder{
+ return .{
.specifier_arg = cacheString(specifier_arg[0..specifier_arg.len].*),
.fill = fill orelse default_fill_char,
.alignment = alignment orelse default_alignment,
@@ -320,88 +317,60 @@ pub const Specifier = union(enum) {
/// Allows implementing formatters compatible with std.fmt without replicating
/// the standard library behavior.
pub const Parser = struct {
- iter: std.unicode.Utf8Iterator,
+ bytes: []const u8,
+ i: usize,
- // Returns a decimal number or null if the current character is not a
- // digit
pub fn number(self: *@This()) ?usize {
var r: ?usize = null;
-
- while (self.peek(0)) |code_point| {
- switch (code_point) {
+ while (self.peek(0)) |byte| {
+ switch (byte) {
'0'...'9' => {
if (r == null) r = 0;
r.? *= 10;
- r.? += code_point - '0';
+ r.? += byte - '0';
},
else => break,
}
- _ = self.iter.nextCodepoint();
+ self.i += 1;
}
-
return r;
}
- // Returns a substring of the input starting from the current position
- // and ending where `ch` is found or until the end if not found
- pub fn until(self: *@This(), ch: u21) []const u8 {
- const start = self.iter.i;
- while (self.peek(0)) |code_point| {
- if (code_point == ch)
- break;
- _ = self.iter.nextCodepoint();
- }
- return self.iter.bytes[start..self.iter.i];
+ pub fn until(self: *@This(), delimiter: u8) []const u8 {
+ const start = self.i;
+ self.i = std.mem.indexOfScalarPos(u8, self.bytes, self.i, delimiter) orelse self.bytes.len;
+ return self.bytes[start..self.i];
}
- // Returns the character pointed to by the iterator if available, or
- // null otherwise
- pub fn char(self: *@This()) ?u21 {
- if (self.iter.nextCodepoint()) |code_point| {
- return code_point;
- }
- return null;
+ pub fn char(self: *@This()) ?u8 {
+ const i = self.i;
+ if (self.bytes.len - i == 0) return null;
+ self.i = i + 1;
+ return self.bytes[i];
}
- // Returns true if the iterator points to an existing character and
- // false otherwise
- pub fn maybe(self: *@This(), val: u21) bool {
- if (self.peek(0) == val) {
- _ = self.iter.nextCodepoint();
+ pub fn maybe(self: *@This(), byte: u8) bool {
+ if (self.peek(0) == byte) {
+ self.i += 1;
return true;
}
return false;
}
- // Returns a decimal number or null if the current character is not a
- // digit
pub fn specifier(self: *@This()) !Specifier {
if (self.maybe('[')) {
const arg_name = self.until(']');
-
- if (!self.maybe(']'))
- return @field(anyerror, "Expected closing ]");
-
- return Specifier{ .named = arg_name };
+ if (!self.maybe(']')) return error.@"Expected closing ]";
+ return .{ .named = arg_name };
}
- if (self.number()) |i|
- return Specifier{ .number = i };
-
- return Specifier{ .none = {} };
+ if (self.number()) |i| return .{ .number = i };
+ return .{ .none = {} };
}
- // Returns the n-th next character or null if that's past the end
- pub fn peek(self: *@This(), n: usize) ?u21 {
- const original_i = self.iter.i;
- defer self.iter.i = original_i;
-
- var i: usize = 0;
- var code_point: ?u21 = null;
- while (i <= n) : (i += 1) {
- code_point = self.iter.nextCodepoint();
- if (code_point == null) return null;
- }
- return code_point;
+ pub fn peek(self: *@This(), i: usize) ?u8 {
+ const peek_index = self.i + i;
+ if (peek_index >= self.bytes.len) return null;
+ return self.bytes[peek_index];
}
};
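The rewritten Parser walks raw bytes rather than UTF-8 codepoints; its methods are grounded directly in the code above. A standalone exercise over a placeholder body:

const std = @import("std");

test "byte-oriented fmt Parser" {
    var p: std.fmt.Parser = .{ .bytes = "x:>10.3", .i = 0 };
    try std.testing.expectEqualStrings("x", p.until(':'));
    _ = p.char(); // consume ':'
    try std.testing.expect(p.maybe('>'));
    try std.testing.expectEqual(@as(?usize, 10), p.number());
    _ = p.char(); // consume '.'
    try std.testing.expectEqual(@as(?usize, 3), p.number());
    try std.testing.expect(p.peek(0) == null);
}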
@@ -434,822 +403,14 @@ pub const ArgState = struct {
}
};
-pub fn formatAddress(value: anytype, options: FormatOptions, writer: anytype) @TypeOf(writer).Error!void {
- _ = options;
- const T = @TypeOf(value);
-
- switch (@typeInfo(T)) {
- .pointer => |info| {
- try writer.writeAll(@typeName(info.child) ++ "@");
- if (info.size == .slice)
- try formatInt(@intFromPtr(value.ptr), 16, .lower, FormatOptions{}, writer)
- else
- try formatInt(@intFromPtr(value), 16, .lower, FormatOptions{}, writer);
- return;
- },
- .optional => |info| {
- if (@typeInfo(info.child) == .pointer) {
- try writer.writeAll(@typeName(info.child) ++ "@");
- try formatInt(@intFromPtr(value), 16, .lower, FormatOptions{}, writer);
- return;
- }
- },
- else => {},
- }
-
- @compileError("cannot format non-pointer type " ++ @typeName(T) ++ " with * specifier");
-}
-
-// This ANY const is a workaround for: https://github.com/ziglang/zig/issues/7948
-const ANY = "any";
-
-pub fn defaultSpec(comptime T: type) [:0]const u8 {
- switch (@typeInfo(T)) {
- .array, .vector => return ANY,
- .pointer => |ptr_info| switch (ptr_info.size) {
- .one => switch (@typeInfo(ptr_info.child)) {
- .array => return ANY,
- else => {},
- },
- .many, .c => return "*",
- .slice => return ANY,
- },
- .optional => |info| return "?" ++ defaultSpec(info.child),
- .error_union => |info| return "!" ++ defaultSpec(info.payload),
- else => {},
- }
- return "";
-}
-
-fn stripOptionalOrErrorUnionSpec(comptime fmt: []const u8) []const u8 {
- return if (std.mem.eql(u8, fmt[1..], ANY))
- ANY
- else
- fmt[1..];
-}
-
-pub fn invalidFmtError(comptime fmt: []const u8, value: anytype) void {
- @compileError("invalid format string '" ++ fmt ++ "' for type '" ++ @typeName(@TypeOf(value)) ++ "'");
-}
-
-pub fn formatType(
- value: anytype,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
- max_depth: usize,
-) @TypeOf(writer).Error!void {
- const T = @TypeOf(value);
- const actual_fmt = comptime if (std.mem.eql(u8, fmt, ANY))
- defaultSpec(T)
- else if (fmt.len != 0 and (fmt[0] == '?' or fmt[0] == '!')) switch (@typeInfo(T)) {
- .optional, .error_union => fmt,
- else => stripOptionalOrErrorUnionSpec(fmt),
- } else fmt;
-
- if (comptime std.mem.eql(u8, actual_fmt, "*")) {
- return formatAddress(value, options, writer);
- }
-
- if (std.meta.hasMethod(T, "format")) {
- return try value.format(actual_fmt, options, writer);
- }
-
- switch (@typeInfo(T)) {
- .comptime_int, .int, .comptime_float, .float => {
- return formatValue(value, actual_fmt, options, writer);
- },
- .void => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- return formatBuf("void", options, writer);
- },
- .bool => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- return formatBuf(if (value) "true" else "false", options, writer);
- },
- .optional => {
- if (actual_fmt.len == 0 or actual_fmt[0] != '?')
- @compileError("cannot format optional without a specifier (i.e. {?} or {any})");
- const remaining_fmt = comptime stripOptionalOrErrorUnionSpec(actual_fmt);
- if (value) |payload| {
- return formatType(payload, remaining_fmt, options, writer, max_depth);
- } else {
- return formatBuf("null", options, writer);
- }
- },
- .error_union => {
- if (actual_fmt.len == 0 or actual_fmt[0] != '!')
- @compileError("cannot format error union without a specifier (i.e. {!} or {any})");
- const remaining_fmt = comptime stripOptionalOrErrorUnionSpec(actual_fmt);
- if (value) |payload| {
- return formatType(payload, remaining_fmt, options, writer, max_depth);
- } else |err| {
- return formatType(err, "", options, writer, max_depth);
- }
- },
- .error_set => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- try writer.writeAll("error.");
- return writer.writeAll(@errorName(value));
- },
- .@"enum" => |enumInfo| {
- try writer.writeAll(@typeName(T));
- if (enumInfo.is_exhaustive) {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- try writer.writeAll(".");
- try writer.writeAll(@tagName(value));
- return;
- }
-
- // Use @tagName only if value is one of known fields
- @setEvalBranchQuota(3 * enumInfo.fields.len);
- inline for (enumInfo.fields) |enumField| {
- if (@intFromEnum(value) == enumField.value) {
- try writer.writeAll(".");
- try writer.writeAll(@tagName(value));
- return;
- }
- }
-
- try writer.writeAll("(");
- try formatType(@intFromEnum(value), actual_fmt, options, writer, max_depth);
- try writer.writeAll(")");
- },
- .@"union" => |info| {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- try writer.writeAll(@typeName(T));
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- if (info.tag_type) |UnionTagType| {
- try writer.writeAll("{ .");
- try writer.writeAll(@tagName(@as(UnionTagType, value)));
- try writer.writeAll(" = ");
- inline for (info.fields) |u_field| {
- if (value == @field(UnionTagType, u_field.name)) {
- try formatType(@field(value, u_field.name), ANY, options, writer, max_depth - 1);
- }
- }
- try writer.writeAll(" }");
- } else {
- try format(writer, "@{x}", .{@intFromPtr(&value)});
- }
- },
- .@"struct" => |info| {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- if (info.is_tuple) {
- // Skip the type and field names when formatting tuples.
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- try writer.writeAll("{");
- inline for (info.fields, 0..) |f, i| {
- if (i == 0) {
- try writer.writeAll(" ");
- } else {
- try writer.writeAll(", ");
- }
- try formatType(@field(value, f.name), ANY, options, writer, max_depth - 1);
- }
- return writer.writeAll(" }");
- }
- try writer.writeAll(@typeName(T));
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- try writer.writeAll("{");
- inline for (info.fields, 0..) |f, i| {
- if (i == 0) {
- try writer.writeAll(" .");
- } else {
- try writer.writeAll(", .");
- }
- try writer.writeAll(f.name);
- try writer.writeAll(" = ");
- try formatType(@field(value, f.name), ANY, options, writer, max_depth - 1);
- }
- try writer.writeAll(" }");
- },
- .pointer => |ptr_info| switch (ptr_info.size) {
- .one => switch (@typeInfo(ptr_info.child)) {
- .array, .@"enum", .@"union", .@"struct" => {
- return formatType(value.*, actual_fmt, options, writer, max_depth);
- },
- else => return format(writer, "{s}@{x}", .{ @typeName(ptr_info.child), @intFromPtr(value) }),
- },
- .many, .c => {
- if (actual_fmt.len == 0)
- @compileError("cannot format pointer without a specifier (i.e. {s} or {*})");
- if (ptr_info.sentinel() != null) {
- return formatType(mem.span(value), actual_fmt, options, writer, max_depth);
- }
- if (actual_fmt[0] == 's' and ptr_info.child == u8) {
- return formatBuf(mem.span(value), options, writer);
- }
- invalidFmtError(fmt, value);
- },
- .slice => {
- if (actual_fmt.len == 0)
- @compileError("cannot format slice without a specifier (i.e. {s} or {any})");
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- if (actual_fmt[0] == 's' and ptr_info.child == u8) {
- return formatBuf(value, options, writer);
- }
- try writer.writeAll("{ ");
- for (value, 0..) |elem, i| {
- try formatType(elem, actual_fmt, options, writer, max_depth - 1);
- if (i != value.len - 1) {
- try writer.writeAll(", ");
- }
- }
- try writer.writeAll(" }");
- },
- },
- .array => |info| {
- if (actual_fmt.len == 0)
- @compileError("cannot format array without a specifier (i.e. {s} or {any})");
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- if (actual_fmt[0] == 's' and info.child == u8) {
- return formatBuf(&value, options, writer);
- }
- try writer.writeAll("{ ");
- for (value, 0..) |elem, i| {
- try formatType(elem, actual_fmt, options, writer, max_depth - 1);
- if (i < value.len - 1) {
- try writer.writeAll(", ");
- }
- }
- try writer.writeAll(" }");
- },
- .vector => |info| {
- if (max_depth == 0) {
- return writer.writeAll("{ ... }");
- }
- try writer.writeAll("{ ");
- var i: usize = 0;
- while (i < info.len) : (i += 1) {
- try formatType(value[i], actual_fmt, options, writer, max_depth - 1);
- if (i < info.len - 1) {
- try writer.writeAll(", ");
- }
- }
- try writer.writeAll(" }");
- },
- .@"fn" => @compileError("unable to format function body type, use '*const " ++ @typeName(T) ++ "' for a function pointer type"),
- .type => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- return formatBuf(@typeName(value), options, writer);
- },
- .enum_literal => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- const buffer = [_]u8{'.'} ++ @tagName(value);
- return formatBuf(buffer, options, writer);
- },
- .null => {
- if (actual_fmt.len != 0) invalidFmtError(fmt, value);
- return formatBuf("null", options, writer);
- },
- else => @compileError("unable to format type '" ++ @typeName(T) ++ "'"),
- }
-}
-
-fn formatValue(
- value: anytype,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- const T = @TypeOf(value);
- switch (@typeInfo(T)) {
- .float, .comptime_float => return formatFloatValue(value, fmt, options, writer),
- .int, .comptime_int => return formatIntValue(value, fmt, options, writer),
- .bool => return formatBuf(if (value) "true" else "false", options, writer),
- else => comptime unreachable,
- }
-}
-
-pub fn formatIntValue(
- value: anytype,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- comptime var base = 10;
- comptime var case: Case = .lower;
-
- const int_value = if (@TypeOf(value) == comptime_int) blk: {
- const Int = math.IntFittingRange(value, value);
- break :blk @as(Int, value);
- } else value;
-
- if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "d")) {
- base = 10;
- case = .lower;
- } else if (comptime std.mem.eql(u8, fmt, "c")) {
- if (@typeInfo(@TypeOf(int_value)).int.bits <= 8) {
- return formatAsciiChar(@as(u8, int_value), options, writer);
- } else {
- @compileError("cannot print integer that is larger than 8 bits as an ASCII character");
- }
- } else if (comptime std.mem.eql(u8, fmt, "u")) {
- if (@typeInfo(@TypeOf(int_value)).int.bits <= 21) {
- return formatUnicodeCodepoint(@as(u21, int_value), options, writer);
- } else {
- @compileError("cannot print integer that is larger than 21 bits as an UTF-8 sequence");
- }
- } else if (comptime std.mem.eql(u8, fmt, "b")) {
- base = 2;
- case = .lower;
- } else if (comptime std.mem.eql(u8, fmt, "x")) {
- base = 16;
- case = .lower;
- } else if (comptime std.mem.eql(u8, fmt, "X")) {
- base = 16;
- case = .upper;
- } else if (comptime std.mem.eql(u8, fmt, "o")) {
- base = 8;
- case = .lower;
- } else {
- invalidFmtError(fmt, value);
- }
-
- return formatInt(int_value, base, case, options, writer);
-}
-
-pub const format_float = @import("fmt/format_float.zig");
-pub const formatFloat = format_float.formatFloat;
-pub const FormatFloatError = format_float.FormatError;
-
-fn formatFloatValue(
- value: anytype,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- var buf: [format_float.bufferSize(.decimal, f64)]u8 = undefined;
-
- if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "e")) {
- const s = formatFloat(&buf, value, .{ .mode = .scientific, .precision = options.precision }) catch |err| switch (err) {
- error.BufferTooSmall => "(float)",
- };
- return formatBuf(s, options, writer);
- } else if (comptime std.mem.eql(u8, fmt, "d")) {
- const s = formatFloat(&buf, value, .{ .mode = .decimal, .precision = options.precision }) catch |err| switch (err) {
- error.BufferTooSmall => "(float)",
- };
- return formatBuf(s, options, writer);
- } else if (comptime std.mem.eql(u8, fmt, "x")) {
- var buf_stream = std.io.fixedBufferStream(&buf);
- formatFloatHexadecimal(value, options, buf_stream.writer()) catch |err| switch (err) {
- error.NoSpaceLeft => unreachable,
- };
- return formatBuf(buf_stream.getWritten(), options, writer);
- } else {
- invalidFmtError(fmt, value);
- }
-}
-
-test {
- _ = &format_float;
-}
-
pub const Case = enum { lower, upper };
-fn SliceHex(comptime case: Case) type {
- const charset = "0123456789" ++ if (case == .upper) "ABCDEF" else "abcdef";
-
- return struct {
- pub fn format(
- bytes: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt;
- _ = options;
- var buf: [2]u8 = undefined;
-
- for (bytes) |c| {
- buf[0] = charset[c >> 4];
- buf[1] = charset[c & 15];
- try writer.writeAll(&buf);
- }
- }
- };
-}
-
-const formatSliceHexLower = SliceHex(.lower).format;
-const formatSliceHexUpper = SliceHex(.upper).format;
-
-/// Return a Formatter for a []const u8 where every byte is formatted as a pair
-/// of lowercase hexadecimal digits.
-pub fn fmtSliceHexLower(bytes: []const u8) std.fmt.Formatter(formatSliceHexLower) {
- return .{ .data = bytes };
-}
-
-/// Return a Formatter for a []const u8 where every byte is formatted as pair
-/// of uppercase hexadecimal digits.
-pub fn fmtSliceHexUpper(bytes: []const u8) std.fmt.Formatter(formatSliceHexUpper) {
- return .{ .data = bytes };
-}
-
-fn SliceEscape(comptime case: Case) type {
- const charset = "0123456789" ++ if (case == .upper) "ABCDEF" else "abcdef";
-
- return struct {
- pub fn format(
- bytes: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt;
- _ = options;
- var buf: [4]u8 = undefined;
-
- buf[0] = '\\';
- buf[1] = 'x';
-
- for (bytes) |c| {
- if (std.ascii.isPrint(c)) {
- try writer.writeByte(c);
- } else {
- buf[2] = charset[c >> 4];
- buf[3] = charset[c & 15];
- try writer.writeAll(&buf);
- }
- }
- }
- };
-}
-
-const formatSliceEscapeLower = SliceEscape(.lower).format;
-const formatSliceEscapeUpper = SliceEscape(.upper).format;
-
-/// Return a Formatter for a []const u8 where every non-printable ASCII
-/// character is escaped as \xNN, where NN is the character in lowercase
-/// hexadecimal notation.
-pub fn fmtSliceEscapeLower(bytes: []const u8) std.fmt.Formatter(formatSliceEscapeLower) {
- return .{ .data = bytes };
-}
-
-/// Return a Formatter for a []const u8 where every non-printable ASCII
-/// character is escaped as \xNN, where NN is the character in uppercase
-/// hexadecimal notation.
-pub fn fmtSliceEscapeUpper(bytes: []const u8) std.fmt.Formatter(formatSliceEscapeUpper) {
- return .{ .data = bytes };
-}
-
-fn Size(comptime base: comptime_int) type {
- return struct {
- fn format(
- value: u64,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
- ) !void {
- _ = fmt;
- if (value == 0) {
- return formatBuf("0B", options, writer);
- }
- // The worst case in terms of space needed is 32 bytes + 3 for the suffix.
- var buf: [format_float.min_buffer_size + 3]u8 = undefined;
-
- const mags_si = " kMGTPEZY";
- const mags_iec = " KMGTPEZY";
-
- const log2 = math.log2(value);
- const magnitude = switch (base) {
- 1000 => @min(log2 / comptime math.log2(1000), mags_si.len - 1),
- 1024 => @min(log2 / 10, mags_iec.len - 1),
- else => unreachable,
- };
- const new_value = lossyCast(f64, value) / math.pow(f64, lossyCast(f64, base), lossyCast(f64, magnitude));
- const suffix = switch (base) {
- 1000 => mags_si[magnitude],
- 1024 => mags_iec[magnitude],
- else => unreachable,
- };
-
- const s = switch (magnitude) {
- 0 => buf[0..formatIntBuf(&buf, value, 10, .lower, .{})],
- else => formatFloat(&buf, new_value, .{ .mode = .decimal, .precision = options.precision }) catch |err| switch (err) {
- error.BufferTooSmall => unreachable,
- },
- };
-
- var i: usize = s.len;
- if (suffix == ' ') {
- buf[i] = 'B';
- i += 1;
- } else switch (base) {
- 1000 => {
- buf[i..][0..2].* = [_]u8{ suffix, 'B' };
- i += 2;
- },
- 1024 => {
- buf[i..][0..3].* = [_]u8{ suffix, 'i', 'B' };
- i += 3;
- },
- else => unreachable,
- }
-
- return formatBuf(buf[0..i], options, writer);
- }
- };
-}
-const formatSizeDec = Size(1000).format;
-const formatSizeBin = Size(1024).format;
-
-/// Return a Formatter for a u64 value representing a file size.
-/// This formatter represents the number as multiple of 1000 and uses the SI
-/// measurement units (kB, MB, GB, ...).
-/// Format option `precision` is ignored when `value` is less than 1kB
-pub fn fmtIntSizeDec(value: u64) std.fmt.Formatter(formatSizeDec) {
- return .{ .data = value };
-}
-
-/// Return a Formatter for a u64 value representing a file size.
-/// This formatter represents the number as multiple of 1024 and uses the IEC
-/// measurement units (KiB, MiB, GiB, ...).
-/// Format option `precision` is ignored when `value` is less than 1KiB
-pub fn fmtIntSizeBin(value: u64) std.fmt.Formatter(formatSizeBin) {
- return .{ .data = value };
-}
-
-fn checkTextFmt(comptime fmt: []const u8) void {
- if (fmt.len != 1)
- @compileError("unsupported format string '" ++ fmt ++ "' when formatting text");
- switch (fmt[0]) {
- // Example of deprecation:
- // '[deprecated_specifier]' => @compileError("specifier '[deprecated_specifier]' has been deprecated, wrap your argument in `std.some_function` instead"),
- 'x' => @compileError("specifier 'x' has been deprecated, wrap your argument in std.fmt.fmtSliceHexLower instead"),
- 'X' => @compileError("specifier 'X' has been deprecated, wrap your argument in std.fmt.fmtSliceHexUpper instead"),
- else => {},
- }
-}
-
-pub fn formatText(
- bytes: []const u8,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- comptime checkTextFmt(fmt);
- return formatBuf(bytes, options, writer);
-}
-
-pub fn formatAsciiChar(
- c: u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- return formatBuf(@as(*const [1]u8, &c), options, writer);
-}
-
-pub fn formatUnicodeCodepoint(
- c: u21,
- options: FormatOptions,
- writer: anytype,
-) !void {
- var buf: [4]u8 = undefined;
- const len = unicode.utf8Encode(c, &buf) catch |err| switch (err) {
- error.Utf8CannotEncodeSurrogateHalf, error.CodepointTooLarge => {
- return formatBuf(&unicode.utf8EncodeComptime(unicode.replacement_character), options, writer);
- },
- };
- return formatBuf(buf[0..len], options, writer);
-}
-
-pub fn formatBuf(
- buf: []const u8,
- options: FormatOptions,
- writer: anytype,
-) !void {
- if (options.width) |min_width| {
- // In case of error assume the buffer content is ASCII-encoded
- const width = unicode.utf8CountCodepoints(buf) catch buf.len;
- const padding = if (width < min_width) min_width - width else 0;
-
- if (padding == 0)
- return writer.writeAll(buf);
-
- var fill_buffer: [4]u8 = undefined;
- const fill_utf8 = if (unicode.utf8Encode(options.fill, &fill_buffer)) |len|
- fill_buffer[0..len]
- else |err| switch (err) {
- error.Utf8CannotEncodeSurrogateHalf,
- error.CodepointTooLarge,
- => &unicode.utf8EncodeComptime(unicode.replacement_character),
- };
- switch (options.alignment) {
- .left => {
- try writer.writeAll(buf);
- try writer.writeBytesNTimes(fill_utf8, padding);
- },
- .center => {
- const left_padding = padding / 2;
- const right_padding = (padding + 1) / 2;
- try writer.writeBytesNTimes(fill_utf8, left_padding);
- try writer.writeAll(buf);
- try writer.writeBytesNTimes(fill_utf8, right_padding);
- },
- .right => {
- try writer.writeBytesNTimes(fill_utf8, padding);
- try writer.writeAll(buf);
- },
- }
- } else {
- // Fast path, avoid counting the number of codepoints
- try writer.writeAll(buf);
- }
-}
-
-pub fn formatFloatHexadecimal(
- value: anytype,
- options: FormatOptions,
- writer: anytype,
-) !void {
- if (math.signbit(value)) {
- try writer.writeByte('-');
- }
- if (math.isNan(value)) {
- return writer.writeAll("nan");
- }
- if (math.isInf(value)) {
- return writer.writeAll("inf");
- }
-
- const T = @TypeOf(value);
- const TU = std.meta.Int(.unsigned, @bitSizeOf(T));
-
- const mantissa_bits = math.floatMantissaBits(T);
- const fractional_bits = math.floatFractionalBits(T);
- const exponent_bits = math.floatExponentBits(T);
- const mantissa_mask = (1 << mantissa_bits) - 1;
- const exponent_mask = (1 << exponent_bits) - 1;
- const exponent_bias = (1 << (exponent_bits - 1)) - 1;
-
- const as_bits = @as(TU, @bitCast(value));
- var mantissa = as_bits & mantissa_mask;
- var exponent: i32 = @as(u16, @truncate((as_bits >> mantissa_bits) & exponent_mask));
-
- const is_denormal = exponent == 0 and mantissa != 0;
- const is_zero = exponent == 0 and mantissa == 0;
-
- if (is_zero) {
- // Handle this case here to simplify the logic below.
- try writer.writeAll("0x0");
- if (options.precision) |precision| {
- if (precision > 0) {
- try writer.writeAll(".");
- try writer.writeByteNTimes('0', precision);
- }
- } else {
- try writer.writeAll(".0");
- }
- try writer.writeAll("p0");
- return;
- }
-
- if (is_denormal) {
- // Adjust the exponent for printing.
- exponent += 1;
- } else {
- if (fractional_bits == mantissa_bits)
- mantissa |= 1 << fractional_bits; // Add the implicit integer bit.
- }
-
- const mantissa_digits = (fractional_bits + 3) / 4;
- // Fill in zeroes to round the fraction width to a multiple of 4.
- mantissa <<= mantissa_digits * 4 - fractional_bits;
-
- if (options.precision) |precision| {
- // Round if needed.
- if (precision < mantissa_digits) {
- // We always have at least 4 extra bits.
- var extra_bits = (mantissa_digits - precision) * 4;
- // The result LSB is the Guard bit, we need two more (Round and
- // Sticky) to round the value.
- while (extra_bits > 2) {
- mantissa = (mantissa >> 1) | (mantissa & 1);
- extra_bits -= 1;
- }
- // Round to nearest, tie to even.
- mantissa |= @intFromBool(mantissa & 0b100 != 0);
- mantissa += 1;
- // Drop the excess bits.
- mantissa >>= 2;
- // Restore the alignment.
- mantissa <<= @as(math.Log2Int(TU), @intCast((mantissa_digits - precision) * 4));
-
- const overflow = mantissa & (1 << 1 + mantissa_digits * 4) != 0;
- // Prefer a normalized result in case of overflow.
- if (overflow) {
- mantissa >>= 1;
- exponent += 1;
- }
- }
- }
-
- // +1 for the decimal part.
- var buf: [1 + mantissa_digits]u8 = undefined;
- _ = formatIntBuf(&buf, mantissa, 16, .lower, .{ .fill = '0', .width = 1 + mantissa_digits });
-
- try writer.writeAll("0x");
- try writer.writeByte(buf[0]);
- const trimmed = mem.trimEnd(u8, buf[1..], "0");
- if (options.precision) |precision| {
- if (precision > 0) try writer.writeAll(".");
- } else if (trimmed.len > 0) {
- try writer.writeAll(".");
- }
- try writer.writeAll(trimmed);
- // Add trailing zeros if explicitly requested.
- if (options.precision) |precision| if (precision > 0) {
- if (precision > trimmed.len)
- try writer.writeByteNTimes('0', precision - trimmed.len);
- };
- try writer.writeAll("p");
- try formatInt(exponent - exponent_bias, 10, .lower, .{}, writer);
-}
-
-pub fn formatInt(
- value: anytype,
- base: u8,
- case: Case,
- options: FormatOptions,
- writer: anytype,
-) !void {
- assert(base >= 2);
-
- const int_value = if (@TypeOf(value) == comptime_int) blk: {
- const Int = math.IntFittingRange(value, value);
- break :blk @as(Int, value);
- } else value;
-
- const value_info = @typeInfo(@TypeOf(int_value)).int;
-
- // The type must have the same size as `base` or be wider in order for the
- // division to work
- const min_int_bits = comptime @max(value_info.bits, 8);
- const MinInt = std.meta.Int(.unsigned, min_int_bits);
-
- const abs_value = @abs(int_value);
- // The worst case in terms of space needed is base 2, plus 1 for the sign
- var buf: [1 + @max(@as(comptime_int, value_info.bits), 1)]u8 = undefined;
-
- var a: MinInt = abs_value;
- var index: usize = buf.len;
-
- if (base == 10) {
- while (a >= 100) : (a = @divTrunc(a, 100)) {
- index -= 2;
- buf[index..][0..2].* = digits2(@intCast(a % 100));
- }
-
- if (a < 10) {
- index -= 1;
- buf[index] = '0' + @as(u8, @intCast(a));
- } else {
- index -= 2;
- buf[index..][0..2].* = digits2(@intCast(a));
- }
- } else {
- while (true) {
- const digit = a % base;
- index -= 1;
- buf[index] = digitToChar(@intCast(digit), case);
- a /= base;
- if (a == 0) break;
- }
- }
-
- if (value_info.signedness == .signed) {
- if (value < 0) {
- // Negative integer
- index -= 1;
- buf[index] = '-';
- } else if (options.width == null or options.width.? == 0) {
- // Positive integer, omit the plus sign
- } else {
- // Positive integer
- index -= 1;
- buf[index] = '+';
- }
- }
-
- return formatBuf(buf[index..], options, writer);
-}
-
-pub fn formatIntBuf(out_buf: []u8, value: anytype, base: u8, case: Case, options: FormatOptions) usize {
- var fbs = std.io.fixedBufferStream(out_buf);
- formatInt(value, base, case, options, fbs.writer()) catch unreachable;
- return fbs.pos;
+/// Asserts the rendered integer value fits in `buffer`.
+/// Returns the end index within `buffer`.
+pub fn printInt(buffer: []u8, value: anytype, base: u8, case: Case, options: Options) usize {
+ var bw: Writer = .fixed(buffer);
+ bw.printIntOptions(value, base, case, options) catch unreachable;
+ return bw.end;
}
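printInt replaces formatIntBuf by writing through a fixed Writer and returning the end index. A minimal sketch of calling it:

const std = @import("std");

test "printInt into a fixed buffer" {
    var buf: [32]u8 = undefined;
    const end = std.fmt.printInt(&buf, @as(i32, -123), 10, .lower, .{});
    try std.testing.expectEqualStrings("-123", buf[0..end]);
}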
/// Converts values in the range [0, 100) to a base 10 string.
@@ -1261,244 +422,22 @@ pub fn digits2(value: u8) [2]u8 {
}
}
-const FormatDurationData = struct {
- ns: u64,
- negative: bool = false,
-};
-
-fn formatDuration(data: FormatDurationData, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
- _ = fmt;
-
- // worst case: "-XXXyXXwXXdXXhXXmXX.XXXs".len = 24
- var buf: [24]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buf);
- var buf_writer = fbs.writer();
- if (data.negative) {
- buf_writer.writeByte('-') catch unreachable;
- }
-
- var ns_remaining = data.ns;
- inline for (.{
- .{ .ns = 365 * std.time.ns_per_day, .sep = 'y' },
- .{ .ns = std.time.ns_per_week, .sep = 'w' },
- .{ .ns = std.time.ns_per_day, .sep = 'd' },
- .{ .ns = std.time.ns_per_hour, .sep = 'h' },
- .{ .ns = std.time.ns_per_min, .sep = 'm' },
- }) |unit| {
- if (ns_remaining >= unit.ns) {
- const units = ns_remaining / unit.ns;
- formatInt(units, 10, .lower, .{}, buf_writer) catch unreachable;
- buf_writer.writeByte(unit.sep) catch unreachable;
- ns_remaining -= units * unit.ns;
- if (ns_remaining == 0)
- return formatBuf(fbs.getWritten(), options, writer);
- }
- }
-
- inline for (.{
- .{ .ns = std.time.ns_per_s, .sep = "s" },
- .{ .ns = std.time.ns_per_ms, .sep = "ms" },
- .{ .ns = std.time.ns_per_us, .sep = "us" },
- }) |unit| {
- const kunits = ns_remaining * 1000 / unit.ns;
- if (kunits >= 1000) {
- formatInt(kunits / 1000, 10, .lower, .{}, buf_writer) catch unreachable;
- const frac = kunits % 1000;
- if (frac > 0) {
- // Write up to 3 decimal places
- var decimal_buf = [_]u8{ '.', 0, 0, 0 };
- _ = formatIntBuf(decimal_buf[1..], frac, 10, .lower, .{ .fill = '0', .width = 3 });
- var end: usize = 4;
- while (end > 1) : (end -= 1) {
- if (decimal_buf[end - 1] != '0') break;
- }
- buf_writer.writeAll(decimal_buf[0..end]) catch unreachable;
- }
- buf_writer.writeAll(unit.sep) catch unreachable;
- return formatBuf(fbs.getWritten(), options, writer);
- }
- }
-
- formatInt(ns_remaining, 10, .lower, .{}, buf_writer) catch unreachable;
- buf_writer.writeAll("ns") catch unreachable;
- return formatBuf(fbs.getWritten(), options, writer);
-}
-
-/// Return a Formatter for number of nanoseconds according to its magnitude:
-/// [#y][#w][#d][#h][#m]#[.###][n|u|m]s
-pub fn fmtDuration(ns: u64) Formatter(formatDuration) {
- const data = FormatDurationData{ .ns = ns };
- return .{ .data = data };
-}
-
-test fmtDuration {
- var buf: [24]u8 = undefined;
- inline for (.{
- .{ .s = "0ns", .d = 0 },
- .{ .s = "1ns", .d = 1 },
- .{ .s = "999ns", .d = std.time.ns_per_us - 1 },
- .{ .s = "1us", .d = std.time.ns_per_us },
- .{ .s = "1.45us", .d = 1450 },
- .{ .s = "1.5us", .d = 3 * std.time.ns_per_us / 2 },
- .{ .s = "14.5us", .d = 14500 },
- .{ .s = "145us", .d = 145000 },
- .{ .s = "999.999us", .d = std.time.ns_per_ms - 1 },
- .{ .s = "1ms", .d = std.time.ns_per_ms + 1 },
- .{ .s = "1.5ms", .d = 3 * std.time.ns_per_ms / 2 },
- .{ .s = "1.11ms", .d = 1110000 },
- .{ .s = "1.111ms", .d = 1111000 },
- .{ .s = "1.111ms", .d = 1111100 },
- .{ .s = "999.999ms", .d = std.time.ns_per_s - 1 },
- .{ .s = "1s", .d = std.time.ns_per_s },
- .{ .s = "59.999s", .d = std.time.ns_per_min - 1 },
- .{ .s = "1m", .d = std.time.ns_per_min },
- .{ .s = "1h", .d = std.time.ns_per_hour },
- .{ .s = "1d", .d = std.time.ns_per_day },
- .{ .s = "1w", .d = std.time.ns_per_week },
- .{ .s = "1y", .d = 365 * std.time.ns_per_day },
- .{ .s = "1y52w23h59m59.999s", .d = 730 * std.time.ns_per_day - 1 }, // 365d = 52w1d
- .{ .s = "1y1h1.001s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms },
- .{ .s = "1y1h1s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us },
- .{ .s = "1y1h999.999us", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1 },
- .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms },
- .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1 },
- .{ .s = "1y1m999ns", .d = 365 * std.time.ns_per_day + std.time.ns_per_min + 999 },
- .{ .s = "584y49w23h34m33.709s", .d = math.maxInt(u64) },
- }) |tc| {
- const slice = try bufPrint(&buf, "{}", .{fmtDuration(tc.d)});
- try std.testing.expectEqualStrings(tc.s, slice);
- }
-
- inline for (.{
- .{ .s = "=======0ns", .f = "{s:=>10}", .d = 0 },
- .{ .s = "1ns=======", .f = "{s:=<10}", .d = 1 },
- .{ .s = " 999ns ", .f = "{s:^10}", .d = std.time.ns_per_us - 1 },
- }) |tc| {
- const slice = try bufPrint(&buf, tc.f, .{fmtDuration(tc.d)});
- try std.testing.expectEqualStrings(tc.s, slice);
- }
-}
-
-fn formatDurationSigned(ns: i64, comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
- const data = FormatDurationData{ .ns = @abs(ns), .negative = ns < 0 };
- try formatDuration(data, fmt, options, writer);
-}
-
-/// Return a Formatter for number of nanoseconds according to its signed magnitude:
-/// [#y][#w][#d][#h][#m]#[.###][n|u|m]s
-pub fn fmtDurationSigned(ns: i64) Formatter(formatDurationSigned) {
- return .{ .data = ns };
-}
-
-test fmtDurationSigned {
- var buf: [24]u8 = undefined;
- inline for (.{
- .{ .s = "0ns", .d = 0 },
- .{ .s = "1ns", .d = 1 },
- .{ .s = "-1ns", .d = -(1) },
- .{ .s = "999ns", .d = std.time.ns_per_us - 1 },
- .{ .s = "-999ns", .d = -(std.time.ns_per_us - 1) },
- .{ .s = "1us", .d = std.time.ns_per_us },
- .{ .s = "-1us", .d = -(std.time.ns_per_us) },
- .{ .s = "1.45us", .d = 1450 },
- .{ .s = "-1.45us", .d = -(1450) },
- .{ .s = "1.5us", .d = 3 * std.time.ns_per_us / 2 },
- .{ .s = "-1.5us", .d = -(3 * std.time.ns_per_us / 2) },
- .{ .s = "14.5us", .d = 14500 },
- .{ .s = "-14.5us", .d = -(14500) },
- .{ .s = "145us", .d = 145000 },
- .{ .s = "-145us", .d = -(145000) },
- .{ .s = "999.999us", .d = std.time.ns_per_ms - 1 },
- .{ .s = "-999.999us", .d = -(std.time.ns_per_ms - 1) },
- .{ .s = "1ms", .d = std.time.ns_per_ms + 1 },
- .{ .s = "-1ms", .d = -(std.time.ns_per_ms + 1) },
- .{ .s = "1.5ms", .d = 3 * std.time.ns_per_ms / 2 },
- .{ .s = "-1.5ms", .d = -(3 * std.time.ns_per_ms / 2) },
- .{ .s = "1.11ms", .d = 1110000 },
- .{ .s = "-1.11ms", .d = -(1110000) },
- .{ .s = "1.111ms", .d = 1111000 },
- .{ .s = "-1.111ms", .d = -(1111000) },
- .{ .s = "1.111ms", .d = 1111100 },
- .{ .s = "-1.111ms", .d = -(1111100) },
- .{ .s = "999.999ms", .d = std.time.ns_per_s - 1 },
- .{ .s = "-999.999ms", .d = -(std.time.ns_per_s - 1) },
- .{ .s = "1s", .d = std.time.ns_per_s },
- .{ .s = "-1s", .d = -(std.time.ns_per_s) },
- .{ .s = "59.999s", .d = std.time.ns_per_min - 1 },
- .{ .s = "-59.999s", .d = -(std.time.ns_per_min - 1) },
- .{ .s = "1m", .d = std.time.ns_per_min },
- .{ .s = "-1m", .d = -(std.time.ns_per_min) },
- .{ .s = "1h", .d = std.time.ns_per_hour },
- .{ .s = "-1h", .d = -(std.time.ns_per_hour) },
- .{ .s = "1d", .d = std.time.ns_per_day },
- .{ .s = "-1d", .d = -(std.time.ns_per_day) },
- .{ .s = "1w", .d = std.time.ns_per_week },
- .{ .s = "-1w", .d = -(std.time.ns_per_week) },
- .{ .s = "1y", .d = 365 * std.time.ns_per_day },
- .{ .s = "-1y", .d = -(365 * std.time.ns_per_day) },
- .{ .s = "1y52w23h59m59.999s", .d = 730 * std.time.ns_per_day - 1 }, // 365d = 52w1d
- .{ .s = "-1y52w23h59m59.999s", .d = -(730 * std.time.ns_per_day - 1) }, // 365d = 52w1d
- .{ .s = "1y1h1.001s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms },
- .{ .s = "-1y1h1.001s", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + std.time.ns_per_ms) },
- .{ .s = "1y1h1s", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us },
- .{ .s = "-1y1h1s", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_s + 999 * std.time.ns_per_us) },
- .{ .s = "1y1h999.999us", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1 },
- .{ .s = "-1y1h999.999us", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms - 1) },
- .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms },
- .{ .s = "-1y1h1ms", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms) },
- .{ .s = "1y1h1ms", .d = 365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1 },
- .{ .s = "-1y1h1ms", .d = -(365 * std.time.ns_per_day + std.time.ns_per_hour + std.time.ns_per_ms + 1) },
- .{ .s = "1y1m999ns", .d = 365 * std.time.ns_per_day + std.time.ns_per_min + 999 },
- .{ .s = "-1y1m999ns", .d = -(365 * std.time.ns_per_day + std.time.ns_per_min + 999) },
- .{ .s = "292y24w3d23h47m16.854s", .d = math.maxInt(i64) },
- .{ .s = "-292y24w3d23h47m16.854s", .d = math.minInt(i64) + 1 },
- .{ .s = "-292y24w3d23h47m16.854s", .d = math.minInt(i64) },
- }) |tc| {
- const slice = try bufPrint(&buf, "{}", .{fmtDurationSigned(tc.d)});
- try std.testing.expectEqualStrings(tc.s, slice);
- }
-
- inline for (.{
- .{ .s = "=======0ns", .f = "{s:=>10}", .d = 0 },
- .{ .s = "1ns=======", .f = "{s:=<10}", .d = 1 },
- .{ .s = "-1ns======", .f = "{s:=<10}", .d = -(1) },
- .{ .s = " -999ns ", .f = "{s:^10}", .d = -(std.time.ns_per_us - 1) },
- }) |tc| {
- const slice = try bufPrint(&buf, tc.f, .{fmtDurationSigned(tc.d)});
- try std.testing.expectEqualStrings(tc.s, slice);
- }
-}
-
pub const ParseIntError = error{
- /// The result cannot fit in the type specified
+ /// The result cannot fit in the type specified.
Overflow,
-
- /// The input was empty or contained an invalid character
+ /// The input was empty or contained an invalid character.
InvalidCharacter,
};
-/// Creates a Formatter type from a format function. Wrapping data in Formatter(func) causes
-/// the data to be formatted using the given function `func`. `func` must be of the following
-/// form:
-///
-/// fn formatExample(
-/// data: T,
-/// comptime fmt: []const u8,
-/// options: std.fmt.FormatOptions,
-/// writer: anytype,
-/// ) !void;
-///
-pub fn Formatter(comptime formatFn: anytype) type {
- const Data = @typeInfo(@TypeOf(formatFn)).@"fn".params[0].type.?;
+pub fn Formatter(
+ comptime Data: type,
+ comptime formatFn: fn (data: Data, writer: *Writer) Writer.Error!void,
+) type {
return struct {
data: Data,
- pub fn format(
- self: @This(),
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
- try formatFn(self.data, fmt, options, writer);
+ pub fn format(self: @This(), writer: *Writer, comptime fmt: []const u8) Writer.Error!void {
+ comptime assert(fmt.len == 0);
+ try formatFn(self.data, writer);
}
};
}
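Formatter now takes the data type explicitly along with a two-argument format function. A hypothetical formatter under the new shape; rendering through the `{f}` specifier is assumed from its use elsewhere in this commit:

const std = @import("std");
const Writer = std.io.Writer;

// Illustrative only: upper-cases a byte slice through the new interface.
fn formatShouty(data: []const u8, w: *Writer) Writer.Error!void {
    for (data) |c| try w.writeByte(std.ascii.toUpper(c));
}

fn shouty(bytes: []const u8) std.fmt.Formatter([]const u8, formatShouty) {
    return .{ .data = bytes };
}

test "Formatter with an explicit Data type" {
    var buf: [16]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf, "{f}", .{shouty("abc")});
    try std.testing.expectEqualStrings("ABC", s);
}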
@@ -1793,15 +732,13 @@ pub const BufPrintError = error{
NoSpaceLeft,
};
-/// Print a Formatter string into `buf`. Actually just a thin wrapper around `format` and `fixedBufferStream`.
-/// Returns a slice of the bytes printed to.
+/// Print a Formatter string into `buf`. Returns a slice of the bytes printed.
pub fn bufPrint(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintError![]u8 {
- var fbs = std.io.fixedBufferStream(buf);
- format(fbs.writer().any(), fmt, args) catch |err| switch (err) {
- error.NoSpaceLeft => return error.NoSpaceLeft,
- else => unreachable,
+ var w: Writer = .fixed(buf);
+ w.print(fmt, args) catch |err| switch (err) {
+ error.WriteFailed => return error.NoSpaceLeft,
};
- return fbs.getWritten();
+ return w.buffered();
}
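bufPrint keeps its signature; only the backing stream changed. A quick check of the success path and of the NoSpaceLeft translation from error.WriteFailed:

const std = @import("std");

test "bufPrint over a fixed Writer" {
    var buf: [32]u8 = undefined;
    const s = try std.fmt.bufPrint(&buf, "{d} + {d} = {d}", .{ 1, 2, 3 });
    try std.testing.expectEqualStrings("1 + 2 = 3", s);

    var tiny: [2]u8 = undefined;
    try std.testing.expectError(error.NoSpaceLeft, std.fmt.bufPrint(&tiny, "{d}", .{12345}));
}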
pub fn bufPrintZ(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintError![:0]u8 {
@@ -1809,51 +746,37 @@ pub fn bufPrintZ(buf: []u8, comptime fmt: []const u8, args: anytype) BufPrintErr
return result[0 .. result.len - 1 :0];
}
-/// Count the characters needed for format. Useful for preallocating memory
-pub fn count(comptime fmt: []const u8, args: anytype) u64 {
- var counting_writer = std.io.countingWriter(std.io.null_writer);
- format(counting_writer.writer().any(), fmt, args) catch unreachable;
- return counting_writer.bytes_written;
-}
-
-pub const AllocPrintError = error{OutOfMemory};
-
-pub fn allocPrint(allocator: mem.Allocator, comptime fmt: []const u8, args: anytype) AllocPrintError![]u8 {
- const size = math.cast(usize, count(fmt, args)) orelse return error.OutOfMemory;
- const buf = try allocator.alloc(u8, size);
- return bufPrint(buf, fmt, args) catch |err| switch (err) {
- error.NoSpaceLeft => unreachable, // we just counted the size above
+/// Count the characters needed for format.
+pub fn count(comptime fmt: []const u8, args: anytype) usize {
+ var trash_buffer: [64]u8 = undefined;
+ var w: Writer = .discarding(&trash_buffer);
+ w.print(fmt, args) catch |err| switch (err) {
+ error.WriteFailed => unreachable,
};
+ return w.count;
}
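
A quick sketch of the unchanged public behavior of `count` (editor's illustration, not part of the patch): the length is now measured by a discarding writer rather than a counting writer.

const std = @import("std");

test "count sketch" {
    // "-12345" renders as 6 bytes.
    try std.testing.expectEqual(@as(usize, 6), std.fmt.count("{d}", .{-12345}));
}
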
-pub fn allocPrintZ(allocator: mem.Allocator, comptime fmt: []const u8, args: anytype) AllocPrintError![:0]u8 {
- const result = try allocPrint(allocator, fmt ++ "\x00", args);
- return result[0 .. result.len - 1 :0];
-}
-
-test bufPrintIntToSlice {
- var buffer: [100]u8 = undefined;
- const buf = buffer[0..];
-
- try std.testing.expectEqualSlices(u8, "-1", bufPrintIntToSlice(buf, @as(i1, -1), 10, .lower, FormatOptions{}));
-
- try std.testing.expectEqualSlices(u8, "-101111000110000101001110", bufPrintIntToSlice(buf, @as(i32, -12345678), 2, .lower, FormatOptions{}));
- try std.testing.expectEqualSlices(u8, "-12345678", bufPrintIntToSlice(buf, @as(i32, -12345678), 10, .lower, FormatOptions{}));
- try std.testing.expectEqualSlices(u8, "-bc614e", bufPrintIntToSlice(buf, @as(i32, -12345678), 16, .lower, FormatOptions{}));
- try std.testing.expectEqualSlices(u8, "-BC614E", bufPrintIntToSlice(buf, @as(i32, -12345678), 16, .upper, FormatOptions{}));
-
- try std.testing.expectEqualSlices(u8, "12345678", bufPrintIntToSlice(buf, @as(u32, 12345678), 10, .upper, FormatOptions{}));
-
- try std.testing.expectEqualSlices(u8, " 666", bufPrintIntToSlice(buf, @as(u32, 666), 10, .lower, FormatOptions{ .width = 6 }));
- try std.testing.expectEqualSlices(u8, " 1234", bufPrintIntToSlice(buf, @as(u32, 0x1234), 16, .lower, FormatOptions{ .width = 6 }));
- try std.testing.expectEqualSlices(u8, "1234", bufPrintIntToSlice(buf, @as(u32, 0x1234), 16, .lower, FormatOptions{ .width = 1 }));
-
- try std.testing.expectEqualSlices(u8, "+42", bufPrintIntToSlice(buf, @as(i32, 42), 10, .lower, FormatOptions{ .width = 3 }));
- try std.testing.expectEqualSlices(u8, "-42", bufPrintIntToSlice(buf, @as(i32, -42), 10, .lower, FormatOptions{ .width = 3 }));
+pub fn allocPrint(gpa: Allocator, comptime fmt: []const u8, args: anytype) Allocator.Error![]u8 {
+ var aw = try Writer.Allocating.initCapacity(gpa, fmt.len);
+ defer aw.deinit();
+ aw.interface.print(fmt, args) catch |err| switch (err) {
+ error.WriteFailed => return error.OutOfMemory,
+ };
+ return aw.toOwnedSlice();
}
-pub fn bufPrintIntToSlice(buf: []u8, value: anytype, base: u8, case: Case, options: FormatOptions) []u8 {
- return buf[0..formatIntBuf(buf, value, base, case, options)];
+pub fn allocPrintSentinel(
+ gpa: Allocator,
+ comptime fmt: []const u8,
+ args: anytype,
+ comptime sentinel: u8,
+) Allocator.Error![:sentinel]u8 {
+ var aw = try Writer.Allocating.initCapacity(gpa, fmt.len);
+ defer aw.deinit();
+ aw.interface.print(fmt, args) catch |err| switch (err) {
+ error.WriteFailed => return error.OutOfMemory,
+ };
+ return aw.toOwnedSliceSentinel(sentinel);
}
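
Editor's sketch (not part of the patch) of the new `allocPrintSentinel`, which replaces `allocPrintZ` at call sites later in this diff by taking the terminator explicitly:

const std = @import("std");

test "allocPrintSentinel sketch" {
    const gpa = std.testing.allocator;
    const s = try std.fmt.allocPrintSentinel(gpa, "port={d}", .{8080}, 0);
    defer gpa.free(s);
    try std.testing.expectEqualStrings("port=8080", s);
    // The sentinel byte sits one past the logical length.
    try std.testing.expectEqual(@as(u8, 0), s[s.len]);
}
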
pub inline fn comptimePrint(comptime fmt: []const u8, args: anytype) *const [count(fmt, args):0]u8 {
@@ -1984,26 +907,22 @@ test "int.padded" {
try expectFmt("i16: '-12345'", "i16: '{:4}'", .{@as(i16, -12345)});
try expectFmt("i16: '+12345'", "i16: '{:4}'", .{@as(i16, 12345)});
try expectFmt("u16: '12345'", "u16: '{:4}'", .{@as(u16, 12345)});
-
- try expectFmt("UTF-8: 'รผ '", "UTF-8: '{u:<4}'", .{'รผ'});
- try expectFmt("UTF-8: ' รผ'", "UTF-8: '{u:>4}'", .{'รผ'});
- try expectFmt("UTF-8: ' รผ '", "UTF-8: '{u:^4}'", .{'รผ'});
}
test "buffer" {
{
var buf1: [32]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buf1);
- try formatType(1234, "", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth);
- try std.testing.expectEqualStrings("1234", fbs.getWritten());
+ var w: Writer = .fixed(&buf1);
+ try w.printValue("", .{}, 1234, std.options.fmt_max_depth);
+ try std.testing.expectEqualStrings("1234", w.buffered());
- fbs.reset();
- try formatType('a', "c", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth);
- try std.testing.expectEqualStrings("a", fbs.getWritten());
+ w = .fixed(&buf1);
+ try w.printValue("c", .{}, 'a', std.options.fmt_max_depth);
+ try std.testing.expectEqualStrings("a", w.buffered());
- fbs.reset();
- try formatType(0b1100, "b", FormatOptions{}, fbs.writer(), std.options.fmt_max_depth);
- try std.testing.expectEqualStrings("1100", fbs.getWritten());
+ w = .fixed(&buf1);
+ try w.printValue("b", .{}, 0b1100, std.options.fmt_max_depth);
+ try std.testing.expectEqualStrings("1100", w.buffered());
}
}
@@ -2021,7 +940,7 @@ test "array" {
const value: [3]u8 = "abc".*;
try expectArrayFmt("array: abc\n", "array: {s}\n", value);
try expectArrayFmt("array: { 97, 98, 99 }\n", "array: {d}\n", value);
- try expectArrayFmt("array: { 61, 62, 63 }\n", "array: {x}\n", value);
+ try expectArrayFmt("array: 616263\n", "array: {x}\n", value);
try expectArrayFmt("array: { 97, 98, 99 }\n", "array: {any}\n", value);
var buf: [100]u8 = undefined;
@@ -2037,7 +956,7 @@ test "array" {
try expectArrayFmt("array: { abc, def }\n", "array: {s}\n", value);
try expectArrayFmt("array: { { 97, 98, 99 }, { 100, 101, 102 } }\n", "array: {d}\n", value);
- try expectArrayFmt("array: { { 61, 62, 63 }, { 64, 65, 66 } }\n", "array: {x}\n", value);
+ try expectArrayFmt("array: { 616263, 646566 }\n", "array: {x}\n", value);
}
}
@@ -2046,7 +965,7 @@ test "slice" {
const value: []const u8 = "abc";
try expectFmt("slice: abc\n", "slice: {s}\n", .{value});
try expectFmt("slice: { 97, 98, 99 }\n", "slice: {d}\n", .{value});
- try expectFmt("slice: { 61, 62, 63 }\n", "slice: {x}\n", .{value});
+ try expectFmt("slice: 616263\n", "slice: {x}\n", .{value});
try expectFmt("slice: { 97, 98, 99 }\n", "slice: {any}\n", .{value});
}
{
@@ -2083,22 +1002,15 @@ test "slice" {
const S2 = struct {
x: u8,
- pub fn format(s: @This(), comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+ pub fn format(s: @This(), writer: *Writer, comptime _: []const u8) Writer.Error!void {
try writer.print("S2({})", .{s.x});
}
};
const struct_slice: []const S2 = &[_]S2{ S2{ .x = 8 }, S2{ .x = 42 } };
- try expectFmt("slice: { S2(8), S2(42) }", "slice: {any}", .{struct_slice});
+ try expectFmt("slice: { fmt.test.slice.S2{ .x = 8 }, fmt.test.slice.S2{ .x = 42 } }", "slice: {any}", .{struct_slice});
}
}
-test "escape non-printable" {
- try expectFmt("abc 123", "{s}", .{fmtSliceEscapeLower("abc 123")});
- try expectFmt("ab\\xffc", "{s}", .{fmtSliceEscapeLower("ab\xffc")});
- try expectFmt("abc 123", "{s}", .{fmtSliceEscapeUpper("abc 123")});
- try expectFmt("ab\\xFFc", "{s}", .{fmtSliceEscapeUpper("ab\xffc")});
-}
-
test "pointer" {
{
const value = @as(*align(1) i32, @ptrFromInt(0xdeadbeef));
@@ -2129,21 +1041,6 @@ test "cstr" {
);
}
-test "filesize" {
- try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeDec(42)});
- try expectFmt("file size: 42B\n", "file size: {}\n", .{fmtIntSizeBin(42)});
- try expectFmt("file size: 63MB\n", "file size: {}\n", .{fmtIntSizeDec(63 * 1000 * 1000)});
- try expectFmt("file size: 63MiB\n", "file size: {}\n", .{fmtIntSizeBin(63 * 1024 * 1024)});
- try expectFmt("file size: 42B\n", "file size: {:.2}\n", .{fmtIntSizeDec(42)});
- try expectFmt("file size: 42B\n", "file size: {:>9.2}\n", .{fmtIntSizeDec(42)});
- try expectFmt("file size: 66.06MB\n", "file size: {:.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)});
- try expectFmt("file size: 60.08MiB\n", "file size: {:.2}\n", .{fmtIntSizeBin(63 * 1000 * 1000)});
- try expectFmt("file size: =66.06MB=\n", "file size: {:=^9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)});
- try expectFmt("file size: 66.06MB\n", "file size: {: >9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)});
- try expectFmt("file size: 66.06MB \n", "file size: {: <9.2}\n", .{fmtIntSizeDec(63 * 1024 * 1024)});
- try expectFmt("file size: 0.01844674407370955ZB\n", "file size: {}\n", .{fmtIntSizeDec(math.maxInt(u64))});
-}
-
test "struct" {
{
const Struct = struct {
@@ -2176,7 +1073,7 @@ test "struct" {
// Tuples
try expectFmt("{ }", "{}", .{.{}});
try expectFmt("{ -1 }", "{}", .{.{-1}});
- try expectFmt("{ -1, 42, 2.5e4 }", "{}", .{.{ -1, 42, 0.25e5 }});
+ try expectFmt("{ -1, 42, 25000 }", "{}", .{.{ -1, 42, 0.25e5 }});
}
test "enum" {
@@ -2216,10 +1113,14 @@ test "non-exhaustive enum" {
try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.One\n", "enum: {}\n", .{Enum.One});
try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {}\n", .{Enum.Two});
try expectFmt("enum: fmt.test.non-exhaustive enum.Enum(4660)\n", "enum: {}\n", .{@as(Enum, @enumFromInt(0x1234))});
- try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.One\n", "enum: {x}\n", .{Enum.One});
- try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {x}\n", .{Enum.Two});
- try expectFmt("enum: fmt.test.non-exhaustive enum.Enum.Two\n", "enum: {X}\n", .{Enum.Two});
- try expectFmt("enum: fmt.test.non-exhaustive enum.Enum(1234)\n", "enum: {x}\n", .{@as(Enum, @enumFromInt(0x1234))});
+ try expectFmt("enum: f\n", "enum: {x}\n", .{Enum.One});
+ try expectFmt("enum: beef\n", "enum: {x}\n", .{Enum.Two});
+ try expectFmt("enum: BEEF\n", "enum: {X}\n", .{Enum.Two});
+ try expectFmt("enum: 1234\n", "enum: {x}\n", .{@as(Enum, @enumFromInt(0x1234))});
+
+ try expectFmt("enum: 15\n", "enum: {d}\n", .{Enum.One});
+ try expectFmt("enum: 48879\n", "enum: {d}\n", .{Enum.Two});
+ try expectFmt("enum: 4660\n", "enum: {d}\n", .{@as(Enum, @enumFromInt(0x1234))});
}
test "float.scientific" {
@@ -2351,13 +1252,7 @@ test "custom" {
x: f32,
y: f32,
- pub fn format(
- self: SelfType,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
+ pub fn format(self: SelfType, writer: *Writer, comptime fmt: []const u8) Writer.Error!void {
if (fmt.len == 0 or comptime std.mem.eql(u8, fmt, "p")) {
return std.fmt.format(writer, "({d:.3},{d:.3})", .{ self.x, self.y });
} else if (comptime std.mem.eql(u8, fmt, "d")) {
@@ -2368,16 +1263,16 @@ test "custom" {
}
};
- var value = Vec2{
+ var value: Vec2 = .{
.x = 10.2,
.y = 2.22,
};
- try expectFmt("point: (10.200,2.220)\n", "point: {}\n", .{&value});
- try expectFmt("dim: 10.200x2.220\n", "dim: {d}\n", .{&value});
+ try expectFmt("point: (10.200,2.220)\n", "point: {f}\n", .{&value});
+ try expectFmt("dim: 10.200x2.220\n", "dim: {fd}\n", .{&value});
// same thing but not passing a pointer
- try expectFmt("point: (10.200,2.220)\n", "point: {}\n", .{value});
- try expectFmt("dim: 10.200x2.220\n", "dim: {d}\n", .{value});
+ try expectFmt("point: (10.200,2.220)\n", "point: {f}\n", .{value});
+ try expectFmt("dim: 10.200x2.220\n", "dim: {fd}\n", .{value});
}
test "union" {
@@ -2439,17 +1334,6 @@ test "struct.zero-size" {
try expectFmt("fmt.test.struct.zero-size.B{ .a = fmt.test.struct.zero-size.A{ }, .c = 0 }", "{}", .{b});
}
-test "bytes.hex" {
- const some_bytes = "\xCA\xFE\xBA\xBE";
- try expectFmt("lowercase: cafebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes)});
- try expectFmt("uppercase: CAFEBABE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes)});
- //Test Slices
- try expectFmt("uppercase: CAFE\n", "uppercase: {X}\n", .{fmtSliceHexUpper(some_bytes[0..2])});
- try expectFmt("lowercase: babe\n", "lowercase: {x}\n", .{fmtSliceHexLower(some_bytes[2..])});
- const bytes_with_zeros = "\x00\x0E\xBA\xBE";
- try expectFmt("lowercase: 000ebabe\n", "lowercase: {x}\n", .{fmtSliceHexLower(bytes_with_zeros)});
-}
-
/// Encodes a sequence of bytes as hexadecimal digits.
/// Returns an array containing the encoded bytes.
pub fn bytesToHex(input: anytype, case: Case) [input.len * 2]u8 {
@@ -2494,110 +1378,14 @@ test bytesToHex {
test hexToBytes {
var buf: [32]u8 = undefined;
- try expectFmt("90" ** 32, "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "90" ** 32))});
- try expectFmt("ABCD", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, "ABCD"))});
- try expectFmt("", "{s}", .{fmtSliceHexUpper(try hexToBytes(&buf, ""))});
+ try expectFmt("90" ** 32, "{X}", .{try hexToBytes(&buf, "90" ** 32)});
+ try expectFmt("ABCD", "{X}", .{try hexToBytes(&buf, "ABCD")});
+ try expectFmt("", "{X}", .{try hexToBytes(&buf, "")});
try std.testing.expectError(error.InvalidCharacter, hexToBytes(&buf, "012Z"));
try std.testing.expectError(error.InvalidLength, hexToBytes(&buf, "AAA"));
try std.testing.expectError(error.NoSpaceLeft, hexToBytes(buf[0..1], "ABAB"));
}
-test "formatIntValue with comptime_int" {
- const value: comptime_int = 123456789123456789;
-
- var buf: [20]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buf);
- try formatIntValue(value, "", FormatOptions{}, fbs.writer());
- try std.testing.expectEqualStrings("123456789123456789", fbs.getWritten());
-}
-
-test "formatFloatValue with comptime_float" {
- const value: comptime_float = 1.0;
-
- var buf: [20]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buf);
- try formatFloatValue(value, "", FormatOptions{}, fbs.writer());
- try std.testing.expectEqualStrings(fbs.getWritten(), "1e0");
-
- try expectFmt("1e0", "{}", .{value});
- try expectFmt("1e0", "{}", .{1.0});
-}
-
-test "formatType max_depth" {
- const Vec2 = struct {
- const SelfType = @This();
- x: f32,
- y: f32,
-
- pub fn format(
- self: SelfType,
- comptime fmt: []const u8,
- options: FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- if (fmt.len == 0) {
- return std.fmt.format(writer, "({d:.3},{d:.3})", .{ self.x, self.y });
- } else {
- @compileError("unknown format string: '" ++ fmt ++ "'");
- }
- }
- };
- const E = enum {
- One,
- Two,
- Three,
- };
- const TU = union(enum) {
- const SelfType = @This();
- float: f32,
- int: u32,
- ptr: ?*SelfType,
- };
- const S = struct {
- const SelfType = @This();
- a: ?*SelfType,
- tu: TU,
- e: E,
- vec: Vec2,
- };
-
- var inst = S{
- .a = null,
- .tu = TU{ .ptr = null },
- .e = E.Two,
- .vec = Vec2{ .x = 10.2, .y = 2.22 },
- };
- inst.a = &inst;
- inst.tu.ptr = &inst.tu;
-
- var buf: [1000]u8 = undefined;
- var fbs = std.io.fixedBufferStream(&buf);
- try formatType(inst, "", FormatOptions{}, fbs.writer(), 0);
- try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ ... }", fbs.getWritten());
-
- fbs.reset();
- try formatType(inst, "", FormatOptions{}, fbs.writer(), 1);
- try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten());
-
- fbs.reset();
- try formatType(inst, "", FormatOptions{}, fbs.writer(), 2);
- try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten());
-
- fbs.reset();
- try formatType(inst, "", FormatOptions{}, fbs.writer(), 3);
- try std.testing.expectEqualStrings("fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ .a = fmt.test.formatType max_depth.S{ ... }, .tu = fmt.test.formatType max_depth.TU{ ... }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }, .tu = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ .ptr = fmt.test.formatType max_depth.TU{ ... } } }, .e = fmt.test.formatType max_depth.E.Two, .vec = (10.200,2.220) }", fbs.getWritten());
-
- const vec: @Vector(4, i32) = .{ 1, 2, 3, 4 };
- fbs.reset();
- try formatType(vec, "", FormatOptions{}, fbs.writer(), 0);
- try std.testing.expectEqualStrings("{ ... }", fbs.getWritten());
-
- fbs.reset();
- try formatType(vec, "", FormatOptions{}, fbs.writer(), 1);
- try std.testing.expectEqualStrings("{ 1, 2, 3, 4 }", fbs.getWritten());
-}
-
test "positional" {
try expectFmt("2 1 0", "{2} {1} {0}", .{ @as(usize, 0), @as(usize, 1), @as(usize, 2) });
try expectFmt("2 1 0", "{2} {1} {}", .{ @as(usize, 0), @as(usize, 1), @as(usize, 2) });
@@ -2664,23 +1452,11 @@ test "padding" {
try expectFmt("==================Filled", "{s:=>24}", .{"Filled"});
try expectFmt(" Centered ", "{s:^24}", .{"Centered"});
try expectFmt("-", "{s:-^1}", .{""});
- try expectFmt("==crรชpe===", "{s:=^10}", .{"crรชpe"});
- try expectFmt("=====crรชpe", "{s:=>10}", .{"crรชpe"});
- try expectFmt("crรชpe=====", "{s:=<10}", .{"crรชpe"});
try expectFmt("====a", "{c:=>5}", .{'a'});
try expectFmt("==a==", "{c:=^5}", .{'a'});
try expectFmt("a====", "{c:=<5}", .{'a'});
}
-test "padding fill char utf" {
- try expectFmt("โโcrรชpeโโโ", "{s:โ^10}", .{"crรชpe"});
- try expectFmt("โโโโโcrรชpe", "{s:โ>10}", .{"crรชpe"});
- try expectFmt("crรชpeโโโโโ", "{s:โ<10}", .{"crรชpe"});
- try expectFmt("โโโโa", "{c:โ>5}", .{'a'});
- try expectFmt("โโaโโ", "{c:โ^5}", .{'a'});
- try expectFmt("aโโโโ", "{c:โ<5}", .{'a'});
-}
-
test "decimal float padding" {
const number: f32 = 3.1415;
try expectFmt("left-pad: **3.142\n", "left-pad: {d:*>7.3}\n", .{number});
@@ -2742,16 +1518,16 @@ test "recursive format function" {
Leaf: i32,
Branch: struct { left: *const R, right: *const R },
- pub fn format(self: R, comptime _: []const u8, _: std.fmt.FormatOptions, writer: anytype) !void {
+ pub fn format(self: R, writer: *Writer, comptime _: []const u8) Writer.Error!void {
return switch (self) {
.Leaf => |n| std.fmt.format(writer, "Leaf({})", .{n}),
- .Branch => |b| std.fmt.format(writer, "Branch({}, {})", .{ b.left, b.right }),
+ .Branch => |b| std.fmt.format(writer, "Branch({f}, {f})", .{ b.left, b.right }),
};
}
};
- var r = R{ .Leaf = 1 };
- try expectFmt("Leaf(1)\n", "{}\n", .{&r});
+ var r: R = .{ .Leaf = 1 };
+ try expectFmt("Leaf(1)\n", "{f}\n", .{&r});
}
pub const hex_charset = "0123456789abcdef";
@@ -2785,54 +1561,39 @@ test hex {
test "parser until" {
{ // return substring till ':'
- var parser: Parser = .{
- .iter = .{ .bytes = "abc:1234", .i = 0 },
- };
+ var parser: Parser = .{ .bytes = "abc:1234", .i = 0 };
try testing.expectEqualStrings("abc", parser.until(':'));
}
{ // return the entire string - `ch` not found
- var parser: Parser = .{
- .iter = .{ .bytes = "abc1234", .i = 0 },
- };
+ var parser: Parser = .{ .bytes = "abc1234", .i = 0 };
try testing.expectEqualStrings("abc1234", parser.until(':'));
}
{ // substring is empty - `ch` is the only character
- var parser: Parser = .{
- .iter = .{ .bytes = ":", .i = 0 },
- };
+ var parser: Parser = .{ .bytes = ":", .i = 0 };
try testing.expectEqualStrings("", parser.until(':'));
}
{ // empty string and `ch` not found
- var parser: Parser = .{
- .iter = .{ .bytes = "", .i = 0 },
- };
+ var parser: Parser = .{ .bytes = "", .i = 0 };
try testing.expectEqualStrings("", parser.until(':'));
}
{ // substring starts at index 2 and goes upto `ch`
- var parser: Parser = .{
- .iter = .{ .bytes = "abc:1234", .i = 2 },
- };
+ var parser: Parser = .{ .bytes = "abc:1234", .i = 2 };
try testing.expectEqualStrings("c", parser.until(':'));
}
{ // substring starts at index 4 and goes upto the end - `ch` not found
- var parser: Parser = .{
- .iter = .{ .bytes = "abc1234", .i = 4 },
- };
+ var parser: Parser = .{ .bytes = "abc1234", .i = 4 };
try testing.expectEqualStrings("234", parser.until(':'));
}
}
test "parser peek" {
{ // start iteration from the first index
- var parser: Parser = .{
- .iter = .{ .bytes = "hello world", .i = 0 },
- };
-
+ var parser: Parser = .{ .bytes = "hello world", .i = 0 };
try testing.expectEqual('h', parser.peek(0));
try testing.expectEqual('e', parser.peek(1));
try testing.expectEqual(' ', parser.peek(5));
@@ -2841,9 +1602,7 @@ test "parser peek" {
}
{ // start iteration from the second last index
- var parser: Parser = .{
- .iter = .{ .bytes = "hello world!", .i = 10 },
- };
+ var parser: Parser = .{ .bytes = "hello world!", .i = 10 };
try testing.expectEqual('d', parser.peek(0));
try testing.expectEqual('!', parser.peek(1));
@@ -2851,18 +1610,14 @@ test "parser peek" {
}
{ // start iteration beyond the length of the string
- var parser: Parser = .{
- .iter = .{ .bytes = "hello", .i = 5 },
- };
+ var parser: Parser = .{ .bytes = "hello", .i = 5 };
try testing.expectEqual(null, parser.peek(0));
try testing.expectEqual(null, parser.peek(1));
}
{ // empty string
- var parser: Parser = .{
- .iter = .{ .bytes = "", .i = 0 },
- };
+ var parser: Parser = .{ .bytes = "", .i = 0 };
try testing.expectEqual(null, parser.peek(0));
try testing.expectEqual(null, parser.peek(2));
@@ -2871,78 +1626,78 @@ test "parser peek" {
test "parser char" {
// character exists - iterator at 0
- var parser: Parser = .{ .iter = .{ .bytes = "~~hello", .i = 0 } };
+ var parser: Parser = .{ .bytes = "~~hello", .i = 0 };
try testing.expectEqual('~', parser.char());
// character exists - iterator in the middle
- parser = .{ .iter = .{ .bytes = "~~hello", .i = 3 } };
+ parser = .{ .bytes = "~~hello", .i = 3 };
try testing.expectEqual('e', parser.char());
// character exists - iterator at the end
- parser = .{ .iter = .{ .bytes = "~~hello", .i = 6 } };
+ parser = .{ .bytes = "~~hello", .i = 6 };
try testing.expectEqual('o', parser.char());
// character doesn't exist - iterator beyond the length of the string
- parser = .{ .iter = .{ .bytes = "~~hello", .i = 7 } };
+ parser = .{ .bytes = "~~hello", .i = 7 };
try testing.expectEqual(null, parser.char());
}
test "parser maybe" {
// character exists - iterator at 0
- var parser: Parser = .{ .iter = .{ .bytes = "hello world", .i = 0 } };
+ var parser: Parser = .{ .bytes = "hello world", .i = 0 };
try testing.expect(parser.maybe('h'));
// character exists - iterator at space
- parser = .{ .iter = .{ .bytes = "hello world", .i = 5 } };
+ parser = .{ .bytes = "hello world", .i = 5 };
try testing.expect(parser.maybe(' '));
// character exists - iterator at the end
- parser = .{ .iter = .{ .bytes = "hello world", .i = 10 } };
+ parser = .{ .bytes = "hello world", .i = 10 };
try testing.expect(parser.maybe('d'));
// character doesn't exist - iterator beyond the length of the string
- parser = .{ .iter = .{ .bytes = "hello world", .i = 11 } };
+ parser = .{ .bytes = "hello world", .i = 11 };
try testing.expect(!parser.maybe('e'));
}
test "parser number" {
// input is a single digit natural number - iterator at 0
- var parser: Parser = .{ .iter = .{ .bytes = "7", .i = 0 } };
+ var parser: Parser = .{ .bytes = "7", .i = 0 };
try testing.expect(7 == parser.number());
// input is a two digit natural number - iterator at 1
- parser = .{ .iter = .{ .bytes = "29", .i = 1 } };
+ parser = .{ .bytes = "29", .i = 1 };
try testing.expect(9 == parser.number());
// input is a two digit natural number - iterator beyond the length of the string
- parser = .{ .iter = .{ .bytes = "32", .i = 2 } };
+ parser = .{ .bytes = "32", .i = 2 };
try testing.expectEqual(null, parser.number());
// input is an integer
- parser = .{ .iter = .{ .bytes = "0", .i = 0 } };
+ parser = .{ .bytes = "0", .i = 0 };
try testing.expect(0 == parser.number());
// input is a negative integer
- parser = .{ .iter = .{ .bytes = "-2", .i = 0 } };
+ parser = .{ .bytes = "-2", .i = 0 };
try testing.expectEqual(null, parser.number());
// input is a string
- parser = .{ .iter = .{ .bytes = "no_number", .i = 2 } };
+ parser = .{ .bytes = "no_number", .i = 2 };
try testing.expectEqual(null, parser.number());
// input is a single character string
- parser = .{ .iter = .{ .bytes = "n", .i = 0 } };
+ parser = .{ .bytes = "n", .i = 0 };
try testing.expectEqual(null, parser.number());
// input is an empty string
- parser = .{ .iter = .{ .bytes = "", .i = 0 } };
+ parser = .{ .bytes = "", .i = 0 };
try testing.expectEqual(null, parser.number());
}
test "parser specifier" {
{ // input string is a digit; iterator at 0
const expected: Specifier = Specifier{ .number = 1 };
- var parser: Parser = .{ .iter = .{ .bytes = "1", .i = 0 } };
+ var parser: Parser = .{ .bytes = "1", .i = 0 };
const result = try parser.specifier();
try testing.expect(expected.number == result.number);
@@ -2950,7 +1705,7 @@ test "parser specifier" {
{ // input string is a two digit number; iterator at 0
const digit: Specifier = Specifier{ .number = 42 };
- var parser: Parser = .{ .iter = .{ .bytes = "42", .i = 0 } };
+ var parser: Parser = .{ .bytes = "42", .i = 0 };
const result = try parser.specifier();
try testing.expect(digit.number == result.number);
@@ -2958,7 +1713,7 @@ test "parser specifier" {
{ // input string is a two digit number digit; iterator at 1
const digit: Specifier = Specifier{ .number = 8 };
- var parser: Parser = .{ .iter = .{ .bytes = "28", .i = 1 } };
+ var parser: Parser = .{ .bytes = "28", .i = 1 };
const result = try parser.specifier();
try testing.expect(digit.number == result.number);
@@ -2966,7 +1721,7 @@ test "parser specifier" {
{ // input string is a two digit number with square brackets; iterator at 0
const digit: Specifier = Specifier{ .named = "15" };
- var parser: Parser = .{ .iter = .{ .bytes = "[15]", .i = 0 } };
+ var parser: Parser = .{ .bytes = "[15]", .i = 0 };
const result = try parser.specifier();
try testing.expectEqualStrings(digit.named, result.named);
@@ -2974,21 +1729,21 @@ test "parser specifier" {
{ // input string is not a number and contains square brackets; iterator at 0
const digit: Specifier = Specifier{ .named = "hello" };
- var parser: Parser = .{ .iter = .{ .bytes = "[hello]", .i = 0 } };
+ var parser: Parser = .{ .bytes = "[hello]", .i = 0 };
const result = try parser.specifier();
try testing.expectEqualStrings(digit.named, result.named);
}
{ // input string is not a number and doesn't contain closing square bracket; iterator at 0
- var parser: Parser = .{ .iter = .{ .bytes = "[hello", .i = 0 } };
+ var parser: Parser = .{ .bytes = "[hello", .i = 0 };
const result = parser.specifier();
try testing.expectError(@field(anyerror, "Expected closing ]"), result);
}
{ // input string is not a number and doesn't contain closing square bracket; iterator at 2
- var parser: Parser = .{ .iter = .{ .bytes = "[[[[hello", .i = 2 } };
+ var parser: Parser = .{ .bytes = "[[[[hello", .i = 2 };
const result = parser.specifier();
try testing.expectError(@field(anyerror, "Expected closing ]"), result);
@@ -2996,7 +1751,7 @@ test "parser specifier" {
{ // input string is not a number and contains unbalanced square brackets; iterator at 0
const digit: Specifier = Specifier{ .named = "[[hello" };
- var parser: Parser = .{ .iter = .{ .bytes = "[[[hello]", .i = 0 } };
+ var parser: Parser = .{ .bytes = "[[[hello]", .i = 0 };
const result = try parser.specifier();
try testing.expectEqualStrings(digit.named, result.named);
@@ -3004,7 +1759,7 @@ test "parser specifier" {
{ // input string is not a number and contains unbalanced square brackets; iterator at 1
const digit: Specifier = Specifier{ .named = "[[hello" };
- var parser: Parser = .{ .iter = .{ .bytes = "[[[[hello]]]]]", .i = 1 } };
+ var parser: Parser = .{ .bytes = "[[[[hello]]]]]", .i = 1 };
const result = try parser.specifier();
try testing.expectEqualStrings(digit.named, result.named);
@@ -3012,9 +1767,13 @@ test "parser specifier" {
{ // input string is neither a digit nor a named argument
const char: Specifier = Specifier{ .none = {} };
- var parser: Parser = .{ .iter = .{ .bytes = "hello", .i = 0 } };
+ var parser: Parser = .{ .bytes = "hello", .i = 0 };
const result = try parser.specifier();
try testing.expectEqual(char.none, result.none);
}
}
+
+test {
+ _ = float;
+}
lib/std/http.zig
@@ -1,3 +1,7 @@
+const builtin = @import("builtin");
+const std = @import("std.zig");
+const assert = std.debug.assert;
+
pub const Client = @import("http/Client.zig");
pub const Server = @import("http/Server.zig");
pub const protocol = @import("http/protocol.zig");
@@ -38,8 +42,9 @@ pub const Method = enum(u64) {
return x;
}
- pub fn write(self: Method, w: anytype) !void {
- const bytes = std.mem.asBytes(&@intFromEnum(self));
+ pub fn format(self: Method, w: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
+ comptime assert(f.len == 0);
+ const bytes: []const u8 = @ptrCast(&@intFromEnum(self));
const str = std.mem.sliceTo(bytes, 0);
try w.writeAll(str);
}
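
Editor's sketch (not part of the patch): `Method` now prints through a `format` method on the new `*std.io.Writer` instead of the old `write`; a fixed writer as used elsewhere in this diff is assumed.

const std = @import("std");

test "Method.format sketch" {
    var buf: [8]u8 = undefined;
    var w: std.io.Writer = .fixed(&buf);
    try std.http.Method.GET.format(&w, "");
    try std.testing.expectEqualStrings("GET", w.buffered());
}
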
@@ -77,7 +82,9 @@ pub const Method = enum(u64) {
};
}
- /// An HTTP method is idempotent if an identical request can be made once or several times in a row with the same effect while leaving the server in the same state.
+ /// An HTTP method is idempotent if an identical request can be made once
+ /// or several times in a row with the same effect while leaving the server
+ /// in the same state.
///
/// https://developer.mozilla.org/en-US/docs/Glossary/Idempotent
///
@@ -90,7 +97,8 @@ pub const Method = enum(u64) {
};
}
- /// A cacheable response is an HTTP response that can be cached, that is stored to be retrieved and used later, saving a new request to the server.
+ /// A cacheable response can be stored to be retrieved and used later,
+ /// saving a new request to the server.
///
/// https://developer.mozilla.org/en-US/docs/Glossary/cacheable
///
@@ -282,10 +290,10 @@ pub const Status = enum(u10) {
}
};
+/// compression is intentionally omitted here since it is handled in `ContentEncoding`.
pub const TransferEncoding = enum {
chunked,
none,
- // compression is intentionally omitted here, as std.http.Client stores it as content-encoding
};
pub const ContentEncoding = enum {
@@ -308,9 +316,6 @@ pub const Header = struct {
value: []const u8,
};
-const builtin = @import("builtin");
-const std = @import("std.zig");
-
test {
if (builtin.os.tag != .wasi) {
_ = Client;
lib/std/io.zig
@@ -364,6 +364,32 @@ pub fn GenericWriter(
const ptr: *const Context = @alignCast(@ptrCast(context));
return writeFn(ptr.*, bytes);
}
+
+ /// Helper for bridging to the new `Writer` API while upgrading.
+ pub fn adaptToNewApi(self: *const Self) Adapter {
+ return .{
+ .derp_writer = self.*,
+ .new_interface = .{
+ .buffer = &.{},
+ .vtable = &.{ .drain = Adapter.drain },
+ },
+ };
+ }
+
+ pub const Adapter = struct {
+ derp_writer: Self,
+ new_interface: Writer,
+ err: ?Error = null,
+
+ fn drain(w: *Writer, data: []const []const u8, splat: usize) Writer.Error!usize {
+ _ = splat;
+ const a: *@This() = @fieldParentPtr("new_interface", w);
+ return a.derp_writer.write(data[0]) catch |err| {
+ a.err = err;
+ return error.WriteFailed;
+ };
+ }
+ };
};
}
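
Editor's sketch (not part of the patch) of bridging an old `GenericWriter` to the new interface via `adaptToNewApi`; it assumes `std.io.fixedBufferStream` is still available alongside the new API and that the unbuffered adapter needs no flush.

const std = @import("std");

test "adaptToNewApi sketch" {
    var buf: [32]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&buf);
    const old_writer = fbs.writer();
    // The adapter exposes `new_interface`, a *Writer whose drain forwards to the old writer.
    var adapter = old_writer.adaptToNewApi();
    try adapter.new_interface.print("{d} bottles", .{99});
    try std.testing.expectEqualStrings("99 bottles", fbs.getWritten());
}
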
@@ -419,7 +445,7 @@ pub const tty = @import("io/tty.zig");
/// A Writer that doesn't write to anything.
pub const null_writer: NullWriter = .{ .context = {} };
-pub const NullWriter = Writer(void, error{}, dummyWrite);
+pub const NullWriter = GenericWriter(void, error{}, dummyWrite);
fn dummyWrite(context: void, data: []const u8) error{}!usize {
_ = context;
return data.len;
lib/std/log.zig
@@ -47,7 +47,7 @@
//! // Print the message to stderr, silently ignoring any errors
//! std.debug.lockStdErr();
//! defer std.debug.unlockStdErr();
-//! const stderr = std.fs.File.stderr().writer();
+//! const stderr = std.fs.File.stderr().deprecatedWriter();
//! nosuspend stderr.print(prefix ++ format ++ "\n", args) catch return;
//! }
//!
@@ -148,7 +148,7 @@ pub fn defaultLog(
) void {
const level_txt = comptime message_level.asText();
const prefix2 = if (scope == .default) ": " else "(" ++ @tagName(scope) ++ "): ";
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
var bw = std.io.bufferedWriter(stderr);
const writer = bw.writer();
lib/std/multi_array_list.zig
@@ -991,6 +991,7 @@ test "0 sized struct" {
test "struct with many fields" {
const ManyFields = struct {
fn Type(count: comptime_int) type {
+ @setEvalBranchQuota(50000);
var fields: [count]std.builtin.Type.StructField = undefined;
for (0..count) |i| {
fields[i] = .{
lib/std/net.zig
@@ -161,22 +161,14 @@ pub const Address = extern union {
}
}
- pub fn format(
- self: Address,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
+ pub fn format(self: Address, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
switch (self.any.family) {
- posix.AF.INET => try self.in.format(fmt, options, out_stream),
- posix.AF.INET6 => try self.in6.format(fmt, options, out_stream),
+ posix.AF.INET => try self.in.format(w, fmt),
+ posix.AF.INET6 => try self.in6.format(w, fmt),
posix.AF.UNIX => {
- if (!has_unix_sockets) {
- unreachable;
- }
-
- try std.fmt.format(out_stream, "{s}", .{std.mem.sliceTo(&self.un.path, 0)});
+ if (!has_unix_sockets) unreachable;
+ try w.writeAll(std.mem.sliceTo(&self.un.path, 0));
},
else => unreachable,
}
@@ -349,22 +341,10 @@ pub const Ip4Address = extern struct {
self.sa.port = mem.nativeToBig(u16, port);
}
- pub fn format(
- self: Ip4Address,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
- _ = options;
- const bytes = @as(*const [4]u8, @ptrCast(&self.sa.addr));
- try std.fmt.format(out_stream, "{}.{}.{}.{}:{}", .{
- bytes[0],
- bytes[1],
- bytes[2],
- bytes[3],
- self.getPort(),
- });
+ pub fn format(self: Ip4Address, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
+ const bytes: *const [4]u8 = @ptrCast(&self.sa.addr);
+ try w.print("{d}.{d}.{d}.{d}:{d}", .{ bytes[0], bytes[1], bytes[2], bytes[3], self.getPort() });
}
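
Editor's sketch (not part of the patch): with the new signatures, address values print through the "{f}" specifier; it assumes "{f}" with no trailing spec passes an empty format string, as in the SemanticVersion tests later in this diff.

const std = @import("std");

test "Address format sketch" {
    const addr = try std.net.Address.parseIp4("127.0.0.1", 8080);
    try std.testing.expectFmt("127.0.0.1:8080", "{f}", .{addr});
}
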
pub fn getOsSockLen(self: Ip4Address) posix.socklen_t {
@@ -653,17 +633,11 @@ pub const Ip6Address = extern struct {
self.sa.port = mem.nativeToBig(u16, port);
}
- pub fn format(
- self: Ip6Address,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
- ) !void {
- if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
- _ = options;
+ pub fn format(self: Ip6Address, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
const port = mem.bigToNative(u16, self.sa.port);
if (mem.eql(u8, self.sa.addr[0..12], &[_]u8{ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff })) {
- try std.fmt.format(out_stream, "[::ffff:{}.{}.{}.{}]:{}", .{
+ try w.print("[::ffff:{d}.{d}.{d}.{d}]:{d}", .{
self.sa.addr[12],
self.sa.addr[13],
self.sa.addr[14],
@@ -711,14 +685,14 @@ pub const Ip6Address = extern struct {
longest_len = 0;
}
- try out_stream.writeAll("[");
+ try w.writeAll("[");
var i: usize = 0;
var abbrv = false;
while (i < native_endian_parts.len) : (i += 1) {
if (i == longest_start) {
// Emit "::" for the longest zero run
if (!abbrv) {
- try out_stream.writeAll(if (i == 0) "::" else ":");
+ try w.writeAll(if (i == 0) "::" else ":");
abbrv = true;
}
i += longest_len - 1; // Skip the compressed range
@@ -727,12 +701,12 @@ pub const Ip6Address = extern struct {
if (abbrv) {
abbrv = false;
}
- try std.fmt.format(out_stream, "{x}", .{native_endian_parts[i]});
+ try w.print("{x}", .{native_endian_parts[i]});
if (i != native_endian_parts.len - 1) {
- try out_stream.writeAll(":");
+ try w.writeAll(":");
}
}
- try std.fmt.format(out_stream, "]:{}", .{port});
+ try w.print("]:{}", .{port});
}
pub fn getOsSockLen(self: Ip6Address) posix.socklen_t {
@@ -894,7 +868,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) Get
const name_c = try allocator.dupeZ(u8, name);
defer allocator.free(name_c);
- const port_c = try std.fmt.allocPrintZ(allocator, "{}", .{port});
+ const port_c = try std.fmt.allocPrintSentinel(allocator, "{}", .{port}, 0);
defer allocator.free(port_c);
const ws2_32 = windows.ws2_32;
@@ -966,7 +940,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) Get
const name_c = try allocator.dupeZ(u8, name);
defer allocator.free(name_c);
- const port_c = try std.fmt.allocPrintZ(allocator, "{}", .{port});
+ const port_c = try std.fmt.allocPrintSentinel(allocator, "{}", .{port}, 0);
defer allocator.free(port_c);
const hints: posix.addrinfo = .{
@@ -1356,7 +1330,7 @@ fn linuxLookupNameFromHosts(
};
defer file.close();
- var buffered_reader = std.io.bufferedReader(file.reader());
+ var buffered_reader = std.io.bufferedReader(file.deprecatedReader());
const reader = buffered_reader.reader();
var line_buf: [512]u8 = undefined;
while (reader.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
@@ -1557,7 +1531,7 @@ fn getResolvConf(allocator: mem.Allocator, rc: *ResolvConf) !void {
};
defer file.close();
- var buf_reader = std.io.bufferedReader(file.reader());
+ var buf_reader = std.io.bufferedReader(file.deprecatedReader());
const stream = buf_reader.reader();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
lib/std/os.zig
@@ -31,6 +31,7 @@ pub const uefi = @import("os/uefi.zig");
pub const wasi = @import("os/wasi.zig");
pub const emscripten = @import("os/emscripten.zig");
pub const windows = @import("os/windows.zig");
+pub const freebsd = @import("os/freebsd.zig");
test {
_ = linux;
lib/std/posix.zig
@@ -651,7 +651,7 @@ fn getRandomBytesDevURandom(buf: []u8) !void {
}
const file: fs.File = .{ .handle = fd };
- const stream = file.reader();
+ const stream = file.deprecatedReader();
stream.readNoEof(buf) catch return error.Unexpected;
}
lib/std/process.zig
@@ -1553,7 +1553,7 @@ pub fn posixGetUserInfo(name: []const u8) !UserInfo {
const file = try std.fs.openFileAbsolute("/etc/passwd", .{});
defer file.close();
- const reader = file.reader();
+ const reader = file.deprecatedReader();
const State = enum {
Start,
@@ -1895,7 +1895,7 @@ pub fn createEnvironFromMap(
var i: usize = 0;
if (zig_progress_action == .add) {
- envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?});
+ envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0);
i += 1;
}
@@ -1906,16 +1906,16 @@ pub fn createEnvironFromMap(
.add => unreachable,
.delete => continue,
.edit => {
- envp_buf[i] = try std.fmt.allocPrintZ(arena, "{s}={d}", .{
+ envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "{s}={d}", .{
pair.key_ptr.*, options.zig_progress_fd.?,
- });
+ }, 0);
i += 1;
continue;
},
.nothing => {},
};
- envp_buf[i] = try std.fmt.allocPrintZ(arena, "{s}={s}", .{ pair.key_ptr.*, pair.value_ptr.* });
+ envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "{s}={s}", .{ pair.key_ptr.*, pair.value_ptr.* }, 0);
i += 1;
}
}
@@ -1965,7 +1965,7 @@ pub fn createEnvironFromExisting(
var existing_index: usize = 0;
if (zig_progress_action == .add) {
- envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?});
+ envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0);
i += 1;
}
@@ -1974,7 +1974,7 @@ pub fn createEnvironFromExisting(
.add => unreachable,
.delete => continue,
.edit => {
- envp_buf[i] = try std.fmt.allocPrintZ(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?});
+ envp_buf[i] = try std.fmt.allocPrintSentinel(arena, "ZIG_PROGRESS={d}", .{options.zig_progress_fd.?}, 0);
i += 1;
continue;
},
lib/std/Progress.zig
@@ -9,6 +9,7 @@ const Progress = @This();
const posix = std.posix;
const is_big_endian = builtin.cpu.arch.endian() == .big;
const is_windows = builtin.os.tag == .windows;
+const Writer = std.io.Writer;
/// `null` if the current node (and its children) should
/// not print on update()
@@ -606,6 +607,36 @@ pub fn unlockStdErr() void {
stderr_mutex.unlock();
}
+/// Protected by `stderr_mutex`.
+const stderr_writer: *Writer = &stderr_file_writer.interface;
+/// Protected by `stderr_mutex`.
+var stderr_file_writer: std.fs.File.Writer = .{
+ .interface = std.fs.File.Writer.initInterface(&.{}),
+ .file = if (is_windows) undefined else .stderr(),
+ .mode = .streaming,
+};
+
+/// Allows the caller to freely write to the returned `Writer`,
+/// initialized with `buffer`, until `unlockStderrWriter` is called.
+///
+/// During the lock, any `std.Progress` information is cleared from the terminal.
+///
+/// The lock is recursive; the same thread may hold the lock multiple times.
+pub fn lockStderrWriter(buffer: []u8) *Writer {
+ stderr_mutex.lock();
+ clearWrittenWithEscapeCodes() catch {};
+ if (is_windows) stderr_file_writer.file = .stderr();
+ stderr_writer.flush() catch {};
+ stderr_writer.buffer = buffer;
+ return stderr_writer;
+}
+
+pub fn unlockStderrWriter() void {
+ stderr_writer.flush() catch {};
+ stderr_writer.buffer = &.{};
+ stderr_mutex.unlock();
+}
+
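
Editor's sketch (not part of the patch) of the intended pairing of the new stderr lock; `reportStatus` is a hypothetical caller.

const std = @import("std");

fn reportStatus(done: usize, total: usize) void {
    var buf: [64]u8 = undefined;
    // Locks stderr, clears any progress output, and hands back a buffered *Writer.
    const w = std.Progress.lockStderrWriter(&buf);
    // Flushes the buffer and releases the lock.
    defer std.Progress.unlockStderrWriter();
    w.print("{d}/{d} steps complete\n", .{ done, total }) catch {};
}
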
fn ipcThreadRun(fd: posix.fd_t) anyerror!void {
// Store this data in the thread so that it does not need to be part of the
// linker data of the main executable.
lib/std/SemanticVersion.zig
@@ -150,17 +150,11 @@ fn parseNum(text: []const u8) error{ InvalidVersion, Overflow }!usize {
};
}
-pub fn format(
- self: Version,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- out_stream: anytype,
-) !void {
- _ = options;
+pub fn format(self: Version, w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
if (fmt.len != 0) std.fmt.invalidFmtError(fmt, self);
- try std.fmt.format(out_stream, "{d}.{d}.{d}", .{ self.major, self.minor, self.patch });
- if (self.pre) |pre| try std.fmt.format(out_stream, "-{s}", .{pre});
- if (self.build) |build| try std.fmt.format(out_stream, "+{s}", .{build});
+ try w.print("{d}.{d}.{d}", .{ self.major, self.minor, self.patch });
+ if (self.pre) |pre| try w.print("-{s}", .{pre});
+ if (self.build) |build| try w.print("+{s}", .{build});
}
const expect = std.testing.expect;
@@ -202,7 +196,7 @@ test format {
"1.0.0+0.build.1-rc.10000aaa-kk-0.1",
"5.4.0-1018-raspi",
"5.7.123",
- }) |valid| try std.testing.expectFmt(valid, "{}", .{try parse(valid)});
+ }) |valid| try std.testing.expectFmt(valid, "{f}", .{try parse(valid)});
// Invalid version strings should be rejected.
for ([_][]const u8{
@@ -269,12 +263,12 @@ test format {
// Valid version string that may overflow.
const big_valid = "99999999999999999999999.999999999999999999.99999999999999999";
if (parse(big_valid)) |ver| {
- try std.testing.expectFmt(big_valid, "{}", .{ver});
+ try std.testing.expectFmt(big_valid, "{f}", .{ver});
} else |err| try expect(err == error.Overflow);
// Invalid version string that may overflow.
const big_invalid = "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12";
- if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {}", .{ver}) else |_| {}
+ if (parse(big_invalid)) |ver| std.debug.panic("expected error, found {f}", .{ver}) else |_| {}
}
test "precedence" {
lib/std/Target.zig
@@ -301,29 +301,24 @@ pub const Os = struct {
/// This function is defined to serialize a Zig source code representation of this
/// type, that, when parsed, will deserialize into the same data.
- pub fn format(
- ver: WindowsVersion,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(ver: WindowsVersion, w: *std.io.Writer, comptime f: []const u8) std.io.Writer.Error!void {
const maybe_name = std.enums.tagName(WindowsVersion, ver);
- if (comptime std.mem.eql(u8, fmt_str, "s")) {
+ if (comptime std.mem.eql(u8, f, "s")) {
if (maybe_name) |name|
- try writer.print(".{s}", .{name})
+ try w.print(".{s}", .{name})
else
- try writer.print(".{d}", .{@intFromEnum(ver)});
- } else if (comptime std.mem.eql(u8, fmt_str, "c")) {
+ try w.print(".{d}", .{@intFromEnum(ver)});
+ } else if (comptime std.mem.eql(u8, f, "c")) {
if (maybe_name) |name|
- try writer.print(".{s}", .{name})
+ try w.print(".{s}", .{name})
else
- try writer.print("@enumFromInt(0x{X:0>8})", .{@intFromEnum(ver)});
- } else if (fmt_str.len == 0) {
+ try w.print("@enumFromInt(0x{X:0>8})", .{@intFromEnum(ver)});
+ } else if (f.len == 0) {
if (maybe_name) |name|
- try writer.print("WindowsVersion.{s}", .{name})
+ try w.print("WindowsVersion.{s}", .{name})
else
- try writer.print("WindowsVersion(0x{X:0>8})", .{@intFromEnum(ver)});
- } else std.fmt.invalidFmtError(fmt_str, ver);
+ try w.print("WindowsVersion(0x{X:0>8})", .{@intFromEnum(ver)});
+ } else std.fmt.invalidFmtError(f, ver);
}
};
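
Editor's sketch (not part of the patch): the per-case format strings now ride on the "f" specifier, following the "{fd}" and "{fraw}" pattern used elsewhere in this diff, so "{fs}" is assumed to select the "s" branch.

const std = @import("std");

test "WindowsVersion format sketch" {
    const ver: std.Target.Os.WindowsVersion = .win10;
    try std.testing.expectFmt(".win10", "{fs}", .{ver});
}
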
lib/std/testing.zig
@@ -105,7 +105,7 @@ fn expectEqualInner(comptime T: type, expected: T, actual: T) !void {
.error_set,
=> {
if (actual != expected) {
- print("expected {}, found {}\n", .{ expected, actual });
+ print("expected {any}, found {any}\n", .{ expected, actual });
return error.TestExpectedEqual;
}
},
@@ -267,9 +267,13 @@ test "expectEqual null" {
/// This function is intended to be used only in tests. When the formatted result of the template
/// and its arguments does not equal the expected text, it prints diagnostics to stderr to show how
-/// they are not equal, then returns an error. It depends on `expectEqualStrings()` for printing
+/// they are not equal, then returns an error. It depends on `expectEqualStrings` for printing
/// diagnostics.
pub fn expectFmt(expected: []const u8, comptime template: []const u8, args: anytype) !void {
+ if (@inComptime()) {
+ var buffer: [std.fmt.count(template, args)]u8 = undefined;
+ return expectEqualStrings(expected, try std.fmt.bufPrint(&buffer, template, args));
+ }
const actual = try std.fmt.allocPrint(allocator, template, args);
defer allocator.free(actual);
return expectEqualStrings(expected, actual);
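
Editor's sketch (not part of the patch), assuming the new comptime path handles simple string formatting:

const std = @import("std");

test "comptime expectFmt sketch" {
    // Runs entirely at comptime: the buffer is sized with std.fmt.count.
    comptime {
        std.testing.expectFmt("abc", "{s}", .{"abc"}) catch unreachable;
    }
}
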
@@ -415,7 +419,7 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const
print("... truncated ...\n", .{});
}
}
- differ.write(stderr.writer()) catch {};
+ differ.write(stderr.deprecatedWriter()) catch {};
if (expected_truncated) {
const end_offset = window_start + expected_window.len;
const num_missing_items = expected.len - (window_start + expected_window.len);
@@ -437,7 +441,7 @@ pub fn expectEqualSlices(comptime T: type, expected: []const T, actual: []const
print("... truncated ...\n", .{});
}
}
- differ.write(stderr.writer()) catch {};
+ differ.write(stderr.deprecatedWriter()) catch {};
if (actual_truncated) {
const end_offset = window_start + actual_window.len;
const num_missing_items = actual.len - (window_start + actual_window.len);
@@ -637,6 +641,11 @@ pub fn tmpDir(opts: std.fs.Dir.OpenOptions) TmpDir {
pub fn expectEqualStrings(expected: []const u8, actual: []const u8) !void {
if (std.mem.indexOfDiff(u8, actual, expected)) |diff_index| {
+ if (@inComptime()) {
+ @compileError(std.fmt.comptimePrint("\nexpected:\n{s}\nfound:\n{s}\ndifference starts at index {d}", .{
+ expected, actual, diff_index,
+ }));
+ }
print("\n====== expected this output: =========\n", .{});
printWithVisibleNewlines(expected);
print("\n======== instead found this: =========\n", .{});
@@ -1108,7 +1117,7 @@ pub fn checkAllAllocationFailures(backing_allocator: std.mem.Allocator, comptime
const arg_i_str = comptime str: {
var str_buf: [100]u8 = undefined;
const args_i = i + 1;
- const str_len = std.fmt.formatIntBuf(&str_buf, args_i, 10, .lower, .{});
+ const str_len = std.fmt.printInt(&str_buf, args_i, 10, .lower, .{});
break :str str_buf[0..str_len];
};
@field(args, arg_i_str) = @field(extra_args, field.name);
@@ -1138,7 +1147,7 @@ pub fn checkAllAllocationFailures(backing_allocator: std.mem.Allocator, comptime
error.OutOfMemory => {
if (failing_allocator_inst.allocated_bytes != failing_allocator_inst.freed_bytes) {
print(
- "\nfail_index: {d}/{d}\nallocated bytes: {d}\nfreed bytes: {d}\nallocations: {d}\ndeallocations: {d}\nallocation that was made to fail: {}",
+ "\nfail_index: {d}/{d}\nallocated bytes: {d}\nfreed bytes: {d}\nallocations: {d}\ndeallocations: {d}\nallocation that was made to fail: {f}",
.{
fail_index,
needed_alloc_count,
lib/std/Thread.zig
@@ -167,7 +167,7 @@ pub fn setName(self: Thread, name: []const u8) SetNameError!void {
const file = try std.fs.cwd().openFile(path, .{ .mode = .write_only });
defer file.close();
- try file.writer().writeAll(name);
+ try file.deprecatedWriter().writeAll(name);
return;
},
.windows => {
@@ -281,7 +281,7 @@ pub fn getName(self: Thread, buffer_ptr: *[max_name_len:0]u8) GetNameError!?[]co
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
- const data_len = try file.reader().readAll(buffer_ptr[0 .. max_name_len + 1]);
+ const data_len = try file.deprecatedReader().readAll(buffer_ptr[0 .. max_name_len + 1]);
return if (data_len >= 1) buffer[0 .. data_len - 1] else null;
},
@@ -1163,7 +1163,7 @@ const LinuxThreadImpl = struct {
fn getCurrentId() Id {
return tls_thread_id orelse {
- const tid = @as(u32, @bitCast(linux.gettid()));
+ const tid: u32 = @bitCast(linux.gettid());
tls_thread_id = tid;
return tid;
};
lib/std/unicode.zig
@@ -9,6 +9,7 @@ const native_endian = builtin.cpu.arch.endian();
///
/// See also: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character
pub const replacement_character: u21 = 0xFFFD;
+pub const replacement_character_utf8: [3]u8 = utf8EncodeComptime(replacement_character);
/// Returns how many bytes the UTF-8 representation would require
/// for the given codepoint.
@@ -802,14 +803,7 @@ fn testDecode(bytes: []const u8) !u21 {
/// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD)
/// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of
/// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder
-fn formatUtf8(
- utf8: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = fmt;
- _ = options;
+fn formatUtf8(utf8: []const u8, writer: *std.io.Writer) std.io.Writer.Error!void {
var buf: [300]u8 = undefined; // just an arbitrary size
var u8len: usize = 0;
@@ -898,27 +892,27 @@ fn formatUtf8(
/// Ill-formed UTF-8 byte sequences are replaced by the replacement character (U+FFFD)
/// according to "U+FFFD Substitution of Maximal Subparts" from Chapter 3 of
/// the Unicode standard, and as specified by https://encoding.spec.whatwg.org/#utf-8-decoder
-pub fn fmtUtf8(utf8: []const u8) std.fmt.Formatter(formatUtf8) {
+pub fn fmtUtf8(utf8: []const u8) std.fmt.Formatter([]const u8, formatUtf8) {
return .{ .data = utf8 };
}
test fmtUtf8 {
const expectFmt = testing.expectFmt;
- try expectFmt("", "{}", .{fmtUtf8("")});
- try expectFmt("foo", "{}", .{fmtUtf8("foo")});
- try expectFmt("๐ท", "{}", .{fmtUtf8("๐ท")});
+ try expectFmt("", "{f}", .{fmtUtf8("")});
+ try expectFmt("foo", "{f}", .{fmtUtf8("foo")});
+ try expectFmt("๐ท", "{f}", .{fmtUtf8("๐ท")});
// Table 3-8. U+FFFD for Non-Shortest Form Sequences
- try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{}", .{fmtUtf8("\xC0\xAF\xE0\x80\xBF\xF0\x81\x82A")});
+ try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{f}", .{fmtUtf8("\xC0\xAF\xE0\x80\xBF\xF0\x81\x82A")});
// Table 3-9. U+FFFD for Ill-Formed Sequences for Surrogates
- try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{}", .{fmtUtf8("\xED\xA0\x80\xED\xBF\xBF\xED\xAFA")});
+ try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{f}", .{fmtUtf8("\xED\xA0\x80\xED\xBF\xBF\xED\xAFA")});
// Table 3-10. U+FFFD for Other Ill-Formed Sequences
- try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA๏ฟฝ๏ฟฝB", "{}", .{fmtUtf8("\xF4\x91\x92\x93\xFFA\x80\xBFB")});
+ try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA๏ฟฝ๏ฟฝB", "{f}", .{fmtUtf8("\xF4\x91\x92\x93\xFFA\x80\xBFB")});
// Table 3-11. U+FFFD for Truncated Sequences
- try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{}", .{fmtUtf8("\xE1\x80\xE2\xF0\x91\x92\xF1\xBFA")});
+ try expectFmt("๏ฟฝ๏ฟฝ๏ฟฝ๏ฟฝA", "{f}", .{fmtUtf8("\xE1\x80\xE2\xF0\x91\x92\xF1\xBFA")});
}
fn utf16LeToUtf8ArrayListImpl(
@@ -1477,14 +1471,7 @@ test calcWtf16LeLen {
/// Print the given `utf16le` string, encoded as UTF-8 bytes.
/// Unpaired surrogates are replaced by the replacement character (U+FFFD).
-fn formatUtf16Le(
- utf16le: []const u16,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = fmt;
- _ = options;
+fn formatUtf16Le(utf16le: []const u16, writer: *std.io.Writer) std.io.Writer.Error!void {
var buf: [300]u8 = undefined; // just an arbitrary size
var it = Utf16LeIterator.init(utf16le);
var u8len: usize = 0;
@@ -1505,23 +1492,23 @@ pub const fmtUtf16le = @compileError("deprecated; renamed to fmtUtf16Le");
/// Return a Formatter for a (potentially ill-formed) UTF-16 LE string,
/// which will be converted to UTF-8 during formatting.
/// Unpaired surrogates are replaced by the replacement character (U+FFFD).
-pub fn fmtUtf16Le(utf16le: []const u16) std.fmt.Formatter(formatUtf16Le) {
+pub fn fmtUtf16Le(utf16le: []const u16) std.fmt.Formatter([]const u16, formatUtf16Le) {
return .{ .data = utf16le };
}
test fmtUtf16Le {
const expectFmt = testing.expectFmt;
- try expectFmt("", "{}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral(""))});
- try expectFmt("", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral(""))});
- try expectFmt("foo", "{}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral("foo"))});
- try expectFmt("foo", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("foo"))});
- try expectFmt("๐ท", "{}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("๐ท"))});
- try expectFmt("ํฟ", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xd7", native_endian)})});
- try expectFmt("๏ฟฝ", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xd8", native_endian)})});
- try expectFmt("๏ฟฝ", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdb", native_endian)})});
- try expectFmt("๏ฟฝ", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xdc", native_endian)})});
- try expectFmt("๏ฟฝ", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdf", native_endian)})});
- try expectFmt("๎", "{}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xe0", native_endian)})});
+ try expectFmt("", "{f}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral(""))});
+ try expectFmt("", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral(""))});
+ try expectFmt("foo", "{f}", .{fmtUtf16Le(utf8ToUtf16LeStringLiteral("foo"))});
+ try expectFmt("foo", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("foo"))});
+ try expectFmt("๐ท", "{f}", .{fmtUtf16Le(wtf8ToWtf16LeStringLiteral("๐ท"))});
+ try expectFmt("ํฟ", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xd7", native_endian)})});
+ try expectFmt("๏ฟฝ", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xd8", native_endian)})});
+ try expectFmt("๏ฟฝ", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdb", native_endian)})});
+ try expectFmt("๏ฟฝ", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xdc", native_endian)})});
+ try expectFmt("๏ฟฝ", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\xff\xdf", native_endian)})});
+ try expectFmt("๎", "{f}", .{fmtUtf16Le(&[_]u16{mem.readInt(u16, "\x00\xe0", native_endian)})});
}
fn testUtf8ToUtf16LeStringLiteral(utf8ToUtf16LeStringLiteral_: anytype) !void {
lib/std/Uri.zig
@@ -34,27 +34,22 @@ pub const Component = union(enum) {
return switch (component) {
.raw => |raw| raw,
.percent_encoded => |percent_encoded| if (std.mem.indexOfScalar(u8, percent_encoded, '%')) |_|
- try std.fmt.allocPrint(arena, "{raw}", .{component})
+ try std.fmt.allocPrint(arena, "{fraw}", .{component})
else
percent_encoded,
};
}
- pub fn format(
- component: Component,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) @TypeOf(writer).Error!void {
+ pub fn format(component: Component, w: *std.io.Writer, comptime fmt_str: []const u8) std.io.Writer.Error!void {
if (fmt_str.len == 0) {
- try writer.print("std.Uri.Component{{ .{s} = \"{}\" }}", .{
+ try w.print("std.Uri.Component{{ .{s} = \"{f}\" }}", .{
@tagName(component),
- std.zig.fmtEscapes(switch (component) {
+ std.zig.fmtString(switch (component) {
.raw, .percent_encoded => |string| string,
}),
});
} else if (comptime std.mem.eql(u8, fmt_str, "raw")) switch (component) {
- .raw => |raw| try writer.writeAll(raw),
+ .raw => |raw| try w.writeAll(raw),
.percent_encoded => |percent_encoded| {
var start: usize = 0;
var index: usize = 0;
@@ -63,51 +58,47 @@ pub const Component = union(enum) {
if (percent_encoded.len - index < 2) continue;
const percent_encoded_char =
std.fmt.parseInt(u8, percent_encoded[index..][0..2], 16) catch continue;
- try writer.print("{s}{c}", .{
+ try w.print("{s}{c}", .{
percent_encoded[start..percent],
percent_encoded_char,
});
start = percent + 3;
index = percent + 3;
}
- try writer.writeAll(percent_encoded[start..]);
+ try w.writeAll(percent_encoded[start..]);
},
} else if (comptime std.mem.eql(u8, fmt_str, "%")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isUnreserved),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isUnreserved),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "user")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isUserChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isUserChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "password")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isPasswordChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isPasswordChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "host")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isHostChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isHostChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "path")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isPathChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isPathChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "query")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isQueryChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isQueryChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else if (comptime std.mem.eql(u8, fmt_str, "fragment")) switch (component) {
- .raw => |raw| try percentEncode(writer, raw, isFragmentChar),
- .percent_encoded => |percent_encoded| try writer.writeAll(percent_encoded),
+ .raw => |raw| try percentEncode(w, raw, isFragmentChar),
+ .percent_encoded => |percent_encoded| try w.writeAll(percent_encoded),
} else @compileError("invalid format string '" ++ fmt_str ++ "'");
}
- pub fn percentEncode(
- writer: anytype,
- raw: []const u8,
- comptime isValidChar: fn (u8) bool,
- ) @TypeOf(writer).Error!void {
+ pub fn percentEncode(w: *std.io.Writer, raw: []const u8, comptime isValidChar: fn (u8) bool) std.io.Writer.Error!void {
var start: usize = 0;
for (raw, 0..) |char, index| {
if (isValidChar(char)) continue;
- try writer.print("{s}%{X:0>2}", .{ raw[start..index], char });
+ try w.print("{s}%{X:0>2}", .{ raw[start..index], char });
start = index + 1;
}
- try writer.writeAll(raw[start..]);
+ try w.writeAll(raw[start..]);
}
};
@@ -247,11 +238,7 @@ pub const WriteToStreamOptions = struct {
port: bool = true,
};
-pub fn writeToStream(
- uri: Uri,
- options: WriteToStreamOptions,
- writer: anytype,
-) @TypeOf(writer).Error!void {
+pub fn writeToStream(uri: Uri, writer: *std.io.Writer, options: WriteToStreamOptions) std.io.Writer.Error!void {
if (options.scheme) {
try writer.print("{s}:", .{uri.scheme});
if (options.authority and uri.host != null) {
@@ -261,39 +248,34 @@ pub fn writeToStream(
if (options.authority) {
if (options.authentication and uri.host != null) {
if (uri.user) |user| {
- try writer.print("{user}", .{user});
+ try writer.print("{fuser}", .{user});
if (uri.password) |password| {
- try writer.print(":{password}", .{password});
+ try writer.print(":{fpassword}", .{password});
}
try writer.writeByte('@');
}
}
if (uri.host) |host| {
- try writer.print("{host}", .{host});
+ try writer.print("{fhost}", .{host});
if (options.port) {
if (uri.port) |port| try writer.print(":{d}", .{port});
}
}
}
if (options.path) {
- try writer.print("{path}", .{
+ try writer.print("{fpath}", .{
if (uri.path.isEmpty()) Uri.Component{ .percent_encoded = "/" } else uri.path,
});
if (options.query) {
- if (uri.query) |query| try writer.print("?{query}", .{query});
+ if (uri.query) |query| try writer.print("?{fquery}", .{query});
}
if (options.fragment) {
- if (uri.fragment) |fragment| try writer.print("#{fragment}", .{fragment});
+ if (uri.fragment) |fragment| try writer.print("#{ffragment}", .{fragment});
}
}
}
-pub fn format(
- uri: Uri,
- comptime fmt_str: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
-) @TypeOf(writer).Error!void {
+pub fn format(uri: Uri, writer: *std.io.Writer, comptime fmt_str: []const u8) std.io.Writer.Error!void {
const scheme = comptime std.mem.indexOfScalar(u8, fmt_str, ';') != null or fmt_str.len == 0;
const authentication = comptime std.mem.indexOfScalar(u8, fmt_str, '@') != null or fmt_str.len == 0;
const authority = comptime std.mem.indexOfScalar(u8, fmt_str, '+') != null or fmt_str.len == 0;
@@ -301,14 +283,14 @@ pub fn format(
const query = comptime std.mem.indexOfScalar(u8, fmt_str, '?') != null or fmt_str.len == 0;
const fragment = comptime std.mem.indexOfScalar(u8, fmt_str, '#') != null or fmt_str.len == 0;
- return writeToStream(uri, .{
+ return writeToStream(uri, writer, .{
.scheme = scheme,
.authentication = authentication,
.authority = authority,
.path = path,
.query = query,
.fragment = fragment,
- }, writer);
+ });
}
/// Parses the URI or returns an error.
@@ -447,7 +429,7 @@ test remove_dot_segments {
fn merge_paths(base: Component, new: []u8, aux_buf: *[]u8) error{NoSpaceLeft}!Component {
var aux = std.io.fixedBufferStream(aux_buf.*);
if (!base.isEmpty()) {
- try aux.writer().print("{path}", .{base});
+ try aux.writer().print("{fpath}", .{base});
aux.pos = std.mem.lastIndexOfScalar(u8, aux.getWritten(), '/') orelse
return remove_dot_segments(new);
}
@@ -812,7 +794,7 @@ test "Special test" {
test "URI percent encoding" {
try std.testing.expectFmt(
"%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad",
- "{%}",
+ "{f%}",
.{Component{ .raw = "\\ö/ äöß ~~.adas-https://canvas:123/#ads&&sad" }},
);
}
@@ -822,7 +804,7 @@ test "URI percent decoding" {
const expected = "\\รถ/ รครถร ~~.adas-https://canvas:123/#ads&&sad";
var input = "%5C%C3%B6%2F%20%C3%A4%C3%B6%C3%9F%20~~.adas-https%3A%2F%2Fcanvas%3A123%2F%23ads%26%26sad".*;
- try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }});
+ try std.testing.expectFmt(expected, "{fraw}", .{Component{ .percent_encoded = &input }});
var output: [expected.len]u8 = undefined;
try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected);
@@ -834,7 +816,7 @@ test "URI percent decoding" {
const expected = "/abc%";
var input = expected.*;
- try std.testing.expectFmt(expected, "{raw}", .{Component{ .percent_encoded = &input }});
+ try std.testing.expectFmt(expected, "{fraw}", .{Component{ .percent_encoded = &input }});
var output: [expected.len]u8 = undefined;
try std.testing.expectEqualStrings(percentDecodeBackwards(&output, &input), expected);
@@ -848,7 +830,7 @@ test "URI query encoding" {
const parsed = try Uri.parse(address);
// format the URI to percent encode it
- try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{/?}", .{parsed});
+ try std.testing.expectFmt("/?response-content-type=application%2Foctet-stream", "{f/?}", .{parsed});
}
test "format" {
@@ -862,7 +844,7 @@ test "format" {
.query = null,
.fragment = null,
};
- try std.testing.expectFmt("file:/foo/bar/baz", "{;/?#}", .{uri});
+ try std.testing.expectFmt("file:/foo/bar/baz", "{f;/?#}", .{uri});
}
test "URI malformed input" {
lib/std/zig.zig
@@ -363,149 +363,136 @@ const Allocator = std.mem.Allocator;
/// Return a Formatter for a Zig identifier, escaping it with `@""` syntax if needed.
///
-/// - An empty `{}` format specifier escapes invalid identifiers, identifiers that shadow primitives
-/// and the reserved `_` identifier.
-/// - Add `p` to the specifier to render identifiers that shadow primitives unescaped.
-/// - Add `_` to the specifier to render the reserved `_` identifier unescaped.
-/// - `p` and `_` can be combined, e.g. `{p_}`.
+/// See also `fmtIdFlags`.
+pub fn fmtId(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) {
+ return .{ .data = .{ .bytes = bytes, .flags = .{} } };
+}
+
+/// Return a Formatter for a Zig identifier, escaping it with `@""` syntax if needed.
///
-pub fn fmtId(bytes: []const u8) std.fmt.Formatter(formatId) {
- return .{ .data = bytes };
+/// See also `fmtId`.
+pub fn fmtIdFlags(bytes: []const u8, flags: FormatId.Flags) std.fmt.Formatter(FormatId, FormatId.render) {
+ return .{ .data = .{ .bytes = bytes, .flags = flags } };
+}
+
+pub fn fmtIdPU(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) {
+ return .{ .data = .{ .bytes = bytes, .flags = .{ .allow_primitive = true, .allow_underscore = true } } };
+}
+
+pub fn fmtIdP(bytes: []const u8) std.fmt.Formatter(FormatId, FormatId.render) {
+ return .{ .data = .{ .bytes = bytes, .flags = .{ .allow_primitive = true } } };
}
test fmtId {
const expectFmt = std.testing.expectFmt;
- try expectFmt("@\"while\"", "{}", .{fmtId("while")});
- try expectFmt("@\"while\"", "{p}", .{fmtId("while")});
- try expectFmt("@\"while\"", "{_}", .{fmtId("while")});
- try expectFmt("@\"while\"", "{p_}", .{fmtId("while")});
- try expectFmt("@\"while\"", "{_p}", .{fmtId("while")});
-
- try expectFmt("hello", "{}", .{fmtId("hello")});
- try expectFmt("hello", "{p}", .{fmtId("hello")});
- try expectFmt("hello", "{_}", .{fmtId("hello")});
- try expectFmt("hello", "{p_}", .{fmtId("hello")});
- try expectFmt("hello", "{_p}", .{fmtId("hello")});
-
- try expectFmt("@\"type\"", "{}", .{fmtId("type")});
- try expectFmt("type", "{p}", .{fmtId("type")});
- try expectFmt("@\"type\"", "{_}", .{fmtId("type")});
- try expectFmt("type", "{p_}", .{fmtId("type")});
- try expectFmt("type", "{_p}", .{fmtId("type")});
-
- try expectFmt("@\"_\"", "{}", .{fmtId("_")});
- try expectFmt("@\"_\"", "{p}", .{fmtId("_")});
- try expectFmt("_", "{_}", .{fmtId("_")});
- try expectFmt("_", "{p_}", .{fmtId("_")});
- try expectFmt("_", "{_p}", .{fmtId("_")});
-
- try expectFmt("@\"i123\"", "{}", .{fmtId("i123")});
- try expectFmt("i123", "{p}", .{fmtId("i123")});
- try expectFmt("@\"4four\"", "{}", .{fmtId("4four")});
- try expectFmt("_underscore", "{}", .{fmtId("_underscore")});
- try expectFmt("@\"11\\\"23\"", "{}", .{fmtId("11\"23")});
- try expectFmt("@\"11\\x0f23\"", "{}", .{fmtId("11\x0F23")});
+ try expectFmt("@\"while\"", "{f}", .{fmtId("while")});
+ try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_primitive = true })});
+ try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_underscore = true })});
+ try expectFmt("@\"while\"", "{f}", .{fmtIdFlags("while", .{ .allow_primitive = true, .allow_underscore = true })});
+
+ try expectFmt("hello", "{f}", .{fmtId("hello")});
+ try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_primitive = true })});
+ try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_underscore = true })});
+ try expectFmt("hello", "{f}", .{fmtIdFlags("hello", .{ .allow_primitive = true, .allow_underscore = true })});
+
+ try expectFmt("@\"type\"", "{f}", .{fmtId("type")});
+ try expectFmt("type", "{f}", .{fmtIdFlags("type", .{ .allow_primitive = true })});
+ try expectFmt("@\"type\"", "{f}", .{fmtIdFlags("type", .{ .allow_underscore = true })});
+ try expectFmt("type", "{f}", .{fmtIdFlags("type", .{ .allow_primitive = true, .allow_underscore = true })});
+
+ try expectFmt("@\"_\"", "{f}", .{fmtId("_")});
+ try expectFmt("@\"_\"", "{f}", .{fmtIdFlags("_", .{ .allow_primitive = true })});
+ try expectFmt("_", "{f}", .{fmtIdFlags("_", .{ .allow_underscore = true })});
+ try expectFmt("_", "{f}", .{fmtIdFlags("_", .{ .allow_primitive = true, .allow_underscore = true })});
+
+ try expectFmt("@\"i123\"", "{f}", .{fmtId("i123")});
+ try expectFmt("i123", "{f}", .{fmtIdFlags("i123", .{ .allow_primitive = true })});
+ try expectFmt("@\"4four\"", "{f}", .{fmtId("4four")});
+ try expectFmt("_underscore", "{f}", .{fmtId("_underscore")});
+ try expectFmt("@\"11\\\"23\"", "{f}", .{fmtId("11\"23")});
+ try expectFmt("@\"11\\x0f23\"", "{f}", .{fmtId("11\x0F23")});
// These are technically not currently legal in Zig.
- try expectFmt("@\"\"", "{}", .{fmtId("")});
- try expectFmt("@\"\\x00\"", "{}", .{fmtId("\x00")});
+ try expectFmt("@\"\"", "{f}", .{fmtId("")});
+ try expectFmt("@\"\\x00\"", "{f}", .{fmtId("\x00")});
}
-/// Print the string as a Zig identifier, escaping it with `@""` syntax if needed.
-fn formatId(
+pub const FormatId = struct {
bytes: []const u8,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- const allow_primitive, const allow_underscore = comptime parse_fmt: {
- var allow_primitive = false;
- var allow_underscore = false;
- for (fmt) |char| {
- switch (char) {
- 'p' => if (!allow_primitive) {
- allow_primitive = true;
- continue;
- },
- '_' => if (!allow_underscore) {
- allow_underscore = true;
- continue;
- },
- else => {},
- }
- @compileError("expected {}, {p}, {_}, {p_} or {_p}, found {" ++ fmt ++ "}");
- }
- break :parse_fmt .{ allow_primitive, allow_underscore };
+ flags: Flags,
+ pub const Flags = struct {
+ allow_primitive: bool = false,
+ allow_underscore: bool = false,
};
- if (isValidId(bytes) and
- (allow_primitive or !std.zig.isPrimitive(bytes)) and
- (allow_underscore or !isUnderscore(bytes)))
- {
- return writer.writeAll(bytes);
+ /// Print the string as a Zig identifier, escaping it with `@""` syntax if needed.
+ fn render(ctx: FormatId, writer: *std.io.Writer) std.io.Writer.Error!void {
+ const bytes = ctx.bytes;
+ if (isValidId(bytes) and
+ (ctx.flags.allow_primitive or !std.zig.isPrimitive(bytes)) and
+ (ctx.flags.allow_underscore or !isUnderscore(bytes)))
+ {
+ return writer.writeAll(bytes);
+ }
+ try writer.writeAll("@\"");
+ try stringEscape(bytes, writer);
+ try writer.writeByte('"');
}
- try writer.writeAll("@\"");
- try stringEscape(bytes, "", options, writer);
- try writer.writeByte('"');
+};
+
+/// Return a formatter for escaping a double quoted Zig string.
+pub fn fmtString(bytes: []const u8) std.fmt.Formatter([]const u8, stringEscape) {
+ return .{ .data = bytes };
}
-/// Return a Formatter for Zig Escapes of a double quoted string.
-/// The format specifier must be one of:
-/// * `{}` treats contents as a double-quoted string.
-/// * `{'}` treats contents as a single-quoted string.
-pub fn fmtEscapes(bytes: []const u8) std.fmt.Formatter(stringEscape) {
+/// Return a formatter for escaping a single quoted Zig string.
+pub fn fmtChar(bytes: []const u8) std.fmt.Formatter([]const u8, charEscape) {
return .{ .data = bytes };
}
-test fmtEscapes {
- const expectFmt = std.testing.expectFmt;
- try expectFmt("\\x0f", "{}", .{fmtEscapes("\x0f")});
- try expectFmt(
- \\" \\ hi \x07 \x11 " derp \'"
- , "\"{'}\"", .{fmtEscapes(" \\ hi \x07 \x11 \" derp '")});
- try expectFmt(
+test fmtString {
+ try std.testing.expectFmt("\\x0f", "{f}", .{fmtString("\x0f")});
+ try std.testing.expectFmt(
\\" \\ hi \x07 \x11 \" derp '"
- , "\"{}\"", .{fmtEscapes(" \\ hi \x07 \x11 \" derp '")});
+ , "\"{f}\"", .{fmtString(" \\ hi \x07 \x11 \" derp '")});
}
-/// Print the string as escaped contents of a double quoted or single-quoted string.
-/// Format `{}` treats contents as a double-quoted string.
-/// Format `{'}` treats contents as a single-quoted string.
-pub fn stringEscape(
- bytes: []const u8,
- comptime f: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
-) !void {
- _ = options;
+test fmtChar {
+ try std.testing.expectFmt(
+ \\" \\ hi \x07 \x11 " derp \'"
+ , "\"{f}\"", .{fmtChar(" \\ hi \x07 \x11 \" derp '")});
+}
+
+/// Print the string as escaped contents of a double quoted string.
+pub fn stringEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
for (bytes) |byte| switch (byte) {
- '\n' => try writer.writeAll("\\n"),
- '\r' => try writer.writeAll("\\r"),
- '\t' => try writer.writeAll("\\t"),
- '\\' => try writer.writeAll("\\\\"),
- '"' => {
- if (f.len == 1 and f[0] == '\'') {
- try writer.writeByte('"');
- } else if (f.len == 0) {
- try writer.writeAll("\\\"");
- } else {
- @compileError("expected {} or {'}, found {" ++ f ++ "}");
- }
- },
- '\'' => {
- if (f.len == 1 and f[0] == '\'') {
- try writer.writeAll("\\'");
- } else if (f.len == 0) {
- try writer.writeByte('\'');
- } else {
- @compileError("expected {} or {'}, found {" ++ f ++ "}");
- }
+ '\n' => try w.writeAll("\\n"),
+ '\r' => try w.writeAll("\\r"),
+ '\t' => try w.writeAll("\\t"),
+ '\\' => try w.writeAll("\\\\"),
+ '"' => try w.writeAll("\\\""),
+ '\'' => try w.writeByte('\''),
+ ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte),
+ else => {
+ try w.writeAll("\\x");
+ try w.printIntOptions(byte, 16, .lower, .{ .width = 2, .fill = '0' });
},
- ' ', '!', '#'...'&', '('...'[', ']'...'~' => try writer.writeByte(byte),
- // Use hex escapes for rest any unprintable characters.
+ };
+}
+
+/// Print the string as escaped contents of a single-quoted string.
+pub fn charEscape(bytes: []const u8, w: *std.io.Writer) std.io.Writer.Error!void {
+ for (bytes) |byte| switch (byte) {
+ '\n' => try w.writeAll("\\n"),
+ '\r' => try w.writeAll("\\r"),
+ '\t' => try w.writeAll("\\t"),
+ '\\' => try w.writeAll("\\\\"),
+ '"' => try w.writeByte('"'),
+ '\'' => try w.writeAll("\\'"),
+ ' ', '!', '#'...'&', '('...'[', ']'...'~' => try w.writeByte(byte),
else => {
- try writer.writeAll("\\x");
- try std.fmt.formatInt(byte, 16, .lower, .{ .width = 2, .fill = '0' }, writer);
+ try w.writeAll("\\x");
+ try w.printIntOptions(byte, 16, .lower, .{ .width = 2, .fill = '0' });
},
};
}
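Where this API matters most is source generation, as in the Builtin.zig and Package/Fetch.zig hunks later in this diff: fmtId (or fmtIdPU, which lets primitives and the reserved `_` through unescaped) quotes identifiers, and fmtString escapes string contents. A small combined sketch, consistent with the tests above (the generated declaration is illustrative):

    const std = @import("std");

    test "emit an escaped Zig declaration" {
        var buf: [64]u8 = undefined;
        const line = try std.fmt.bufPrint(&buf, "pub const {f} = \"{f}\";", .{
            std.zig.fmtId("while"), // keyword, so it renders as @"while"
            std.zig.fmtString("a\tb"), // tab is escaped to \t
        });
        try std.testing.expectEqualStrings("pub const @\"while\" = \"a\\tb\";", line);
    }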
lib/std/zip.zig
@@ -557,7 +557,7 @@ pub fn Iterator(comptime SeekableStream: type) type {
self.compression_method,
self.uncompressed_size,
limited_reader.reader(),
- out_file.writer(),
+ out_file.deprecatedWriter(),
);
if (limited_reader.bytes_left != 0)
return error.ZipDecompressTruncated;
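The zip.zig change is representative of a large class of edits in this commit: the old unbuffered adapter returned by File.writer()/reader() is kept under the name deprecatedWriter()/deprecatedReader(), so unmigrated call sites only change the method name. A hedged sketch of such a call site (the helper is illustrative):

    const std = @import("std");

    // Same GenericWriter-style interface as before, just under the new name.
    fn writeGreeting(file: std.fs.File, name: []const u8) !void {
        try file.deprecatedWriter().print("hello, {s}!\n", .{name});
    }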
lib/ubsan_rt.zig
@@ -119,12 +119,7 @@ const Value = extern struct {
}
}
- pub fn format(
- value: Value,
- comptime fmt: []const u8,
- _: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
+ pub fn format(value: Value, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
comptime assert(fmt.len == 0);
// Work around x86_64 backend limitation.
@@ -136,12 +131,12 @@ const Value = extern struct {
switch (value.td.kind) {
.integer => {
if (value.td.isSigned()) {
- try writer.print("{}", .{value.getSignedInteger()});
+ try writer.print("{d}", .{value.getSignedInteger()});
} else {
- try writer.print("{}", .{value.getUnsignedInteger()});
+ try writer.print("{d}", .{value.getUnsignedInteger()});
}
},
- .float => try writer.print("{}", .{value.getFloat()}),
+ .float => try writer.print("{d}", .{value.getFloat()}),
.unknown => try writer.writeAll("(unknown)"),
}
}
@@ -172,17 +167,12 @@ fn overflowHandler(
) callconv(.c) noreturn {
const lhs: Value = .{ .handle = lhs_handle, .td = data.td };
const rhs: Value = .{ .handle = rhs_handle, .td = data.td };
-
- const is_signed = data.td.isSigned();
- const fmt = "{s} integer overflow: " ++ "{} " ++
- operator ++ " {} cannot be represented in type {s}";
-
- panic(@returnAddress(), fmt, .{
- if (is_signed) "signed" else "unsigned",
- lhs,
- rhs,
- data.td.getName(),
- });
+ const signed_str = if (data.td.isSigned()) "signed" else "unsigned";
+ panic(
+ @returnAddress(),
+ "{s} integer overflow: {f} " ++ operator ++ " {f} cannot be represented in type {s}",
+ .{ signed_str, lhs, rhs, data.td.getName() },
+ );
}
};
@@ -201,11 +191,9 @@ fn negationHandler(
value_handle: ValueHandle,
) callconv(.c) noreturn {
const value: Value = .{ .handle = value_handle, .td = data.td };
- panic(
- @returnAddress(),
- "negation of {} cannot be represented in type {s}",
- .{ value, data.td.getName() },
- );
+ panic(@returnAddress(), "negation of {f} cannot be represented in type {s}", .{
+ value, data.td.getName(),
+ });
}
fn divRemHandlerAbort(
@@ -225,11 +213,9 @@ fn divRemHandler(
const rhs: Value = .{ .handle = rhs_handle, .td = data.td };
if (rhs.isMinusOne()) {
- panic(
- @returnAddress(),
- "division of {} by -1 cannot be represented in type {s}",
- .{ lhs, data.td.getName() },
- );
+ panic(@returnAddress(), "division of {f} by -1 cannot be represented in type {s}", .{
+ lhs, data.td.getName(),
+ });
} else panic(@returnAddress(), "division by zero", .{});
}
@@ -269,8 +255,8 @@ fn alignmentAssumptionHandler(
if (maybe_offset) |offset| {
panic(
@returnAddress(),
- "assumption of {} byte alignment (with offset of {} byte) for pointer of type {s} failed\n" ++
- "offset address is {} aligned, misalignment offset is {} bytes",
+ "assumption of {f} byte alignment (with offset of {d} byte) for pointer of type {s} failed\n" ++
+ "offset address is {d} aligned, misalignment offset is {d} bytes",
.{
alignment,
@intFromPtr(offset),
@@ -282,8 +268,8 @@ fn alignmentAssumptionHandler(
} else {
panic(
@returnAddress(),
- "assumption of {} byte alignment for pointer of type {s} failed\n" ++
- "address is {} aligned, misalignment offset is {} bytes",
+ "assumption of {f} byte alignment for pointer of type {s} failed\n" ++
+ "address is {d} aligned, misalignment offset is {d} bytes",
.{
alignment,
data.td.getName(),
@@ -320,21 +306,21 @@ fn shiftOob(
rhs.getPositiveInteger() >= data.lhs_type.getIntegerSize())
{
if (rhs.isNegative()) {
- panic(@returnAddress(), "shift exponent {} is negative", .{rhs});
+ panic(@returnAddress(), "shift exponent {f} is negative", .{rhs});
} else {
panic(
@returnAddress(),
- "shift exponent {} is too large for {}-bit type {s}",
+ "shift exponent {f} is too large for {d}-bit type {s}",
.{ rhs, data.lhs_type.getIntegerSize(), data.lhs_type.getName() },
);
}
} else {
if (lhs.isNegative()) {
- panic(@returnAddress(), "left shift of negative value {}", .{lhs});
+ panic(@returnAddress(), "left shift of negative value {f}", .{lhs});
} else {
panic(
@returnAddress(),
- "left shift of {} by {} places cannot be represented in type {s}",
+ "left shift of {f} by {f} places cannot be represented in type {s}",
.{ lhs, rhs, data.lhs_type.getName() },
);
}
@@ -359,11 +345,10 @@ fn outOfBounds(
index_handle: ValueHandle,
) callconv(.c) noreturn {
const index: Value = .{ .handle = index_handle, .td = data.index_type };
- panic(
- @returnAddress(),
- "index {} out of bounds for type {s}",
- .{ index, data.array_type.getName() },
- );
+ panic(@returnAddress(), "index {f} out of bounds for type {s}", .{
+ index,
+ data.array_type.getName(),
+ });
}
const PointerOverflowData = extern struct {
@@ -387,7 +372,7 @@ fn pointerOverflow(
if (result == 0) {
panic(@returnAddress(), "applying zero offset to null pointer", .{});
} else {
- panic(@returnAddress(), "applying non-zero offset {} to null pointer", .{result});
+ panic(@returnAddress(), "applying non-zero offset {d} to null pointer", .{result});
}
} else {
if (result == 0) {
@@ -483,7 +468,7 @@ fn typeMismatch(
} else if (!std.mem.isAligned(handle, alignment)) {
panic(
@returnAddress(),
- "{s} misaligned address 0x{x} for type {s}, which requires {} byte alignment",
+ "{s} misaligned address 0x{x} for type {s}, which requires {d} byte alignment",
.{ data.kind.getName(), handle, data.td.getName(), alignment },
);
} else {
@@ -531,7 +516,7 @@ fn nonNullArgAbort(data: *const NonNullArgData) callconv(.c) noreturn {
fn nonNullArg(data: *const NonNullArgData) callconv(.c) noreturn {
panic(
@returnAddress(),
- "null pointer passed as argument {}, which is declared to never be null",
+ "null pointer passed as argument {d}, which is declared to never be null",
.{data.arg_index},
);
}
@@ -553,11 +538,9 @@ fn loadInvalidValue(
value_handle: ValueHandle,
) callconv(.c) noreturn {
const value: Value = .{ .handle = value_handle, .td = data.td };
- panic(
- @returnAddress(),
- "load of value {}, which is not valid for type {s}",
- .{ value, data.td.getName() },
- );
+ panic(@returnAddress(), "load of value {f}, which is not valid for type {s}", .{
+ value, data.td.getName(),
+ });
}
const InvalidBuiltinData = extern struct {
@@ -596,11 +579,7 @@ fn vlaBoundNotPositive(
bound_handle: ValueHandle,
) callconv(.c) noreturn {
const bound: Value = .{ .handle = bound_handle, .td = data.td };
- panic(
- @returnAddress(),
- "variable length array bound evaluates to non-positive value {}",
- .{bound},
- );
+ panic(@returnAddress(), "variable length array bound evaluates to non-positive value {f}", .{bound});
}
const FloatCastOverflowData = extern struct {
@@ -631,13 +610,13 @@ fn floatCastOverflow(
if (@as(u16, ptr[0]) + @as(u16, ptr[1]) < 2 or ptr[0] == 0xFF or ptr[1] == 0xFF) {
const data: *const FloatCastOverflowData = @ptrCast(data_handle);
const from_value: Value = .{ .handle = from_handle, .td = data.from };
- panic(@returnAddress(), "{} is outside the range of representable values of type {s}", .{
+ panic(@returnAddress(), "{f} is outside the range of representable values of type {s}", .{
from_value, data.to.getName(),
});
} else {
const data: *const FloatCastOverflowDataV2 = @ptrCast(data_handle);
const from_value: Value = .{ .handle = from_handle, .td = data.from };
- panic(@returnAddress(), "{} is outside the range of representable values of type {s}", .{
+ panic(@returnAddress(), "{f} is outside the range of representable values of type {s}", .{
from_value, data.to.getName(),
});
}
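The ubsan_rt hunks spell out the new specifier split: {d} for plain integers and floats, {f} for values such as Value that implement the writer-based format method. A reduced sketch of the same shape on a hypothetical type, mirroring the Value.format signature above:

    const std = @import("std");

    const Celsius = struct {
        degrees: i32,

        pub fn format(c: Celsius, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
            comptime std.debug.assert(fmt.len == 0);
            try writer.print("{d} C", .{c.degrees});
        }
    };

    test "{f} dispatches to format, {d} stays for bare numbers" {
        try std.testing.expectFmt("21 C", "{f}", .{Celsius{ .degrees = 21 }});
        try std.testing.expectFmt("21", "{d}", .{@as(i32, 21)});
    }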
src/Air/print.zig
@@ -73,11 +73,11 @@ pub fn writeInst(
}
pub fn dump(air: Air, pt: Zcu.PerThread, liveness: ?Air.Liveness) void {
- air.write(std.fs.File.stderr().writer(), pt, liveness);
+ air.write(std.fs.File.stderr().deprecatedWriter(), pt, liveness);
}
pub fn dumpInst(air: Air, inst: Air.Inst.Index, pt: Zcu.PerThread, liveness: ?Air.Liveness) void {
- air.writeInst(std.fs.File.stderr().writer(), inst, pt, liveness);
+ air.writeInst(std.fs.File.stderr().deprecatedWriter(), inst, pt, liveness);
}
const Writer = struct {
@@ -704,7 +704,7 @@ const Writer = struct {
}
}
const asm_source = std.mem.sliceAsBytes(w.air.extra.items[extra_i..])[0..extra.data.source_len];
- try s.print(", \"{}\"", .{std.zig.fmtEscapes(asm_source)});
+ try s.print(", \"{f}\"", .{std.zig.fmtString(asm_source)});
}
fn writeDbgStmt(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
@@ -716,7 +716,7 @@ const Writer = struct {
const pl_op = w.air.instructions.items(.data)[@intFromEnum(inst)].pl_op;
try w.writeOperand(s, inst, 0, pl_op.operand);
const name: Air.NullTerminatedString = @enumFromInt(pl_op.payload);
- try s.print(", \"{}\"", .{std.zig.fmtEscapes(name.toSlice(w.air))});
+ try s.print(", \"{f}\"", .{std.zig.fmtString(name.toSlice(w.air))});
}
fn writeCall(w: *Writer, s: anytype, inst: Air.Inst.Index) @TypeOf(s).Error!void {
src/arch/x86_64/encoder.zig
@@ -1205,9 +1205,9 @@ pub const Vex = struct {
fn expectEqualHexStrings(expected: []const u8, given: []const u8, assembly: []const u8) !void {
assert(expected.len > 0);
if (std.mem.eql(u8, expected, given)) return;
- const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)});
+ const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected});
defer testing.allocator.free(expected_fmt);
- const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)});
+ const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given});
defer testing.allocator.free(given_fmt);
const idx = std.mem.indexOfDiff(u8, expected_fmt, given_fmt).?;
const padding = try testing.allocator.alloc(u8, idx + 5);
src/arch/x86_64/Encoding.zig
@@ -187,7 +187,7 @@ pub fn format(
},
}
- try writer.print(".{}", .{std.fmt.fmtSliceHexUpper(opc[0 .. opc.len - 1])});
+ try writer.print(".{X}", .{opc[0 .. opc.len - 1]});
opc = opc[opc.len - 1 ..];
try writer.writeAll(".W");
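In the two encoder hunks above, {x} and {X} are applied directly to byte slices, which is what allows the fmtSliceHexLower/Upper wrappers to be dropped. A quick sketch (the values are illustrative):

    const std = @import("std");

    test "byte slices hex-format directly" {
        const bytes = [_]u8{ 0xde, 0xad, 0xbe, 0xef };
        try std.testing.expectFmt("deadbeef", "{x}", .{&bytes});
        try std.testing.expectFmt("DEADBEEF", "{X}", .{&bytes});
    }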
src/codegen/llvm.zig
@@ -2486,7 +2486,7 @@ pub const Object = struct {
var union_name_buf: ?[:0]const u8 = null;
defer if (union_name_buf) |buf| gpa.free(buf);
const union_name = if (layout.tag_size == 0) name else name: {
- union_name_buf = try std.fmt.allocPrintZ(gpa, "{s}:Payload", .{name});
+ union_name_buf = try std.fmt.allocPrintSentinel(gpa, "{s}:Payload", .{name}, 0);
break :name union_name_buf.?;
};
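allocPrintZ becomes allocPrintSentinel with the sentinel spelled out; the result is still a caller-owned, sentinel-terminated slice. A sketch under that reading (the formatted name is illustrative):

    const std = @import("std");

    test "allocate a NUL-terminated formatted string" {
        const gpa = std.testing.allocator;
        const name: [:0]const u8 = try std.fmt.allocPrintSentinel(gpa, "{s}:Payload", .{"MyUnion"}, 0);
        defer gpa.free(name);
        try std.testing.expectEqualStrings("MyUnion:Payload", name);
    }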
src/libs/libtsan.zig
@@ -268,7 +268,7 @@ pub fn buildTsan(comp: *Compilation, prog_node: std.Progress.Node) BuildError!vo
const skip_linker_dependencies = !target.os.tag.isDarwin();
const linker_allow_shlib_undefined = target.os.tag.isDarwin();
const install_name = if (target.os.tag.isDarwin())
- try std.fmt.allocPrintZ(arena, "@rpath/{s}", .{basename})
+ try std.fmt.allocPrintSentinel(arena, "@rpath/{s}", .{basename}, 0)
else
null;
// Workaround for https://github.com/llvm/llvm-project/issues/97627
src/libs/mingw.zig
@@ -306,7 +306,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
if (comp.verbose_cc) print: {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
nosuspend stderr.print("def file: {s}\n", .{def_file_path}) catch break :print;
nosuspend stderr.print("include dir: {s}\n", .{include_dir}) catch break :print;
nosuspend stderr.print("output path: {s}\n", .{def_final_path}) catch break :print;
@@ -335,7 +335,7 @@ pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
// new scope to ensure definition file is written before passing the path to WriteImportLibrary
const def_final_file = try o_dir.createFile(final_def_basename, .{ .truncate = true });
defer def_final_file.close();
- try pp.prettyPrintTokens(def_final_file.writer(), .result_only);
+ try pp.prettyPrintTokens(def_final_file.deprecatedWriter(), .result_only);
}
const lib_final_path = try std.fs.path.join(gpa, &.{ "o", &digest, final_lib_basename });
src/link/Elf/Archive.zig
@@ -44,8 +44,8 @@ pub fn parse(
pos += @sizeOf(elf.ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, elf.ARFMAG)) {
- return diags.failParse(path, "invalid archive header delimiter: {s}", .{
- std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
+ return diags.failParse(path, "invalid archive header delimiter: {f}", .{
+ std.ascii.hexEscape(&hdr.ar_fmag, .lower),
});
}
@@ -288,7 +288,7 @@ pub const ArStrtab = struct {
) !void {
_ = unused_fmt_string;
_ = options;
- try writer.print("{s}", .{std.fmt.fmtSliceEscapeLower(ar.buffer.items)});
+ try writer.print("{f}", .{std.ascii.hexEscape(ar.buffer.items, .lower)});
}
};
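std.ascii.hexEscape(bytes, .lower) is the replacement used here for std.fmt.fmtSliceEscapeLower; judging from these call sites it returns a Formatter (hence {f}) that passes printable bytes through and escapes the rest as \xNN. A hedged sketch under that assumption:

    const std = @import("std");

    test "escape non-printable bytes in a diagnostic" {
        try std.testing.expectFmt("ar\\xff!", "{f}", .{std.ascii.hexEscape("ar\xff!", .lower)});
    }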
src/link/Elf/gc.zig
@@ -163,7 +163,7 @@ fn prune(elf_file: *Elf) void {
}
pub fn dumpPrunedAtoms(elf_file: *Elf) !void {
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
for (elf_file.objects.items) |index| {
const file = elf_file.file(index).?;
for (file.atoms()) |atom_index| {
src/link/Elf/LinkerDefined.zig
@@ -147,9 +147,9 @@ pub fn initStartStopSymbols(self: *LinkerDefined, elf_file: *Elf) !void {
for (slice.items(.shdr)) |shdr| {
// TODO use getOrPut for incremental so that we don't create duplicates
if (elf_file.getStartStopBasename(shdr)) |name| {
- const start_name = try std.fmt.allocPrintZ(gpa, "__start_{s}", .{name});
+ const start_name = try std.fmt.allocPrintSentinel(gpa, "__start_{s}", .{name}, 0);
defer gpa.free(start_name);
- const stop_name = try std.fmt.allocPrintZ(gpa, "__stop_{s}", .{name});
+ const stop_name = try std.fmt.allocPrintSentinel(gpa, "__stop_{s}", .{name}, 0);
defer gpa.free(stop_name);
for (&[_][]const u8{ start_name, stop_name }) |nn| {
src/link/Elf/ZigObject.zig
@@ -803,9 +803,9 @@ pub fn initRelaSections(self: *ZigObject, elf_file: *Elf) !void {
const out_shndx = atom_ptr.output_section_index;
const out_shdr = elf_file.sections.items(.shdr)[out_shndx];
if (out_shdr.sh_type == elf.SHT_NOBITS) continue;
- const rela_sect_name = try std.fmt.allocPrintZ(gpa, ".rela{s}", .{
+ const rela_sect_name = try std.fmt.allocPrintSentinel(gpa, ".rela{s}", .{
elf_file.getShString(out_shdr.sh_name),
- });
+ }, 0);
defer gpa.free(rela_sect_name);
_ = elf_file.sectionByName(rela_sect_name) orelse
try elf_file.addRelaShdr(try elf_file.insertShString(rela_sect_name), out_shndx);
@@ -824,9 +824,9 @@ pub fn addAtomsToRelaSections(self: *ZigObject, elf_file: *Elf) !void {
const out_shndx = atom_ptr.output_section_index;
const out_shdr = elf_file.sections.items(.shdr)[out_shndx];
if (out_shdr.sh_type == elf.SHT_NOBITS) continue;
- const rela_sect_name = try std.fmt.allocPrintZ(gpa, ".rela{s}", .{
+ const rela_sect_name = try std.fmt.allocPrintSentinel(gpa, ".rela{s}", .{
elf_file.getShString(out_shdr.sh_name),
- });
+ }, 0);
defer gpa.free(rela_sect_name);
const out_rela_shndx = elf_file.sectionByName(rela_sect_name).?;
const out_rela_shdr = &elf_file.sections.items(.shdr)[out_rela_shndx];
src/link/MachO/dyld_info/Trie.zig
@@ -336,9 +336,9 @@ const Edge = struct {
fn expectEqualHexStrings(expected: []const u8, given: []const u8) !void {
assert(expected.len > 0);
if (mem.eql(u8, expected, given)) return;
- const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(expected)});
+ const expected_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{expected});
defer testing.allocator.free(expected_fmt);
- const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{std.fmt.fmtSliceHexLower(given)});
+ const given_fmt = try std.fmt.allocPrint(testing.allocator, "{x}", .{given});
defer testing.allocator.free(given_fmt);
const idx = mem.indexOfDiff(u8, expected_fmt, given_fmt).?;
const padding = try testing.allocator.alloc(u8, idx + 5);
src/link/MachO/Archive.zig
@@ -29,8 +29,8 @@ pub fn unpack(self: *Archive, macho_file: *MachO, path: Path, handle_index: File
pos += @sizeOf(ar_hdr);
if (!mem.eql(u8, &hdr.ar_fmag, ARFMAG)) {
- return diags.failParse(path, "invalid header delimiter: expected '{s}', found '{s}'", .{
- std.fmt.fmtSliceEscapeLower(ARFMAG), std.fmt.fmtSliceEscapeLower(&hdr.ar_fmag),
+ return diags.failParse(path, "invalid header delimiter: expected '{f}', found '{f}'", .{
+ std.ascii.hexEscape(ARFMAG, .lower), std.ascii.hexEscape(&hdr.ar_fmag, .lower),
});
}
src/link/MachO/Object.zig
@@ -308,7 +308,7 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void {
} else nlists.len;
if (nlist_start == nlist_end or nlists[nlist_start].nlist.n_value > sect.addr) {
- const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$begin", .{ sect.segName(), sect.sectName() });
+ const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}$begin", .{ sect.segName(), sect.sectName() }, 0);
defer allocator.free(name);
const size = if (nlist_start == nlist_end) sect.size else nlists[nlist_start].nlist.n_value - sect.addr;
const atom_index = try self.addAtom(allocator, .{
@@ -364,7 +364,7 @@ fn initSubsections(self: *Object, allocator: Allocator, nlists: anytype) !void {
// which cannot be contained in any non-zero atom (since then this atom
// would exceed section boundaries). In order to facilitate this behaviour,
// we create a dummy zero-sized atom at section end (addr + size).
- const name = try std.fmt.allocPrintZ(allocator, "{s}${s}$end", .{ sect.segName(), sect.sectName() });
+ const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}$end", .{ sect.segName(), sect.sectName() }, 0);
defer allocator.free(name);
const atom_index = try self.addAtom(allocator, .{
.name = try self.addString(allocator, name),
@@ -394,7 +394,7 @@ fn initSections(self: *Object, allocator: Allocator, nlists: anytype) !void {
if (isFixedSizeLiteral(sect)) continue;
if (isPtrLiteral(sect)) continue;
- const name = try std.fmt.allocPrintZ(allocator, "{s}${s}", .{ sect.segName(), sect.sectName() });
+ const name = try std.fmt.allocPrintSentinel(allocator, "{s}${s}", .{ sect.segName(), sect.sectName() }, 0);
defer allocator.free(name);
const atom_index = try self.addAtom(allocator, .{
@@ -462,7 +462,7 @@ fn initCstringLiterals(self: *Object, allocator: Allocator, file: File.Handle, m
}
end += 1;
- const name = try std.fmt.allocPrintZ(allocator, "l._str{d}", .{count});
+ const name = try std.fmt.allocPrintSentinel(allocator, "l._str{d}", .{count}, 0);
defer allocator.free(name);
const name_str = try self.addString(allocator, name);
@@ -529,7 +529,7 @@ fn initFixedSizeLiterals(self: *Object, allocator: Allocator, macho_file: *MachO
pos += rec_size;
count += 1;
}) {
- const name = try std.fmt.allocPrintZ(allocator, "l._literal{d}", .{count});
+ const name = try std.fmt.allocPrintSentinel(allocator, "l._literal{d}", .{count}, 0);
defer allocator.free(name);
const name_str = try self.addString(allocator, name);
@@ -587,7 +587,7 @@ fn initPointerLiterals(self: *Object, allocator: Allocator, macho_file: *MachO)
for (0..num_ptrs) |i| {
const pos: u32 = @as(u32, @intCast(i)) * rec_size;
- const name = try std.fmt.allocPrintZ(allocator, "l._ptr{d}", .{i});
+ const name = try std.fmt.allocPrintSentinel(allocator, "l._ptr{d}", .{i}, 0);
defer allocator.free(name);
const name_str = try self.addString(allocator, name);
@@ -1558,7 +1558,7 @@ pub fn convertTentativeDefinitions(self: *Object, macho_file: *MachO) !void {
const nlist = &self.symtab.items(.nlist)[nlist_idx];
const nlist_atom = &self.symtab.items(.atom)[nlist_idx];
- const name = try std.fmt.allocPrintZ(gpa, "__DATA$__common${s}", .{sym.getName(macho_file)});
+ const name = try std.fmt.allocPrintSentinel(gpa, "__DATA$__common${s}", .{sym.getName(macho_file)}, 0);
defer gpa.free(name);
const alignment = (nlist.n_desc >> 8) & 0x0f;
src/link/MachO/ZigObject.zig
@@ -959,7 +959,7 @@ fn updateNavCode(
sym.out_n_sect = sect_index;
atom.out_n_sect = sect_index;
- const sym_name = try std.fmt.allocPrintZ(gpa, "_{s}", .{nav.fqn.toSlice(ip)});
+ const sym_name = try std.fmt.allocPrintSentinel(gpa, "_{s}", .{nav.fqn.toSlice(ip)}, 0);
defer gpa.free(sym_name);
sym.name = try self.addString(gpa, sym_name);
atom.setAlive(true);
src/link/tapi/parse.zig
@@ -57,14 +57,9 @@ pub const Node = struct {
}
}
- pub fn format(
- self: *const Node,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
+ pub fn format(self: *const Node, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
switch (self.tag) {
- inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(fmt, options, writer),
+ inline else => |tag| return @as(*tag.Type(), @fieldParentPtr("base", self)).format(writer, fmt),
}
}
@@ -86,14 +81,8 @@ pub const Node = struct {
}
}
- pub fn format(
- self: *const Doc,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- _ = fmt;
+ pub fn format(self: *const Doc, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
if (self.directive) |id| {
try std.fmt.format(writer, "{{ ", .{});
const directive = self.base.tree.getRaw(id, id);
@@ -133,14 +122,8 @@ pub const Node = struct {
self.values.deinit(allocator);
}
- pub fn format(
- self: *const Map,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- _ = fmt;
+ pub fn format(self: *const Map, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
try std.fmt.format(writer, "{{ ", .{});
for (self.values.items) |entry| {
const key = self.base.tree.getRaw(entry.key, entry.key);
@@ -172,14 +155,8 @@ pub const Node = struct {
self.values.deinit(allocator);
}
- pub fn format(
- self: *const List,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- _ = fmt;
+ pub fn format(self: *const List, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
try std.fmt.format(writer, "[ ", .{});
for (self.values.items) |node| {
try std.fmt.format(writer, "{}, ", .{node});
@@ -203,14 +180,8 @@ pub const Node = struct {
self.string_value.deinit(allocator);
}
- pub fn format(
- self: *const Value,
- comptime fmt: []const u8,
- options: std.fmt.FormatOptions,
- writer: anytype,
- ) !void {
- _ = options;
- _ = fmt;
+ pub fn format(self: *const Value, writer: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
const raw = self.base.tree.getRaw(self.base.start, self.base.end);
return std.fmt.format(writer, "{s}", .{raw});
}
src/link/Wasm/Flush.zig
@@ -1035,20 +1035,14 @@ pub fn finish(f: *Flush, wasm: *Wasm) !void {
var id: [16]u8 = undefined;
std.crypto.hash.sha3.TurboShake128(null).hash(binary_bytes.items, &id, .{});
var uuid: [36]u8 = undefined;
- _ = try std.fmt.bufPrint(&uuid, "{s}-{s}-{s}-{s}-{s}", .{
- std.fmt.fmtSliceHexLower(id[0..4]),
- std.fmt.fmtSliceHexLower(id[4..6]),
- std.fmt.fmtSliceHexLower(id[6..8]),
- std.fmt.fmtSliceHexLower(id[8..10]),
- std.fmt.fmtSliceHexLower(id[10..]),
+ _ = try std.fmt.bufPrint(&uuid, "{x}-{x}-{x}-{x}-{x}", .{
+ id[0..4], id[4..6], id[6..8], id[8..10], id[10..],
});
try emitBuildIdSection(gpa, binary_bytes, &uuid);
},
.hexstring => |hs| {
var buffer: [32 * 2]u8 = undefined;
- const str = std.fmt.bufPrint(&buffer, "{s}", .{
- std.fmt.fmtSliceHexLower(hs.toSlice()),
- }) catch unreachable;
+ const str = std.fmt.bufPrint(&buffer, "{x}", .{hs.toSlice()}) catch unreachable;
try emitBuildIdSection(gpa, binary_bytes, str);
},
else => |mode| {
src/link/Coff.zig
@@ -830,8 +830,8 @@ fn debugMem(allocator: Allocator, handle: std.process.Child.Id, pvaddr: std.os.w
const buffer = try allocator.alloc(u8, code.len);
defer allocator.free(buffer);
const memread = try std.os.windows.ReadProcessMemory(handle, pvaddr, buffer);
- log.debug("to write: {x}", .{std.fmt.fmtSliceHexLower(code)});
- log.debug("in memory: {x}", .{std.fmt.fmtSliceHexLower(memread)});
+ log.debug("to write: {x}", .{code});
+ log.debug("in memory: {x}", .{memread});
}
fn writeMemProtected(handle: std.process.Child.Id, pvaddr: std.os.windows.LPVOID, code: []const u8) !void {
src/link/LdScript.zig
@@ -41,8 +41,8 @@ pub fn parse(
try line_col.append(gpa, .{ .line = line, .column = column });
switch (tok.id) {
.invalid => {
- return diags.failParse(path, "invalid token in LD script: '{s}' ({d}:{d})", .{
- std.fmt.fmtSliceEscapeLower(tok.get(data)), line, column,
+ return diags.failParse(path, "invalid token in LD script: '{f}' ({d}:{d})", .{
+ std.ascii.hexEscape(tok.get(data), .lower), line, column,
});
},
.new_line => {
src/link/Lld.zig
@@ -933,9 +933,7 @@ fn elfLink(lld: *Lld, arena: Allocator) !void {
.fast, .uuid, .sha1, .md5 => try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{
@tagName(base.build_id),
})),
- .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{s}", .{
- std.fmt.fmtSliceHexLower(hs.toSlice()),
- })),
+ .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{x}", .{hs.toSlice()})),
}
try argv.append(try std.fmt.allocPrint(arena, "--image-base={d}", .{elf.image_base}));
@@ -1511,9 +1509,7 @@ fn wasmLink(lld: *Lld, arena: Allocator) !void {
.fast, .uuid, .sha1 => try argv.append(try std.fmt.allocPrint(arena, "--build-id={s}", .{
@tagName(base.build_id),
})),
- .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{s}", .{
- std.fmt.fmtSliceHexLower(hs.toSlice()),
- })),
+ .hexstring => |hs| try argv.append(try std.fmt.allocPrint(arena, "--build-id=0x{x}", .{hs.toSlice()})),
.md5 => {},
}
@@ -1667,7 +1663,7 @@ fn spawnLld(
log.warn("failed to delete response file {s}: {s}", .{ rsp_path, @errorName(err) });
{
defer rsp_file.close();
- var rsp_buf = std.io.bufferedWriter(rsp_file.writer());
+ var rsp_buf = std.io.bufferedWriter(rsp_file.deprecatedWriter());
const rsp_writer = rsp_buf.writer();
for (argv[2..]) |arg| {
try rsp_writer.writeByte('"');
src/Package/Fetch/git.zig
@@ -127,7 +127,7 @@ pub const Oid = union(Format) {
) @TypeOf(writer).Error!void {
_ = fmt;
_ = options;
- try writer.print("{}", .{std.fmt.fmtSliceHexLower(oid.slice())});
+ try writer.print("{x}", .{oid.slice()});
}
pub fn slice(oid: *const Oid) []const u8 {
@@ -353,7 +353,7 @@ const Odb = struct {
fn init(allocator: Allocator, format: Oid.Format, pack_file: std.fs.File, index_file: std.fs.File) !Odb {
try pack_file.seekTo(0);
try index_file.seekTo(0);
- const index_header = try IndexHeader.read(index_file.reader());
+ const index_header = try IndexHeader.read(index_file.deprecatedReader());
return .{
.format = format,
.pack_file = pack_file,
@@ -377,7 +377,7 @@ const Odb = struct {
const base_object = while (true) {
if (odb.cache.get(base_offset)) |base_object| break base_object;
- base_header = try EntryHeader.read(odb.format, odb.pack_file.reader());
+ base_header = try EntryHeader.read(odb.format, odb.pack_file.deprecatedReader());
switch (base_header) {
.ofs_delta => |ofs_delta| {
try delta_offsets.append(odb.allocator, base_offset);
@@ -390,7 +390,7 @@ const Odb = struct {
base_offset = try odb.pack_file.getPos();
},
else => {
- const base_data = try readObjectRaw(odb.allocator, odb.pack_file.reader(), base_header.uncompressedLength());
+ const base_data = try readObjectRaw(odb.allocator, odb.pack_file.deprecatedReader(), base_header.uncompressedLength());
errdefer odb.allocator.free(base_data);
const base_object: Object = .{ .type = base_header.objectType(), .data = base_data };
try odb.cache.put(odb.allocator, base_offset, base_object);
@@ -420,7 +420,7 @@ const Odb = struct {
const found_index = while (start_index < end_index) {
const mid_index = start_index + (end_index - start_index) / 2;
try odb.index_file.seekTo(IndexHeader.size + mid_index * oid_length);
- const mid_oid = try Oid.readBytes(odb.format, odb.index_file.reader());
+ const mid_oid = try Oid.readBytes(odb.format, odb.index_file.deprecatedReader());
switch (mem.order(u8, mid_oid.slice(), oid.slice())) {
.lt => start_index = mid_index + 1,
.gt => end_index = mid_index,
@@ -431,12 +431,12 @@ const Odb = struct {
const n_objects = odb.index_header.fan_out_table[255];
const offset_values_start = IndexHeader.size + n_objects * (oid_length + 4);
try odb.index_file.seekTo(offset_values_start + found_index * 4);
- const l1_offset: packed struct { value: u31, big: bool } = @bitCast(try odb.index_file.reader().readInt(u32, .big));
+ const l1_offset: packed struct { value: u31, big: bool } = @bitCast(try odb.index_file.deprecatedReader().readInt(u32, .big));
const pack_offset = pack_offset: {
if (l1_offset.big) {
const l2_offset_values_start = offset_values_start + n_objects * 4;
try odb.index_file.seekTo(l2_offset_values_start + l1_offset.value * 4);
- break :pack_offset try odb.index_file.reader().readInt(u64, .big);
+ break :pack_offset try odb.index_file.deprecatedReader().readInt(u64, .big);
} else {
break :pack_offset l1_offset.value;
}
@@ -1561,7 +1561,7 @@ fn runRepositoryTest(comptime format: Oid.Format, head_commit: []const u8) !void
var index_file = try git_dir.dir.createFile("testrepo.idx", .{ .read = true });
defer index_file.close();
- try indexPack(testing.allocator, format, pack_file, index_file.writer());
+ try indexPack(testing.allocator, format, pack_file, index_file.deprecatedWriter());
// Arbitrary size limit on files read while checking the repository contents
// (all files in the test repo are known to be smaller than this)
@@ -1678,7 +1678,7 @@ pub fn main() !void {
std.debug.print("Starting index...\n", .{});
var index_file = try git_dir.createFile("idx", .{ .read = true });
defer index_file.close();
- var index_buffered_writer = std.io.bufferedWriter(index_file.writer());
+ var index_buffered_writer = std.io.bufferedWriter(index_file.deprecatedWriter());
try indexPack(allocator, format, pack_file, index_buffered_writer.writer());
try index_buffered_writer.flush();
try index_file.sync();
src/Package/Fetch.zig
@@ -201,7 +201,7 @@ pub const JobQueue = struct {
const hash_slice = hash.toSlice();
try buf.writer().print(
- \\ pub const {} = struct {{
+ \\ pub const {f} = struct {{
\\
, .{std.zig.fmtId(hash_slice)});
@@ -233,9 +233,9 @@ pub const JobQueue = struct {
if (fetch.has_build_zig) {
try buf.writer().print(
- \\ pub const build_zig = @import("{}");
+ \\ pub const build_zig = @import("{f}");
\\
- , .{std.zig.fmtEscapes(hash_slice)});
+ , .{std.zig.fmtString(hash_slice)});
}
if (fetch.manifest) |*manifest| {
@@ -246,8 +246,8 @@ pub const JobQueue = struct {
for (manifest.dependencies.keys(), manifest.dependencies.values()) |name, dep| {
const h = depDigest(fetch.package_root, jq.global_cache, dep) orelse continue;
try buf.writer().print(
- " .{{ \"{}\", \"{}\" }},\n",
- .{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) },
+ " .{{ \"{f}\", \"{f}\" }},\n",
+ .{ std.zig.fmtString(name), std.zig.fmtString(h.toSlice()) },
);
}
@@ -278,8 +278,8 @@ pub const JobQueue = struct {
for (root_manifest.dependencies.keys(), root_manifest.dependencies.values()) |name, dep| {
const h = depDigest(root_fetch.package_root, jq.global_cache, dep) orelse continue;
try buf.writer().print(
- " .{{ \"{}\", \"{}\" }},\n",
- .{ std.zig.fmtEscapes(name), std.zig.fmtEscapes(h.toSlice()) },
+ " .{{ \"{f}\", \"{f}\" }},\n",
+ .{ std.zig.fmtString(name), std.zig.fmtString(h.toSlice()) },
);
}
try buf.appendSlice("};\n");
@@ -1321,7 +1321,7 @@ fn unzip(f: *Fetch, out_dir: fs.Dir, reader: anytype) RunError!UnpackResult {
.{@errorName(err)},
));
if (len == 0) break;
- zip_file.writer().writeAll(buf[0..len]) catch |err| return f.fail(f.location_tok, try eb.printString(
+ zip_file.deprecatedWriter().writeAll(buf[0..len]) catch |err| return f.fail(f.location_tok, try eb.printString(
"write temporary zip file failed: {s}",
.{@errorName(err)},
));
@@ -1374,7 +1374,7 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!U
var pack_file = try pack_dir.createFile("pkg.pack", .{ .read = true });
defer pack_file.close();
var fifo = std.fifo.LinearFifo(u8, .{ .Static = 4096 }).init();
- try fifo.pump(resource.fetch_stream.reader(), pack_file.writer());
+ try fifo.pump(resource.fetch_stream.reader(), pack_file.deprecatedWriter());
try pack_file.sync();
var index_file = try pack_dir.createFile("pkg.idx", .{ .read = true });
@@ -1382,7 +1382,7 @@ fn unpackGitPack(f: *Fetch, out_dir: fs.Dir, resource: *Resource.Git) anyerror!U
{
const index_prog_node = f.prog_node.start("Index pack", 0);
defer index_prog_node.end();
- var index_buffered_writer = std.io.bufferedWriter(index_file.writer());
+ var index_buffered_writer = std.io.bufferedWriter(index_file.deprecatedWriter());
try git.indexPack(gpa, object_format, pack_file, index_buffered_writer.writer());
try index_buffered_writer.flush();
try index_file.sync();
@@ -1655,13 +1655,13 @@ fn computeHash(f: *Fetch, pkg_path: Cache.Path, filter: Filter) RunError!Compute
fn dumpHashInfo(all_files: []const *const HashedFile) !void {
const stdout: std.fs.File = .stdout();
- var bw = std.io.bufferedWriter(stdout.writer());
+ var bw = std.io.bufferedWriter(stdout.deprecatedWriter());
const w = bw.writer();
for (all_files) |hashed_file| {
- try w.print("{s}: {s}: {s}\n", .{
+ try w.print("{s}: {x}: {s}\n", .{
@tagName(hashed_file.kind),
- std.fmt.fmtSliceHexLower(&hashed_file.hash),
+ &hashed_file.hash,
hashed_file.normalized_path,
});
}
@@ -2074,7 +2074,7 @@ test "zip" {
{
var zip_file = try tmp.dir.createFile("test.zip", .{});
defer zip_file.close();
- var bw = std.io.bufferedWriter(zip_file.writer());
+ var bw = std.io.bufferedWriter(zip_file.deprecatedWriter());
var store: [test_files.len]std.zip.testutil.FileStore = undefined;
try std.zip.testutil.writeZip(bw.writer(), &test_files, &store, .{});
try bw.flush();
@@ -2107,7 +2107,7 @@ test "zip with one root folder" {
{
var zip_file = try tmp.dir.createFile("test.zip", .{});
defer zip_file.close();
- var bw = std.io.bufferedWriter(zip_file.writer());
+ var bw = std.io.bufferedWriter(zip_file.deprecatedWriter());
var store: [test_files.len]std.zip.testutil.FileStore = undefined;
try std.zip.testutil.writeZip(bw.writer(), &test_files, &store, .{});
try bw.flush();
src/Package/Manifest.zig
@@ -401,7 +401,7 @@ const Parse = struct {
return fail(p, main_token, "name must be a valid bare zig identifier (hint: switch from string to enum literal)", .{});
if (name.len > max_name_len)
- return fail(p, main_token, "name '{}' exceeds max length of {d}", .{
+ return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{
std.zig.fmtId(name), max_name_len,
});
@@ -416,7 +416,7 @@ const Parse = struct {
return fail(p, main_token, "name must be a valid bare zig identifier", .{});
if (ident_name.len > max_name_len)
- return fail(p, main_token, "name '{}' exceeds max length of {d}", .{
+ return fail(p, main_token, "name '{f}' exceeds max length of {d}", .{
std.zig.fmtId(ident_name), max_name_len,
});
src/Sema/LowerZon.zig
@@ -661,7 +661,7 @@ fn lowerEnum(self: *LowerZon, node: Zoir.Node.Index, res_ty: Type) !InternPool.I
const field_index = res_ty.enumFieldIndex(field_name_interned, self.sema.pt.zcu) orelse {
return self.fail(
node,
- "enum {} has no member named '{}'",
+ "enum {f} has no member named '{f}'",
.{
res_ty.fmt(self.sema.pt),
std.zig.fmtId(field_name.get(self.file.zoir.?)),
src/Zcu/PerThread.zig
@@ -341,7 +341,7 @@ fn loadZirZoirCache(
};
// First we read the header to determine the lengths of arrays.
- const header = cache_file.reader().readStruct(Header) catch |err| switch (err) {
+ const header = cache_file.deprecatedReader().readStruct(Header) catch |err| switch (err) {
// This can happen if Zig bails out of this function between creating
// the cached file and writing it.
error.EndOfStream => return .invalid,
@@ -477,11 +477,11 @@ pub fn updateZirRefs(pt: Zcu.PerThread) Allocator.Error!void {
if (std.zig.srcHashEql(old_hash, new_hash)) {
break :hash_changed;
}
- log.debug("hash for (%{d} -> %{d}) changed: {} -> {}", .{
+ log.debug("hash for (%{d} -> %{d}) changed: {x} -> {x}", .{
old_inst,
new_inst,
- std.fmt.fmtSliceHexLower(&old_hash),
- std.fmt.fmtSliceHexLower(&new_hash),
+ &old_hash,
+ &new_hash,
});
}
// The source hash associated with this instruction changed - invalidate relevant dependencies.
@@ -4378,7 +4378,7 @@ fn runCodegenInner(pt: Zcu.PerThread, func_index: InternPool.Index, air: *Air) e
if (build_options.enable_debug_extensions and comp.verbose_air) {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.print("# Begin Function AIR: {}:\n", .{fqn.fmt(ip)}) catch {};
air.write(stderr, pt, liveness);
stderr.print("# End Function AIR: {}\n\n", .{fqn.fmt(ip)}) catch {};
src/Builtin.zig
@@ -51,60 +51,60 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
const zig_backend = opts.zig_backend;
@setEvalBranchQuota(4000);
- try buffer.writer().print(
+ try buffer.print(
\\const std = @import("std");
\\/// Zig version. When writing code that supports multiple versions of Zig, prefer
\\/// feature detection (i.e. with `@hasDecl` or `@hasField`) over version checks.
\\pub const zig_version = std.SemanticVersion.parse(zig_version_string) catch unreachable;
\\pub const zig_version_string = "{s}";
- \\pub const zig_backend = std.builtin.CompilerBackend.{p_};
+ \\pub const zig_backend = std.builtin.CompilerBackend.{f};
\\
- \\pub const output_mode: std.builtin.OutputMode = .{p_};
- \\pub const link_mode: std.builtin.LinkMode = .{p_};
- \\pub const unwind_tables: std.builtin.UnwindTables = .{p_};
+ \\pub const output_mode: std.builtin.OutputMode = .{f};
+ \\pub const link_mode: std.builtin.LinkMode = .{f};
+ \\pub const unwind_tables: std.builtin.UnwindTables = .{f};
\\pub const is_test = {};
\\pub const single_threaded = {};
- \\pub const abi: std.Target.Abi = .{p_};
+ \\pub const abi: std.Target.Abi = .{f};
\\pub const cpu: std.Target.Cpu = .{{
- \\ .arch = .{p_},
- \\ .model = &std.Target.{p_}.cpu.{p_},
- \\ .features = std.Target.{p_}.featureSet(&.{{
+ \\ .arch = .{f},
+ \\ .model = &std.Target.{f}.cpu.{f},
+ \\ .features = std.Target.{f}.featureSet(&.{{
\\
, .{
build_options.version,
- std.zig.fmtId(@tagName(zig_backend)),
- std.zig.fmtId(@tagName(opts.output_mode)),
- std.zig.fmtId(@tagName(opts.link_mode)),
- std.zig.fmtId(@tagName(opts.unwind_tables)),
+ std.zig.fmtIdPU(@tagName(zig_backend)),
+ std.zig.fmtIdPU(@tagName(opts.output_mode)),
+ std.zig.fmtIdPU(@tagName(opts.link_mode)),
+ std.zig.fmtIdPU(@tagName(opts.unwind_tables)),
opts.is_test,
opts.single_threaded,
- std.zig.fmtId(@tagName(target.abi)),
- std.zig.fmtId(@tagName(target.cpu.arch)),
- std.zig.fmtId(arch_family_name),
- std.zig.fmtId(target.cpu.model.name),
- std.zig.fmtId(arch_family_name),
+ std.zig.fmtIdPU(@tagName(target.abi)),
+ std.zig.fmtIdPU(@tagName(target.cpu.arch)),
+ std.zig.fmtIdPU(arch_family_name),
+ std.zig.fmtIdPU(target.cpu.model.name),
+ std.zig.fmtIdPU(arch_family_name),
});
for (target.cpu.arch.allFeaturesList(), 0..) |feature, index_usize| {
const index = @as(std.Target.Cpu.Feature.Set.Index, @intCast(index_usize));
const is_enabled = target.cpu.features.isEnabled(index);
if (is_enabled) {
- try buffer.writer().print(" .{p_},\n", .{std.zig.fmtId(feature.name)});
+ try buffer.print(" .{f},\n", .{std.zig.fmtIdPU(feature.name)});
}
}
- try buffer.writer().print(
+ try buffer.print(
\\ }}),
\\}};
\\pub const os: std.Target.Os = .{{
- \\ .tag = .{p_},
+ \\ .tag = .{f},
\\ .version_range = .{{
,
- .{std.zig.fmtId(@tagName(target.os.tag))},
+ .{std.zig.fmtIdPU(@tagName(target.os.tag))},
);
switch (target.os.versionRange()) {
.none => try buffer.appendSlice(" .none = {} },\n"),
- .semver => |semver| try buffer.writer().print(
+ .semver => |semver| try buffer.print(
\\ .semver = .{{
\\ .min = .{{
\\ .major = {},
@@ -127,7 +127,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
semver.max.minor,
semver.max.patch,
}),
- .linux => |linux| try buffer.writer().print(
+ .linux => |linux| try buffer.print(
\\ .linux = .{{
\\ .range = .{{
\\ .min = .{{
@@ -164,7 +164,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
linux.android,
}),
- .hurd => |hurd| try buffer.writer().print(
+ .hurd => |hurd| try buffer.print(
\\ .hurd = .{{
\\ .range = .{{
\\ .min = .{{
@@ -198,10 +198,10 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
hurd.glibc.minor,
hurd.glibc.patch,
}),
- .windows => |windows| try buffer.writer().print(
+ .windows => |windows| try buffer.print(
\\ .windows = .{{
- \\ .min = {c},
- \\ .max = {c},
+ \\ .min = {fc},
+ \\ .max = {fc},
\\ }}}},
\\
, .{ windows.min, windows.max }),
@@ -217,7 +217,7 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
);
if (target.dynamic_linker.get()) |dl| {
- try buffer.writer().print(
+ try buffer.print(
\\ .dynamic_linker = .init("{s}"),
\\}};
\\
@@ -237,9 +237,9 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
// knows libc will provide it, and likewise c.zig will not export memcpy.
const link_libc = opts.link_libc;
- try buffer.writer().print(
- \\pub const object_format: std.Target.ObjectFormat = .{p_};
- \\pub const mode: std.builtin.OptimizeMode = .{p_};
+ try buffer.print(
+ \\pub const object_format: std.Target.ObjectFormat = .{f};
+ \\pub const mode: std.builtin.OptimizeMode = .{f};
\\pub const link_libc = {};
\\pub const link_libcpp = {};
\\pub const have_error_return_tracing = {};
@@ -249,12 +249,12 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
\\pub const position_independent_code = {};
\\pub const position_independent_executable = {};
\\pub const strip_debug_info = {};
- \\pub const code_model: std.builtin.CodeModel = .{p_};
+ \\pub const code_model: std.builtin.CodeModel = .{f};
\\pub const omit_frame_pointer = {};
\\
, .{
- std.zig.fmtId(@tagName(target.ofmt)),
- std.zig.fmtId(@tagName(opts.optimize_mode)),
+ std.zig.fmtIdPU(@tagName(target.ofmt)),
+ std.zig.fmtIdPU(@tagName(opts.optimize_mode)),
link_libc,
opts.link_libcpp,
opts.error_tracing,
@@ -264,15 +264,15 @@ pub fn append(opts: @This(), buffer: *std.ArrayList(u8)) Allocator.Error!void {
opts.pic,
opts.pie,
opts.strip,
- std.zig.fmtId(@tagName(opts.code_model)),
+ std.zig.fmtIdPU(@tagName(opts.code_model)),
opts.omit_frame_pointer,
});
if (target.os.tag == .wasi) {
- try buffer.writer().print(
- \\pub const wasi_exec_model: std.builtin.WasiExecModel = .{p_};
+ try buffer.print(
+ \\pub const wasi_exec_model: std.builtin.WasiExecModel = .{f};
\\
- , .{std.zig.fmtId(@tagName(opts.wasi_exec_model))});
+ , .{std.zig.fmtIdPU(@tagName(opts.wasi_exec_model))});
}
if (opts.is_test) {
@@ -317,7 +317,7 @@ pub fn updateFileOnDisk(file: *File, comp: *Compilation) !void {
if (root_dir.statFile(sub_path)) |stat| {
if (stat.size != file.source.?.len) {
std.log.warn(
- "the cached file '{}' had the wrong size. Expected {d}, found {d}. " ++
+ "the cached file '{f}{s}' had the wrong size. Expected {d}, found {d}. " ++
"Overwriting with correct file contents now",
.{ file.path.fmt(comp), file.source.?.len, stat.size },
);
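The Builtin.zig hunks switch identifier rendering from the old {p_} specifier to {f} with the fmtIdPU helper. A small sketch of that pattern in isolation (illustrative, not part of the commit; fmtIdPU and the {f} specifier are assumed to behave as in the hunks above):

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;
    // Identifier formatters now go through the new format method and are selected
    // with "{f}"; fmtIdPU is the variant this commit uses for builtin.zig output.
    const line = try std.fmt.allocPrint(gpa, "pub const abi: std.Target.Abi = .{f};\n", .{
        std.zig.fmtIdPU("gnueabihf"),
    });
    defer gpa.free(line);
    std.debug.print("{s}", .{line});
}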
src/Compilation.zig
@@ -1001,7 +1001,7 @@ pub const CObject = struct {
var line = std.ArrayList(u8).init(eb.gpa);
defer line.deinit();
- file.reader().readUntilDelimiterArrayList(&line, '\n', 1 << 10) catch break :source_line 0;
+ file.deprecatedReader().readUntilDelimiterArrayList(&line, '\n', 1 << 10) catch break :source_line 0;
break :source_line try eb.addString(line.items);
};
@@ -1069,7 +1069,7 @@ pub const CObject = struct {
const file = try std.fs.cwd().openFile(path, .{});
defer file.close();
- var br = std.io.bufferedReader(file.reader());
+ var br = std.io.bufferedReader(file.deprecatedReader());
const reader = br.reader();
var bc = std.zig.llvm.BitcodeReader.init(gpa, .{ .reader = reader.any() });
defer bc.deinit();
@@ -1875,7 +1875,7 @@ pub fn create(gpa: Allocator, arena: Allocator, options: CreateOptions) !*Compil
if (options.root_mod.resolved_target.llvm_cpu_features) |cf| print: {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
nosuspend {
stderr.print("compilation: {s}\n", .{options.root_name}) catch break :print;
stderr.print(" target: {s}\n", .{try target.zigTriple(arena)}) catch break :print;
@@ -3932,7 +3932,7 @@ pub fn getAllErrorsAlloc(comp: *Compilation) !ErrorBundle {
// This AU is referenced and has a transitive compile error, meaning it referenced something with a compile error.
// However, we haven't reported any such error.
// This is a compiler bug.
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
try stderr.writeAll("referenced transitive analysis errors, but none actually emitted\n");
try stderr.print("{} [transitive failure]\n", .{zcu.fmtAnalUnit(failed_unit)});
while (ref) |r| {
@@ -4894,7 +4894,7 @@ fn docsCopyModule(comp: *Compilation, module: *Package.Module, name: []const u8,
var walker = try mod_dir.walk(comp.gpa);
defer walker.deinit();
- var archiver = std.tar.writer(tar_file.writer().any());
+ var archiver = std.tar.writer(tar_file.deprecatedWriter().any());
archiver.prefix = name;
while (try walker.next()) |entry| {
@@ -7214,7 +7214,7 @@ pub fn lockAndSetMiscFailure(
pub fn dump_argv(argv: []const []const u8) void {
std.debug.lockStdErr();
defer std.debug.unlockStdErr();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
for (argv[0 .. argv.len - 1]) |arg| {
nosuspend stderr.print("{s} ", .{arg}) catch return;
}
src/crash_report.zig
@@ -80,7 +80,7 @@ fn dumpStatusReport() !void {
var fba = std.heap.FixedBufferAllocator.init(&crash_heap);
const allocator = fba.allocator();
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
const block: *Sema.Block = anal.block;
const zcu = anal.sema.pt.zcu;
@@ -271,7 +271,7 @@ const StackContext = union(enum) {
debug.dumpStackTraceFromBase(context);
},
.not_supported => {
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.writeAll("Stack trace not supported on this platform.\n") catch {};
},
}
@@ -379,7 +379,7 @@ const PanicSwitch = struct {
state.recover_stage = .release_mutex;
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
if (builtin.single_threaded) {
stderr.print("panic: ", .{}) catch goTo(releaseMutex, .{state});
} else {
@@ -406,7 +406,7 @@ const PanicSwitch = struct {
recover(state, trace, stack, msg);
state.recover_stage = .release_mutex;
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.writeAll("\nOriginal Error:\n") catch {};
goTo(reportStack, .{state});
}
@@ -477,7 +477,7 @@ const PanicSwitch = struct {
recover(state, trace, stack, msg);
state.recover_stage = .silent_abort;
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.writeAll("Aborting...\n") catch {};
goTo(abort, .{});
}
@@ -505,7 +505,7 @@ const PanicSwitch = struct {
// lower the verbosity, and restore it at the end if we don't panic.
state.recover_verbosity = .message_only;
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.writeAll("\nPanicked during a panic: ") catch {};
stderr.writeAll(msg) catch {};
stderr.writeAll("\nInner panic stack:\n") catch {};
@@ -519,7 +519,7 @@ const PanicSwitch = struct {
.message_only => {
state.recover_verbosity = .silent;
- const stderr = std.fs.File.stderr().writer();
+ const stderr = std.fs.File.stderr().deprecatedWriter();
stderr.writeAll("\nPanicked while dumping inner panic stack: ") catch {};
stderr.writeAll(msg) catch {};
stderr.writeAll("\n") catch {};
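Most of the stderr changes above are a mechanical rename: File.writer() becomes File.deprecatedWriter(), which still yields the legacy generic writer, so existing print/writeAll call sites compile unchanged. A sketch of the call shape (illustrative only; assumes the renamed accessor from this branch):

const std = @import("std");

pub fn main() void {
    // Same legacy writer as before, obtained under its new deprecated name.
    const stderr = std.fs.File.stderr().deprecatedWriter();
    stderr.print("panic: {s}\n", .{"example message"}) catch {};
}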
src/fmt.zig
@@ -60,7 +60,7 @@ pub fn run(
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_fmt);
return process.cleanExit();
} else if (mem.eql(u8, arg, "--color")) {
@@ -371,7 +371,7 @@ fn fmtPathFile(
return;
if (check_mode) {
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.print("{s}\n", .{file_path});
fmt.any_error = true;
} else {
@@ -380,7 +380,7 @@ fn fmtPathFile(
try af.file.writeAll(fmt.out_buffer.items);
try af.finish();
- const stdout = std.fs.File.stdout().writer();
+ const stdout = std.fs.File.stdout().deprecatedWriter();
try stdout.print("{s}\n", .{file_path});
}
}
src/InternPool.zig
@@ -1892,7 +1892,7 @@ pub const NullTerminatedString = enum(u32) {
if (comptime std.mem.eql(u8, specifier, "")) {
try writer.writeAll(slice);
} else if (comptime std.mem.eql(u8, specifier, "i")) {
- try writer.print("{p}", .{std.zig.fmtId(slice)});
+ try writer.print("{f}", .{std.zig.fmtIdP(slice)});
} else @compileError("invalid format string '" ++ specifier ++ "' for '" ++ @typeName(NullTerminatedString) ++ "'");
}
@@ -11259,7 +11259,7 @@ fn dumpStatsFallible(ip: *const InternPool, arena: Allocator) anyerror!void {
}
fn dumpAllFallible(ip: *const InternPool) anyerror!void {
- var bw = std.io.bufferedWriter(std.fs.File.stderr().writer());
+ var bw = std.io.bufferedWriter(std.fs.File.stderr().deprecatedWriter());
const w = bw.writer();
for (ip.locals, 0..) |*local, tid| {
const items = local.shared.items.view();
@@ -11369,7 +11369,7 @@ pub fn dumpGenericInstancesFallible(ip: *const InternPool, allocator: Allocator)
defer arena_allocator.deinit();
const arena = arena_allocator.allocator();
- var bw = std.io.bufferedWriter(std.fs.File.stderr().writer());
+ var bw = std.io.bufferedWriter(std.fs.File.stderr().deprecatedWriter());
const w = bw.writer();
var instances: std.AutoArrayHashMapUnmanaged(Index, std.ArrayListUnmanaged(Index)) = .empty;
src/main.zig
@@ -340,7 +340,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
} else if (mem.eql(u8, cmd, "targets")) {
dev.check(.targets_command);
const host = std.zig.resolveTargetQueryOrFatal(.{});
- const stdout = fs.File.stdout().writer();
+ const stdout = fs.File.stdout().deprecatedWriter();
return @import("print_targets.zig").cmdTargets(arena, cmd_args, stdout, &host);
} else if (mem.eql(u8, cmd, "version")) {
dev.check(.version_command);
@@ -352,7 +352,7 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
} else if (mem.eql(u8, cmd, "env")) {
dev.check(.env_command);
verifyLibcxxCorrectlyLinked();
- return @import("print_env.zig").cmdEnv(arena, cmd_args, fs.File.stdout().writer());
+ return @import("print_env.zig").cmdEnv(arena, cmd_args, fs.File.stdout().deprecatedWriter());
} else if (mem.eql(u8, cmd, "reduce")) {
return jitCmd(gpa, arena, cmd_args, .{
.cmd_name = "reduce",
@@ -3333,9 +3333,8 @@ fn buildOutputType(
var bin_digest: Cache.BinDigest = undefined;
hasher.final(&bin_digest);
- const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{s}-stdin{s}", .{
- std.fmt.fmtSliceHexLower(&bin_digest),
- ext.canonicalName(target),
+ const sub_path = try std.fmt.allocPrint(arena, "tmp" ++ sep ++ "{x}-stdin{s}", .{
+ &bin_digest, ext.canonicalName(target),
});
try dirs.local_cache.handle.rename(dump_path, sub_path);
@@ -6110,7 +6109,7 @@ fn cmdAstCheck(
const stdout = fs.File.stdout();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
- try stdout.writer().print(
+ try stdout.deprecatedWriter().print(
\\# Source bytes: {}
\\# Tokens: {} ({})
\\# AST Nodes: {} ({})
@@ -6186,7 +6185,7 @@ fn cmdDetectCpu(args: []const []const u8) !void {
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = fs.File.stdout().writer();
+ const stdout = fs.File.stdout().deprecatedWriter();
try stdout.writeAll(detect_cpu_usage);
return cleanExit();
} else if (mem.eql(u8, arg, "--llvm")) {
@@ -6279,7 +6278,7 @@ fn detectNativeCpuWithLLVM(
}
fn printCpu(cpu: std.Target.Cpu) !void {
- var bw = io.bufferedWriter(fs.File.stdout().writer());
+ var bw = io.bufferedWriter(fs.File.stdout().deprecatedWriter());
const stdout = bw.writer();
if (cpu.model.llvm_name) |llvm_name| {
@@ -6328,7 +6327,7 @@ fn cmdDumpLlvmInts(
const dl = tm.createTargetDataLayout();
const context = llvm.Context.create();
- var bw = io.bufferedWriter(fs.File.stdout().writer());
+ var bw = io.bufferedWriter(fs.File.stdout().deprecatedWriter());
const stdout = bw.writer();
for ([_]u16{ 1, 8, 16, 32, 64, 128, 256 }) |bits| {
@@ -6371,7 +6370,7 @@ fn cmdDumpZir(
const stdout = fs.File.stdout();
const fmtIntSizeBin = std.fmt.fmtIntSizeBin;
// zig fmt: off
- try stdout.writer().print(
+ try stdout.deprecatedWriter().print(
\\# Total ZIR bytes: {}
\\# Instructions: {d} ({})
\\# String Table Bytes: {}
@@ -6444,7 +6443,7 @@ fn cmdChangelist(
var inst_map: std.AutoHashMapUnmanaged(Zir.Inst.Index, Zir.Inst.Index) = .empty;
try Zcu.mapOldZirToNew(arena, old_zir, new_zir, &inst_map);
- var bw = io.bufferedWriter(fs.File.stdout().writer());
+ var bw = io.bufferedWriter(fs.File.stdout().deprecatedWriter());
const stdout = bw.writer();
{
try stdout.print("Instruction mappings:\n", .{});
@@ -6794,7 +6793,7 @@ fn cmdFetch(
const arg = args[i];
if (mem.startsWith(u8, arg, "-")) {
if (mem.eql(u8, arg, "-h") or mem.eql(u8, arg, "--help")) {
- const stdout = fs.File.stdout().writer();
+ const stdout = fs.File.stdout().deprecatedWriter();
try stdout.writeAll(usage_fetch);
return cleanExit();
} else if (mem.eql(u8, arg, "--global-cache-dir")) {
@@ -6908,7 +6907,7 @@ fn cmdFetch(
const name = switch (save) {
.no => {
- try fs.File.stdout().writer().print("{s}\n", .{package_hash_slice});
+ try fs.File.stdout().deprecatedWriter().print("{s}\n", .{package_hash_slice});
return cleanExit();
},
.yes, .exact => |name| name: {
@@ -6973,16 +6972,16 @@ fn cmdFetch(
const new_node_init = try std.fmt.allocPrint(arena,
\\.{{
- \\ .url = "{}",
- \\ .hash = "{}",
+ \\ .url = "{f}",
+ \\ .hash = "{f}",
\\ }}
, .{
- std.zig.fmtEscapes(saved_path_or_url),
- std.zig.fmtEscapes(package_hash_slice),
+ std.zig.fmtString(saved_path_or_url),
+ std.zig.fmtString(package_hash_slice),
});
- const new_node_text = try std.fmt.allocPrint(arena, ".{p_} = {s},\n", .{
- std.zig.fmtId(name), new_node_init,
+ const new_node_text = try std.fmt.allocPrint(arena, ".{f} = {s},\n", .{
+ std.zig.fmtIdPU(name), new_node_init,
});
const dependencies_init = try std.fmt.allocPrint(arena, ".{{\n {s} }}", .{
@@ -7008,13 +7007,13 @@ fn cmdFetch(
const location_replace = try std.fmt.allocPrint(
arena,
- "\"{}\"",
- .{std.zig.fmtEscapes(saved_path_or_url)},
+ "\"{f}\"",
+ .{std.zig.fmtString(saved_path_or_url)},
);
const hash_replace = try std.fmt.allocPrint(
arena,
- "\"{}\"",
- .{std.zig.fmtEscapes(package_hash_slice)},
+ "\"{f}\"",
+ .{std.zig.fmtString(package_hash_slice)},
);
warn("overwriting existing dependency named '{s}'", .{name});
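String escaping follows the same migration: std.zig.fmtEscapes becomes std.zig.fmtString and is printed through {f}. A minimal sketch (illustrative, not part of the commit; assumes fmtString as used in the cmdFetch hunks above):

const std = @import("std");

pub fn main() void {
    // fmtString escapes the contents for embedding between string-literal quotes,
    // mirroring the .url / .hash rewriting shown above.
    std.debug.print(".url = \"{f}\",\n", .{std.zig.fmtString("https://example.com/a \"b\"")});
}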
src/Package.zig
@@ -134,7 +134,7 @@ pub const Hash = struct {
}
var bin_digest: [Algo.digest_length]u8 = undefined;
Algo.hash(sub_path, &bin_digest, .{});
- _ = std.fmt.bufPrint(result.bytes[i..], "{}", .{std.fmt.fmtSliceHexLower(&bin_digest)}) catch unreachable;
+ _ = std.fmt.bufPrint(result.bytes[i..], "{x}", .{&bin_digest}) catch unreachable;
return result;
}
};
src/print_value.zig
@@ -232,7 +232,7 @@ fn printAggregate(
const len = ty.arrayLenIncludingSentinel(zcu);
if (len == 0) break :string;
const slice = bytes.toSlice(if (bytes.at(len - 1, ip) == 0) len - 1 else len, ip);
- try writer.print("\"{}\"", .{std.zig.fmtEscapes(slice)});
+ try writer.print("\"{f}\"", .{std.zig.fmtString(slice)});
if (!is_ref) try writer.writeAll(".*");
return;
},
@@ -249,7 +249,7 @@ fn printAggregate(
const elem_val = Value.fromInterned(aggregate.storage.values()[0]);
if (elem_val.isUndef(zcu)) break :one_byte_str;
const byte = elem_val.toUnsignedInt(zcu);
- try writer.print("\"{}\"", .{std.zig.fmtEscapes(&.{@intCast(byte)})});
+ try writer.print("\"{f}\"", .{std.zig.fmtString(&.{@intCast(byte)})});
if (!is_ref) try writer.writeAll(".*");
return;
},
src/print_zir.zig
@@ -30,7 +30,7 @@ pub fn renderAsTextToFile(
.recurse_blocks = true,
};
- var raw_stream = std.io.bufferedWriter(fs_file.writer());
+ var raw_stream = std.io.bufferedWriter(fs_file.deprecatedWriter());
const stream = raw_stream.writer();
const main_struct_inst: Zir.Inst.Index = .main_struct_inst;
@@ -49,8 +49,8 @@ pub fn renderAsTextToFile(
extra_index = item.end;
const import_path = zir.nullTerminatedString(item.data.name);
- try stream.print(" @import(\"{}\") ", .{
- std.zig.fmtEscapes(import_path),
+ try stream.print(" @import(\"{f}\") ", .{
+ std.zig.fmtString(import_path),
});
try writer.writeSrcTokAbs(stream, item.data.token);
try stream.writeAll("\n");
@@ -789,7 +789,7 @@ const Writer = struct {
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str;
const str = inst_data.get(self.code);
- try stream.print("\"{}\")", .{std.zig.fmtEscapes(str)});
+ try stream.print("\"{f}\")", .{std.zig.fmtString(str)});
}
fn writeSliceStart(self: *Writer, stream: anytype, inst: Zir.Inst.Index) !void {
@@ -932,8 +932,8 @@ const Writer = struct {
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].pl_tok;
const extra = self.code.extraData(Zir.Inst.Param, inst_data.payload_index);
const body = self.code.bodySlice(extra.end, extra.data.type.body_len);
- try stream.print("\"{}\", ", .{
- std.zig.fmtEscapes(self.code.nullTerminatedString(extra.data.name)),
+ try stream.print("\"{f}\", ", .{
+ std.zig.fmtString(self.code.nullTerminatedString(extra.data.name)),
});
if (extra.data.type.is_generic) try stream.writeAll("[generic] ");
@@ -1203,7 +1203,7 @@ const Writer = struct {
try stream.writeAll(", ");
} else {
const asm_source = self.code.nullTerminatedString(extra.data.asm_source);
- try stream.print("\"{}\", ", .{std.zig.fmtEscapes(asm_source)});
+ try stream.print("\"{f}\", ", .{std.zig.fmtString(asm_source)});
}
try stream.writeAll(", ");
@@ -1220,8 +1220,8 @@ const Writer = struct {
const name = self.code.nullTerminatedString(output.data.name);
const constraint = self.code.nullTerminatedString(output.data.constraint);
- try stream.print("output({p}, \"{}\", ", .{
- std.zig.fmtId(name), std.zig.fmtEscapes(constraint),
+ try stream.print("output({f}, \"{f}\", ", .{
+ std.zig.fmtIdFlags(name, .{ .allow_primitive = true }), std.zig.fmtString(constraint),
});
try self.writeFlag(stream, "->", is_type);
try self.writeInstRef(stream, output.data.operand);
@@ -1239,8 +1239,8 @@ const Writer = struct {
const name = self.code.nullTerminatedString(input.data.name);
const constraint = self.code.nullTerminatedString(input.data.constraint);
- try stream.print("input({p}, \"{}\", ", .{
- std.zig.fmtId(name), std.zig.fmtEscapes(constraint),
+ try stream.print("input({f}, \"{f}\", ", .{
+ std.zig.fmtIdFlags(name, .{ .allow_primitive = true }), std.zig.fmtString(constraint),
});
try self.writeInstRef(stream, input.data.operand);
try stream.writeAll(")");
@@ -1255,7 +1255,7 @@ const Writer = struct {
const str_index = self.code.extra[extra_i];
extra_i += 1;
const clobber = self.code.nullTerminatedString(@enumFromInt(str_index));
- try stream.print("{p}", .{std.zig.fmtId(clobber)});
+ try stream.print("{f}", .{std.zig.fmtIdFlags(clobber, .{ .allow_primitive = true })});
if (i + 1 < clobbers_len) {
try stream.writeAll(", ");
}
@@ -1299,7 +1299,7 @@ const Writer = struct {
.field => {
const field_name = self.code.nullTerminatedString(extra.data.field_name_start);
try self.writeInstRef(stream, extra.data.obj_ptr);
- try stream.print(", \"{}\"", .{std.zig.fmtEscapes(field_name)});
+ try stream.print(", \"{f}\"", .{std.zig.fmtString(field_name)});
},
}
try stream.writeAll(", [");
@@ -1388,7 +1388,7 @@ const Writer = struct {
extra.data.fields_hash_3,
});
- try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)});
+ try stream.print("hash({x}) ", .{&fields_hash});
var extra_index: usize = extra.end;
@@ -1519,7 +1519,7 @@ const Writer = struct {
try self.writeFlag(stream, "comptime ", field.is_comptime);
if (field.name != .empty) {
const field_name = self.code.nullTerminatedString(field.name);
- try stream.print("{p}: ", .{std.zig.fmtId(field_name)});
+ try stream.print("{f}: ", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })});
} else {
try stream.print("@\"{d}\": ", .{i});
}
@@ -1580,7 +1580,7 @@ const Writer = struct {
extra.data.fields_hash_3,
});
- try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)});
+ try stream.print("hash({x}) ", .{&fields_hash});
var extra_index: usize = extra.end;
@@ -1682,7 +1682,7 @@ const Writer = struct {
extra_index += 1;
try stream.writeByteNTimes(' ', self.indent);
- try stream.print("{p}", .{std.zig.fmtId(field_name)});
+ try stream.print("{f}", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })});
if (has_type) {
const field_type = @as(Zir.Inst.Ref, @enumFromInt(self.code.extra[extra_index]));
@@ -1731,7 +1731,7 @@ const Writer = struct {
extra.data.fields_hash_3,
});
- try stream.print("hash({}) ", .{std.fmt.fmtSliceHexLower(&fields_hash)});
+ try stream.print("hash({x}) ", .{&fields_hash});
var extra_index: usize = extra.end;
@@ -1816,7 +1816,7 @@ const Writer = struct {
extra_index += 1;
try stream.writeByteNTimes(' ', self.indent);
- try stream.print("{p}", .{std.zig.fmtId(field_name)});
+ try stream.print("{f}", .{std.zig.fmtIdFlags(field_name, .{ .allow_primitive = true })});
if (has_tag_value) {
const tag_value_ref = @as(Zir.Inst.Ref, @enumFromInt(self.code.extra[extra_index]));
@@ -1921,7 +1921,7 @@ const Writer = struct {
const name_index: Zir.NullTerminatedString = @enumFromInt(self.code.extra[extra_index]);
const name = self.code.nullTerminatedString(name_index);
try stream.writeByteNTimes(' ', self.indent);
- try stream.print("{p},\n", .{std.zig.fmtId(name)});
+ try stream.print("{f},\n", .{std.zig.fmtIdFlags(name, .{ .allow_primitive = true })});
}
self.indent -= 2;
@@ -2203,7 +2203,7 @@ const Writer = struct {
const extra = self.code.extraData(Zir.Inst.Field, inst_data.payload_index).data;
const name = self.code.nullTerminatedString(extra.field_name_start);
try self.writeInstRef(stream, extra.lhs);
- try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(name)});
+ try stream.print(", \"{f}\") ", .{std.zig.fmtString(name)});
try self.writeSrcNode(stream, inst_data.src_node);
}
@@ -2244,7 +2244,7 @@ const Writer = struct {
) (@TypeOf(stream).Error || error{OutOfMemory})!void {
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_tok;
const str = inst_data.get(self.code);
- try stream.print("\"{}\") ", .{std.zig.fmtEscapes(str)});
+ try stream.print("\"{f}\") ", .{std.zig.fmtString(str)});
try self.writeSrcTok(stream, inst_data.src_tok);
}
@@ -2252,7 +2252,7 @@ const Writer = struct {
const inst_data = self.code.instructions.items(.data)[@intFromEnum(inst)].str_op;
const str = inst_data.getStr(self.code);
try self.writeInstRef(stream, inst_data.operand);
- try stream.print(", \"{}\")", .{std.zig.fmtEscapes(str)});
+ try stream.print(", \"{f}\")", .{std.zig.fmtString(str)});
}
fn writeFunc(
@@ -2594,11 +2594,7 @@ const Writer = struct {
},
}
const src_hash = self.code.getAssociatedSrcHash(inst).?;
- try stream.print(" line({d}) column({d}) hash({})", .{
- decl.src_line,
- decl.src_column,
- std.fmt.fmtSliceHexLower(&src_hash),
- });
+ try stream.print(" line({d}) column({d}) hash({x})", .{ decl.src_line, decl.src_column, &src_hash });
{
if (decl.type_body) |b| {
@@ -2694,11 +2690,11 @@ const Writer = struct {
try stream.writeAll("load ");
try self.writeInstIndex(stream, ptr_inst);
},
- .decl_val => |str| try stream.print("decl_val \"{}\"", .{
- std.zig.fmtEscapes(self.code.nullTerminatedString(str)),
+ .decl_val => |str| try stream.print("decl_val \"{f}\"", .{
+ std.zig.fmtString(self.code.nullTerminatedString(str)),
}),
- .decl_ref => |str| try stream.print("decl_ref \"{}\"", .{
- std.zig.fmtEscapes(self.code.nullTerminatedString(str)),
+ .decl_ref => |str| try stream.print("decl_ref \"{f}\"", .{
+ std.zig.fmtString(self.code.nullTerminatedString(str)),
}),
}
}
@@ -2831,7 +2827,7 @@ const Writer = struct {
const extra = self.code.extraData(Zir.Inst.Import, inst_data.payload_index).data;
try self.writeInstRef(stream, extra.res_ty);
const import_path = self.code.nullTerminatedString(extra.path);
- try stream.print(", \"{}\") ", .{std.zig.fmtEscapes(import_path)});
+ try stream.print(", \"{f}\") ", .{std.zig.fmtString(import_path)});
try self.writeSrcTok(stream, inst_data.src_tok);
}
};
src/print_zoir.zig
@@ -77,8 +77,8 @@ const PrintZon = struct {
},
.float_literal => |x| try pz.w.print("float({d})", .{x}),
.char_literal => |x| try pz.w.print("char({d})", .{x}),
- .enum_literal => |x| try pz.w.print("enum_literal({p})", .{std.zig.fmtId(x.get(zoir))}),
- .string_literal => |x| try pz.w.print("str(\"{}\")", .{std.zig.fmtEscapes(x)}),
+ .enum_literal => |x| try pz.w.print("enum_literal({f})", .{std.zig.fmtIdP(x.get(zoir))}),
+ .string_literal => |x| try pz.w.print("str(\"{f}\")", .{std.zig.fmtString(x)}),
.empty_literal => try pz.w.writeAll("empty_literal(.{})"),
.array_literal => |vals| {
try pz.w.writeAll("array_literal({");
@@ -97,7 +97,7 @@ const PrintZon = struct {
pz.indent += 1;
for (s.names, 0..s.vals.len) |name, idx| {
try pz.newline();
- try pz.w.print("[{p}] ", .{std.zig.fmtId(name.get(zoir))});
+ try pz.w.print("[{f}] ", .{std.zig.fmtIdP(name.get(zoir))});
try pz.renderNode(s.vals.at(@intCast(idx)));
try pz.w.writeByte(',');
}
src/Sema.zig
@@ -5,6 +5,39 @@
//! Does type checking, comptime control flow, and safety-check generation.
//! This is the heart of the Zig compiler.
+const std = @import("std");
+const math = std.math;
+const mem = std.mem;
+const Allocator = mem.Allocator;
+const assert = std.debug.assert;
+const log = std.log.scoped(.sema);
+
+const Sema = @This();
+const Value = @import("Value.zig");
+const MutableValue = @import("mutable_value.zig").MutableValue;
+const Type = @import("Type.zig");
+const Air = @import("Air.zig");
+const Zir = std.zig.Zir;
+const Zcu = @import("Zcu.zig");
+const trace = @import("tracy.zig").trace;
+const Namespace = Zcu.Namespace;
+const CompileError = Zcu.CompileError;
+const SemaError = Zcu.SemaError;
+const LazySrcLoc = Zcu.LazySrcLoc;
+const RangeSet = @import("RangeSet.zig");
+const target_util = @import("target.zig");
+const Package = @import("Package.zig");
+const crash_report = @import("crash_report.zig");
+const build_options = @import("build_options");
+const Compilation = @import("Compilation.zig");
+const InternPool = @import("InternPool.zig");
+const Alignment = InternPool.Alignment;
+const AnalUnit = InternPool.AnalUnit;
+const ComptimeAllocIndex = InternPool.ComptimeAllocIndex;
+const Cache = std.Build.Cache;
+const LowerZon = @import("Sema/LowerZon.zig");
+const arith = @import("Sema/arith.zig");
+
pt: Zcu.PerThread,
/// Alias to `zcu.gpa`.
gpa: Allocator,
@@ -157,39 +190,6 @@ pub fn getComptimeAlloc(sema: *Sema, idx: ComptimeAllocIndex) *ComptimeAlloc {
return &sema.comptime_allocs.items[@intFromEnum(idx)];
}
-const std = @import("std");
-const math = std.math;
-const mem = std.mem;
-const Allocator = mem.Allocator;
-const assert = std.debug.assert;
-const log = std.log.scoped(.sema);
-
-const Sema = @This();
-const Value = @import("Value.zig");
-const MutableValue = @import("mutable_value.zig").MutableValue;
-const Type = @import("Type.zig");
-const Air = @import("Air.zig");
-const Zir = std.zig.Zir;
-const Zcu = @import("Zcu.zig");
-const trace = @import("tracy.zig").trace;
-const Namespace = Zcu.Namespace;
-const CompileError = Zcu.CompileError;
-const SemaError = Zcu.SemaError;
-const LazySrcLoc = Zcu.LazySrcLoc;
-const RangeSet = @import("RangeSet.zig");
-const target_util = @import("target.zig");
-const Package = @import("Package.zig");
-const crash_report = @import("crash_report.zig");
-const build_options = @import("build_options");
-const Compilation = @import("Compilation.zig");
-const InternPool = @import("InternPool.zig");
-const Alignment = InternPool.Alignment;
-const AnalUnit = InternPool.AnalUnit;
-const ComptimeAllocIndex = InternPool.ComptimeAllocIndex;
-const Cache = std.Build.Cache;
-const LowerZon = @import("Sema/LowerZon.zig");
-const arith = @import("Sema/arith.zig");
-
pub const default_branch_quota = 1000;
pub const InferredErrorSet = struct {
@@ -888,7 +888,7 @@ const ComptimeReason = union(enum) {
/// Evaluating at comptime because of a comptime-only type. This field is separate so that
/// the type in question can be included in the error message. AstGen could never emit this
/// reason, because it knows nothing of types.
- /// The format string looks like "foo '{}' bar", where "{}" is the comptime-only type.
+ /// The format string looks like "foo '{f}' bar", where "{f}" is the comptime-only type.
/// We will then explain why this type is comptime-only.
comptime_only: struct {
ty: Type,
@@ -930,17 +930,17 @@ const ComptimeReason = union(enum) {
.struct_init => .{ "initializer of comptime-only struct", "must be comptime-known" },
.tuple_init => .{ "initializer of comptime-only tuple", "must be comptime-known" },
};
- try sema.errNote(src, err_msg, "{s} '{}' {s}", .{ pre, co.ty.fmt(sema.pt), post });
+ try sema.errNote(src, err_msg, "{s} '{f}' {s}", .{ pre, co.ty.fmt(sema.pt), post });
try sema.explainWhyTypeIsComptime(err_msg, src, co.ty);
},
.comptime_only_param_ty => |co| {
- try sema.errNote(src, err_msg, "argument to parameter with comptime-only type '{}' must be comptime-known", .{co.ty.fmt(sema.pt)});
+ try sema.errNote(src, err_msg, "argument to parameter with comptime-only type '{f}' must be comptime-known", .{co.ty.fmt(sema.pt)});
try sema.errNote(co.param_ty_src, err_msg, "parameter type declared here", .{});
try sema.explainWhyTypeIsComptime(err_msg, src, co.ty);
},
.comptime_only_ret_ty => |co| {
const function_with: []const u8 = if (co.is_generic_inst) "generic function instantiated with" else "function with";
- try sema.errNote(src, err_msg, "call to {s} comptime-only return type '{}' is evaluated at comptime", .{ function_with, co.ty.fmt(sema.pt) });
+ try sema.errNote(src, err_msg, "call to {s} comptime-only return type '{f}' is evaluated at comptime", .{ function_with, co.ty.fmt(sema.pt) });
try sema.errNote(co.ret_ty_src, err_msg, "return type declared here", .{});
try sema.explainWhyTypeIsComptime(err_msg, src, co.ty);
},
@@ -1905,7 +1905,7 @@ fn analyzeBodyInner(
const err_union = try sema.resolveInst(extra.data.operand);
const err_union_ty = sema.typeOf(err_union);
if (err_union_ty.zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, operand_src, "expected error union type, found '{}'", .{
+ return sema.fail(block, operand_src, "expected error union type, found '{f}'", .{
err_union_ty.fmt(pt),
});
}
@@ -2339,7 +2339,7 @@ pub fn failWithDivideByZero(sema: *Sema, block: *Block, src: LazySrcLoc) Compile
fn failWithModRemNegative(sema: *Sema, block: *Block, src: LazySrcLoc, lhs_ty: Type, rhs_ty: Type) CompileError {
const pt = sema.pt;
- return sema.fail(block, src, "remainder division with '{}' and '{}': signed integers and floats must use @rem or @mod", .{
+ return sema.fail(block, src, "remainder division with '{f}' and '{f}': signed integers and floats must use @rem or @mod", .{
lhs_ty.fmt(pt), rhs_ty.fmt(pt),
});
}
@@ -2347,7 +2347,7 @@ fn failWithModRemNegative(sema: *Sema, block: *Block, src: LazySrcLoc, lhs_ty: T
fn failWithExpectedOptionalType(sema: *Sema, block: *Block, src: LazySrcLoc, non_optional_ty: Type) CompileError {
const pt = sema.pt;
const msg = msg: {
- const msg = try sema.errMsg(src, "expected optional type, found '{}'", .{
+ const msg = try sema.errMsg(src, "expected optional type, found '{f}'", .{
non_optional_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -2363,12 +2363,12 @@ fn failWithExpectedOptionalType(sema: *Sema, block: *Block, src: LazySrcLoc, non
fn failWithArrayInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileError {
const pt = sema.pt;
const msg = msg: {
- const msg = try sema.errMsg(src, "type '{}' does not support array initialization syntax", .{
+ const msg = try sema.errMsg(src, "type '{f}' does not support array initialization syntax", .{
ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
if (ty.isSlice(pt.zcu)) {
- try sema.errNote(src, msg, "inferred array length is specified with an underscore: '[_]{}'", .{ty.elemType2(pt.zcu).fmt(pt)});
+ try sema.errNote(src, msg, "inferred array length is specified with an underscore: '[_]{f}'", .{ty.elemType2(pt.zcu).fmt(pt)});
}
break :msg msg;
};
@@ -2377,7 +2377,7 @@ fn failWithArrayInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty
fn failWithStructInitNotSupported(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileError {
const pt = sema.pt;
- return sema.fail(block, src, "type '{}' does not support struct initialization syntax", .{
+ return sema.fail(block, src, "type '{f}' does not support struct initialization syntax", .{
ty.fmt(pt),
});
}
@@ -2390,7 +2390,7 @@ fn failWithErrorSetCodeMissing(
src_err_set_ty: Type,
) CompileError {
const pt = sema.pt;
- return sema.fail(block, src, "expected type '{}', found type '{}'", .{
+ return sema.fail(block, src, "expected type '{f}', found type '{f}'", .{
dest_err_set_ty.fmt(pt), src_err_set_ty.fmt(pt),
});
}
@@ -2398,7 +2398,7 @@ fn failWithErrorSetCodeMissing(
pub fn failWithIntegerOverflow(sema: *Sema, block: *Block, src: LazySrcLoc, int_ty: Type, val: Value, vector_index: ?usize) CompileError {
const pt = sema.pt;
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "overflow of integer type '{}' with value '{}'", .{
+ const msg = try sema.errMsg(src, "overflow of integer type '{f}' with value '{f}'", .{
int_ty.fmt(pt), val.fmtValueSema(pt, sema),
});
errdefer msg.destroy(sema.gpa);
@@ -2448,7 +2448,7 @@ fn failWithInvalidFieldAccess(
const child_ty = inner_ty.optionalChild(zcu);
if (!typeSupportsFieldAccess(zcu, child_ty, field_name)) break :opt;
const msg = msg: {
- const msg = try sema.errMsg(src, "optional type '{}' does not support field access", .{object_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "optional type '{f}' does not support field access", .{object_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "consider using '.?', 'orelse', or 'if'", .{});
break :msg msg;
@@ -2458,14 +2458,14 @@ fn failWithInvalidFieldAccess(
const child_ty = inner_ty.errorUnionPayload(zcu);
if (!typeSupportsFieldAccess(zcu, child_ty, field_name)) break :err;
const msg = msg: {
- const msg = try sema.errMsg(src, "error union type '{}' does not support field access", .{object_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "error union type '{f}' does not support field access", .{object_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "consider using 'try', 'catch', or 'if'", .{});
break :msg msg;
};
return sema.failWithOwnedErrorMsg(block, msg);
}
- return sema.fail(block, src, "type '{}' does not support field access", .{object_ty.fmt(pt)});
+ return sema.fail(block, src, "type '{f}' does not support field access", .{object_ty.fmt(pt)});
}
fn typeSupportsFieldAccess(zcu: *const Zcu, ty: Type, field_name: InternPool.NullTerminatedString) bool {
@@ -2494,7 +2494,7 @@ fn failWithComptimeErrorRetTrace(
const pt = sema.pt;
const zcu = pt.zcu;
const msg = msg: {
- const msg = try sema.errMsg(src, "caught unexpected error '{}'", .{name.fmt(&zcu.intern_pool)});
+ const msg = try sema.errMsg(src, "caught unexpected error '{f}'", .{name.fmt(&zcu.intern_pool)});
errdefer msg.destroy(sema.gpa);
for (sema.comptime_err_ret_trace.items) |src_loc| {
@@ -3005,7 +3005,7 @@ pub fn createTypeName(
inst: ?Zir.Inst.Index,
/// This is used purely to give the type a unique name in the `anon` case.
type_index: InternPool.Index,
-) !struct {
+) CompileError!struct {
name: InternPool.NullTerminatedString,
nav: InternPool.Nav.Index.Optional,
} {
@@ -3024,11 +3024,10 @@ pub fn createTypeName(
const fn_info = sema.code.getFnInfo(ip.funcZirBodyInst(sema.func_index).resolve(ip) orelse return error.AnalysisFail);
const zir_tags = sema.code.instructions.items(.tag);
- var buf: std.ArrayListUnmanaged(u8) = .empty;
- defer buf.deinit(gpa);
-
- const writer = buf.writer(gpa);
- try writer.print("{}(", .{block.type_name_ctx.fmt(ip)});
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
+ bw.print("{f}(", .{block.type_name_ctx.fmt(ip)}) catch return error.OutOfMemory;
var arg_i: usize = 0;
for (fn_info.param_body) |zir_inst| switch (zir_tags[@intFromEnum(zir_inst)]) {
@@ -3041,18 +3040,18 @@ pub fn createTypeName(
// result in a compile error.
const arg_val = try sema.resolveValue(arg) orelse break :func_strat; // fall through to anon strat
- if (arg_i != 0) try writer.writeByte(',');
+ if (arg_i != 0) bw.writeByte(',') catch return error.OutOfMemory;
// Limiting the depth here helps avoid type names getting too long, which
// in turn helps to avoid unreasonably long symbol names for namespaced
// symbols. Such names should ideally be human-readable, and additionally,
// some tooling may not support very long symbol names.
- try writer.print("{}", .{Value.fmtValueSemaFull(.{
+ bw.print("{f}", .{Value.fmtValueSemaFull(.{
.val = arg_val,
.pt = pt,
.opt_sema = sema,
.depth = 1,
- })});
+ })}) catch return error.OutOfMemory;
arg_i += 1;
continue;
@@ -3060,9 +3059,9 @@ pub fn createTypeName(
else => continue,
};
- try writer.writeByte(')');
+ try bw.writeByte(')');
return .{
- .name = try ip.getOrPutString(gpa, pt.tid, buf.items, .no_embedded_nulls),
+ .name = try ip.getOrPutString(gpa, pt.tid, aw.getWritten(), .no_embedded_nulls),
.nav = .none,
};
},
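The createTypeName rewrite above drops the ArrayListUnmanaged(u8) writer in favor of std.io.Writer.Allocating: print into aw.interface, then read the bytes back with getWritten(). A standalone sketch of that shape (illustrative only; assumes the Allocating API exactly as used in the hunk above):

const std = @import("std");

pub fn main() !void {
    const gpa = std.heap.page_allocator;
    // Allocating owns a growable buffer; its .interface field is the *std.io.Writer
    // that print calls go through, and getWritten() exposes the accumulated bytes.
    var aw: std.io.Writer.Allocating = .init(gpa);
    defer aw.deinit();
    const bw = &aw.interface;
    try bw.print("{s}({d})", .{ "Namespace.Inner", 42 });
    std.debug.print("type name: {s}\n", .{aw.getWritten()});
}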
@@ -3074,7 +3073,7 @@ pub fn createTypeName(
for (@intFromEnum(inst.?)..zir_tags.len) |i| switch (zir_tags[i]) {
.dbg_var_ptr, .dbg_var_val => if (zir_data[i].str_op.operand == ref) {
return .{
- .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{}.{s}", .{
+ .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{f}.{s}", .{
block.type_name_ctx.fmt(ip), zir_data[i].str_op.getStr(sema.code),
}, .no_embedded_nulls),
.nav = .none,
@@ -3097,7 +3096,7 @@ pub fn createTypeName(
// that builtin from the language, we can consider this.
return .{
- .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{}__{s}_{d}", .{
+ .name = try ip.getOrPutStringFmt(gpa, pt.tid, "{f}__{s}_{d}", .{
block.type_name_ctx.fmt(ip), anon_prefix, @intFromEnum(type_index),
}, .no_embedded_nulls),
.nav = .none,
@@ -3581,7 +3580,7 @@ fn ensureResultUsed(
},
else => {
const msg = msg: {
- const msg = try sema.errMsg(src, "value of type '{}' ignored", .{ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "value of type '{f}' ignored", .{ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "all non-void values must be used", .{});
try sema.errNote(src, msg, "to discard the value, assign it to '_'", .{});
@@ -3851,7 +3850,7 @@ fn zirMakePtrConst(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
// The value was initialized through RLS, so we didn't detect the runtime condition earlier.
// TODO: source location of runtime control flow
const init_src = block.src(.{ .node_offset_var_decl_init = inst_data.src_node });
- return sema.fail(block, init_src, "value with comptime-only type '{}' depends on runtime control flow", .{elem_ty.fmt(pt)});
+ return sema.fail(block, init_src, "value with comptime-only type '{f}' depends on runtime control flow", .{elem_ty.fmt(pt)});
}
// This is a runtime value.
@@ -4348,7 +4347,7 @@ fn zirResolveInferredAlloc(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Com
// The alloc wasn't comptime-known per the above logic, so the
// type cannot be comptime-only.
// TODO: source location of runtime control flow
- return sema.fail(block, src, "value with comptime-only type '{}' depends on runtime control flow", .{final_elem_ty.fmt(pt)});
+ return sema.fail(block, src, "value with comptime-only type '{f}' depends on runtime control flow", .{final_elem_ty.fmt(pt)});
}
if (sema.func_is_naked and try final_elem_ty.hasRuntimeBitsSema(pt)) {
const mut_src = block.src(.{ .node_offset_store_ptr = inst_data.src_node });
@@ -4445,7 +4444,7 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
if (!object_ty.isIndexable(zcu)) {
// Instead of using checkIndexable we customize this error.
const msg = msg: {
- const msg = try sema.errMsg(arg_src, "type '{}' is not indexable and not a range", .{object_ty.fmt(pt)});
+ const msg = try sema.errMsg(arg_src, "type '{f}' is not indexable and not a range", .{object_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(arg_src, msg, "for loop operand must be a range, array, slice, tuple, or vector", .{});
@@ -4480,10 +4479,10 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
.for_node_offset = inst_data.src_node,
.input_index = len_idx,
} });
- try sema.errNote(a_src, msg, "length {} here", .{
+ try sema.errNote(a_src, msg, "length {f} here", .{
v.fmtValueSema(pt, sema),
});
- try sema.errNote(arg_src, msg, "length {} here", .{
+ try sema.errNote(arg_src, msg, "length {f} here", .{
arg_val.fmtValueSema(pt, sema),
});
break :msg msg;
@@ -4515,7 +4514,7 @@ fn zirForLen(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
.for_node_offset = inst_data.src_node,
.input_index = i,
} });
- try sema.errNote(arg_src, msg, "type '{}' has no upper bound", .{
+ try sema.errNote(arg_src, msg, "type '{f}' has no upper bound", .{
object_ty.fmt(pt),
});
}
@@ -4591,7 +4590,7 @@ fn zirCoercePtrElemTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
switch (val_ty.zigTypeTag(zcu)) {
.array, .vector => {},
else => if (!val_ty.isTuple(zcu)) {
- return sema.fail(block, src, "expected array of '{}', found '{}'", .{ elem_ty.fmt(pt), val_ty.fmt(pt) });
+ return sema.fail(block, src, "expected array of '{f}', found '{f}'", .{ elem_ty.fmt(pt), val_ty.fmt(pt) });
},
}
const want_ty = try pt.arrayType(.{
@@ -4665,7 +4664,7 @@ fn zirValidateRefTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
const ty_operand = try sema.resolveTypeOrPoison(block, src, un_tok.operand) orelse return;
if (ty_operand.optEuBaseType(zcu).zigTypeTag(zcu) != .pointer) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "expected type '{}', found pointer", .{ty_operand.fmt(pt)});
+ const msg = try sema.errMsg(src, "expected type '{f}', found pointer", .{ty_operand.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "address-of operator always returns a pointer", .{});
break :msg msg;
@@ -5074,7 +5073,7 @@ fn validateStructInit(
}
continue;
};
- const template = "missing struct field: {}";
+ const template = "missing struct field: {f}";
const args = .{field_name.fmt(ip)};
if (root_msg) |msg| {
try sema.errNote(init_src, msg, template, args);
@@ -5204,7 +5203,7 @@ fn validateStructInit(
}
continue;
};
- const template = "missing struct field: {}";
+ const template = "missing struct field: {f}";
const args = .{field_name.fmt(ip)};
if (root_msg) |msg| {
try sema.errNote(init_src, msg, template, args);
@@ -5508,11 +5507,11 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
const operand_ty = sema.typeOf(operand);
if (operand_ty.zigTypeTag(zcu) != .pointer) {
- return sema.fail(block, src, "cannot dereference non-pointer type '{}'", .{operand_ty.fmt(pt)});
+ return sema.fail(block, src, "cannot dereference non-pointer type '{f}'", .{operand_ty.fmt(pt)});
} else switch (operand_ty.ptrSize(zcu)) {
.one, .c => {},
- .many => return sema.fail(block, src, "index syntax required for unknown-length pointer type '{}'", .{operand_ty.fmt(pt)}),
- .slice => return sema.fail(block, src, "index syntax required for slice type '{}'", .{operand_ty.fmt(pt)}),
+ .many => return sema.fail(block, src, "index syntax required for unknown-length pointer type '{f}'", .{operand_ty.fmt(pt)}),
+ .slice => return sema.fail(block, src, "index syntax required for slice type '{f}'", .{operand_ty.fmt(pt)}),
}
if ((try sema.typeHasOnePossibleValue(operand_ty.childType(zcu))) != null) {
@@ -5529,7 +5528,7 @@ fn zirValidateDeref(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErr
const msg = msg: {
const msg = try sema.errMsg(
src,
- "values of type '{}' must be comptime-known, but operand value is runtime-known",
+ "values of type '{f}' must be comptime-known, but operand value is runtime-known",
.{elem_ty.fmt(pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -5561,7 +5560,7 @@ fn zirValidateDestructure(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp
if (!typeIsDestructurable(operand_ty, zcu)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "type '{}' cannot be destructured", .{operand_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "type '{f}' cannot be destructured", .{operand_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(destructure_src, msg, "result destructured here", .{});
if (operand_ty.zigTypeTag(pt.zcu) == .error_union) {
@@ -5604,12 +5603,12 @@ fn failWithBadMemberAccess(
else => unreachable,
};
if (agg_ty.typeDeclInst(zcu)) |inst| if ((inst.resolve(ip) orelse return error.AnalysisFail) == .main_struct_inst) {
- return sema.fail(block, field_src, "root source file struct '{}' has no member named '{}'", .{
+ return sema.fail(block, field_src, "root source file struct '{f}' has no member named '{f}'", .{
agg_ty.fmt(pt), field_name.fmt(ip),
});
};
- return sema.fail(block, field_src, "{s} '{}' has no member named '{}'", .{
+ return sema.fail(block, field_src, "{s} '{f}' has no member named '{f}'", .{
kw_name, agg_ty.fmt(pt), field_name.fmt(ip),
});
}
@@ -5629,7 +5628,7 @@ fn failWithBadStructFieldAccess(
const msg = msg: {
const msg = try sema.errMsg(
field_src,
- "no field named '{}' in struct '{}'",
+ "no field named '{f}' in struct '{f}'",
.{ field_name.fmt(ip), struct_type.name.fmt(ip) },
);
errdefer msg.destroy(sema.gpa);
@@ -5655,7 +5654,7 @@ fn failWithBadUnionFieldAccess(
const msg = msg: {
const msg = try sema.errMsg(
field_src,
- "no field named '{}' in union '{}'",
+ "no field named '{f}' in union '{f}'",
.{ field_name.fmt(ip), union_obj.name.fmt(ip) },
);
errdefer msg.destroy(gpa);
@@ -5907,30 +5906,30 @@ fn zirCompileLog(
const zcu = pt.zcu;
const gpa = zcu.gpa;
- var buf: std.ArrayListUnmanaged(u8) = .empty;
- defer buf.deinit(gpa);
-
- const writer = buf.writer(gpa);
+ var aw: std.io.Writer.Allocating = .init(gpa);
+ defer aw.deinit();
+ const bw = &aw.interface;
const extra = sema.code.extraData(Zir.Inst.NodeMultiOp, extended.operand);
const src_node = extra.data.src_node;
const args = sema.code.refSlice(extra.end, extended.small);
for (args, 0..) |arg_ref, i| {
- if (i != 0) try writer.print(", ", .{});
+ if (i != 0) bw.writeAll(", ") catch return error.OutOfMemory;
const arg = try sema.resolveInst(arg_ref);
const arg_ty = sema.typeOf(arg);
if (try sema.resolveValueResolveLazy(arg)) |val| {
- try writer.print("@as({}, {})", .{
+ bw.print("@as({f}, {f})", .{
arg_ty.fmt(pt), val.fmtValueSema(pt, sema),
- });
+ }) catch return error.OutOfMemory;
} else {
- try writer.print("@as({}, [runtime value])", .{arg_ty.fmt(pt)});
+ bw.print("@as({f}, [runtime value])", .{arg_ty.fmt(pt)}) catch return error.OutOfMemory;
}
}
+ bw.writeByte('\n') catch return error.OutOfMemory;
- const line_data = try zcu.intern_pool.getOrPutString(gpa, pt.tid, buf.items, .no_embedded_nulls);
+ const line_data = try zcu.intern_pool.getOrPutString(gpa, pt.tid, aw.getWritten(), .no_embedded_nulls);
const line_idx: Zcu.CompileLogLine.Index = if (zcu.free_compile_log_lines.pop()) |idx| idx: {
zcu.compile_log_lines.items[@intFromEnum(idx)] = .{
@@ -6472,7 +6471,7 @@ fn resolveAnalyzedBlock(
const type_src = src; // TODO: better source location
if (try resolved_ty.comptimeOnlySema(pt)) {
const msg = msg: {
- const msg = try sema.errMsg(type_src, "value with comptime-only type '{}' depends on runtime control flow", .{resolved_ty.fmt(pt)});
+ const msg = try sema.errMsg(type_src, "value with comptime-only type '{f}' depends on runtime control flow", .{resolved_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
const runtime_src = child_block.runtime_cond orelse child_block.runtime_loop.?;
@@ -6588,7 +6587,7 @@ fn zirExport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
{
if (ptr_ty.zigTypeTag(zcu) != .pointer) {
- return sema.fail(block, ptr_src, "expected pointer type, found '{}'", .{ptr_ty.fmt(pt)});
+ return sema.fail(block, ptr_src, "expected pointer type, found '{f}'", .{ptr_ty.fmt(pt)});
}
const ptr_ty_info = ptr_ty.ptrInfo(zcu);
if (ptr_ty_info.flags.size == .slice) {
@@ -6611,7 +6610,7 @@ fn zirExport(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
const export_ty = Value.fromInterned(uav.val).typeOf(zcu);
if (!try sema.validateExternType(export_ty, .other)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "unable to export type '{}'", .{export_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "unable to export type '{f}'", .{export_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, export_ty, .other);
try sema.addDeclaredHereNote(msg, export_ty);
@@ -6663,7 +6662,7 @@ pub fn analyzeExport(
if (!try sema.validateExternType(export_ty, .other)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "unable to export type '{}'", .{export_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "unable to export type '{f}'", .{export_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, export_ty, .other);
@@ -7287,7 +7286,7 @@ fn checkCallArgumentCount(
opt_child.childType(zcu).zigTypeTag(zcu) == .@"fn"))
{
const msg = msg: {
- const msg = try sema.errMsg(func_src, "cannot call optional type '{}'", .{
+ const msg = try sema.errMsg(func_src, "cannot call optional type '{f}'", .{
callee_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -7299,7 +7298,7 @@ fn checkCallArgumentCount(
},
else => {},
}
- return sema.fail(block, func_src, "type '{}' not a function", .{callee_ty.fmt(pt)});
+ return sema.fail(block, func_src, "type '{f}' not a function", .{callee_ty.fmt(pt)});
};
const func_ty_info = zcu.typeToFunc(func_ty).?;
@@ -7362,7 +7361,7 @@ fn callBuiltin(
},
else => {},
}
- std.debug.panic("type '{}' is not a function calling builtin fn", .{callee_ty.fmt(pt)});
+ std.debug.panic("type '{f}' is not a function calling builtin fn", .{callee_ty.fmt(pt)});
};
const func_ty_info = zcu.typeToFunc(func_ty).?;
@@ -7746,7 +7745,7 @@ fn analyzeCall(
if (!param_ty.isValidParamType(zcu)) {
const opaque_str = if (param_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else "";
- return sema.fail(block, param_src, "parameter of {s}type '{}' not allowed", .{
+ return sema.fail(block, param_src, "parameter of {s}type '{f}' not allowed", .{
opaque_str, param_ty.fmt(pt),
});
}
@@ -7843,7 +7842,7 @@ fn analyzeCall(
if (!full_ty.isValidReturnType(zcu)) {
const opaque_str = if (full_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else "";
- return sema.fail(block, func_ret_ty_src, "{s}return type '{}' not allowed", .{
+ return sema.fail(block, func_ret_ty_src, "{s}return type '{f}' not allowed", .{
opaque_str, full_ty.fmt(pt),
});
}
@@ -8301,7 +8300,7 @@ fn handleTailCall(sema: *Sema, block: *Block, call_src: LazySrcLoc, func_ty: Typ
}
const owner_func_ty: Type = .fromInterned(zcu.funcInfo(sema.owner.unwrap().func).ty);
if (owner_func_ty.toIntern() != func_ty.toIntern()) {
- return sema.fail(block, call_src, "unable to perform tail call: type of function being called '{}' does not match type of calling function '{}'", .{
+ return sema.fail(block, call_src, "unable to perform tail call: type of function being called '{f}' does not match type of calling function '{f}'", .{
func_ty.fmt(pt), owner_func_ty.fmt(pt),
});
}
@@ -8325,9 +8324,9 @@ fn zirOptionalType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileErro
const operand_src = block.src(.{ .node_offset_un_op = inst_data.src_node });
const child_type = try sema.resolveType(block, operand_src, inst_data.operand);
if (child_type.zigTypeTag(zcu) == .@"opaque") {
- return sema.fail(block, operand_src, "opaque type '{}' cannot be optional", .{child_type.fmt(pt)});
+ return sema.fail(block, operand_src, "opaque type '{f}' cannot be optional", .{child_type.fmt(pt)});
} else if (child_type.zigTypeTag(zcu) == .null) {
- return sema.fail(block, operand_src, "type '{}' cannot be optional", .{child_type.fmt(pt)});
+ return sema.fail(block, operand_src, "type '{f}' cannot be optional", .{child_type.fmt(pt)});
}
const opt_type = try pt.optionalType(child_type.toIntern());
@@ -8388,7 +8387,7 @@ fn zirVecArrElemType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr
const vec_ty = try sema.resolveTypeOrPoison(block, LazySrcLoc.unneeded, un_node.operand) orelse return .generic_poison_type;
switch (vec_ty.zigTypeTag(zcu)) {
.array, .vector => {},
- else => return sema.fail(block, block.nodeOffset(un_node.src_node), "expected array or vector type, found '{}'", .{vec_ty.fmt(pt)}),
+ else => return sema.fail(block, block.nodeOffset(un_node.src_node), "expected array or vector type, found '{f}'", .{vec_ty.fmt(pt)}),
}
return Air.internedToRef(vec_ty.childType(zcu).toIntern());
}
@@ -8456,7 +8455,7 @@ fn validateArrayElemType(sema: *Sema, block: *Block, elem_type: Type, elem_src:
const pt = sema.pt;
const zcu = pt.zcu;
if (elem_type.zigTypeTag(zcu) == .@"opaque") {
- return sema.fail(block, elem_src, "array of opaque type '{}' not allowed", .{elem_type.fmt(pt)});
+ return sema.fail(block, elem_src, "array of opaque type '{f}' not allowed", .{elem_type.fmt(pt)});
} else if (elem_type.zigTypeTag(zcu) == .noreturn) {
return sema.fail(block, elem_src, "array of 'noreturn' not allowed", .{});
}
@@ -8492,7 +8491,7 @@ fn zirErrorUnionType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr
const payload = try sema.resolveType(block, rhs_src, extra.rhs);
if (error_set.zigTypeTag(zcu) != .error_set) {
- return sema.fail(block, lhs_src, "expected error set type, found '{}'", .{
+ return sema.fail(block, lhs_src, "expected error set type, found '{f}'", .{
error_set.fmt(pt),
});
}
@@ -8505,11 +8504,11 @@ fn validateErrorUnionPayloadType(sema: *Sema, block: *Block, payload_ty: Type, p
const pt = sema.pt;
const zcu = pt.zcu;
if (payload_ty.zigTypeTag(zcu) == .@"opaque") {
- return sema.fail(block, payload_src, "error union with payload of opaque type '{}' not allowed", .{
+ return sema.fail(block, payload_src, "error union with payload of opaque type '{f}' not allowed", .{
payload_ty.fmt(pt),
});
} else if (payload_ty.zigTypeTag(zcu) == .error_set) {
- return sema.fail(block, payload_src, "error union with payload of error set type '{}' not allowed", .{
+ return sema.fail(block, payload_src, "error union with payload of error set type '{f}' not allowed", .{
payload_ty.fmt(pt),
});
}
@@ -8647,9 +8646,9 @@ fn zirMergeErrorSets(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileEr
const lhs_ty = try sema.analyzeAsType(block, lhs_src, lhs);
const rhs_ty = try sema.analyzeAsType(block, rhs_src, rhs);
if (lhs_ty.zigTypeTag(zcu) != .error_set)
- return sema.fail(block, lhs_src, "expected error set type, found '{}'", .{lhs_ty.fmt(pt)});
+ return sema.fail(block, lhs_src, "expected error set type, found '{f}'", .{lhs_ty.fmt(pt)});
if (rhs_ty.zigTypeTag(zcu) != .error_set)
- return sema.fail(block, rhs_src, "expected error set type, found '{}'", .{rhs_ty.fmt(pt)});
+ return sema.fail(block, rhs_src, "expected error set type, found '{f}'", .{rhs_ty.fmt(pt)});
// Anything merged with anyerror is anyerror.
if (lhs_ty.toIntern() == .anyerror_type or rhs_ty.toIntern() == .anyerror_type) {
@@ -8759,7 +8758,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
return sema.fail(
block,
operand_src,
- "untagged union '{}' cannot be converted to integer",
+ "untagged union '{f}' cannot be converted to integer",
.{operand_ty.fmt(pt)},
);
};
@@ -8767,7 +8766,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
break :blk try sema.unionToTag(block, tag_ty, operand, operand_src);
},
else => {
- return sema.fail(block, operand_src, "expected enum or tagged union, found '{}'", .{
+ return sema.fail(block, operand_src, "expected enum or tagged union, found '{f}'", .{
operand_ty.fmt(pt),
});
},
@@ -8778,7 +8777,7 @@ fn zirIntFromEnum(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
// TODO: use correct solution
// https://github.com/ziglang/zig/issues/15909
if (enum_tag_ty.enumFieldCount(zcu) == 0 and !enum_tag_ty.isNonexhaustiveEnum(zcu)) {
- return sema.fail(block, operand_src, "cannot use @intFromEnum on empty enum '{}'", .{
+ return sema.fail(block, operand_src, "cannot use @intFromEnum on empty enum '{f}'", .{
enum_tag_ty.fmt(pt),
});
}
@@ -8812,7 +8811,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
const operand_ty = sema.typeOf(operand);
if (dest_ty.zigTypeTag(zcu) != .@"enum") {
- return sema.fail(block, src, "expected enum, found '{}'", .{dest_ty.fmt(pt)});
+ return sema.fail(block, src, "expected enum, found '{f}'", .{dest_ty.fmt(pt)});
}
_ = try sema.checkIntType(block, operand_src, operand_ty);
@@ -8822,7 +8821,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
if (try sema.intFitsInType(int_val, int_tag_ty, null)) {
return Air.internedToRef((try pt.getCoerced(int_val, dest_ty)).toIntern());
}
- return sema.fail(block, src, "int value '{}' out of range of non-exhaustive enum '{}'", .{
+ return sema.fail(block, src, "int value '{f}' out of range of non-exhaustive enum '{f}'", .{
int_val.fmtValueSema(pt, sema), dest_ty.fmt(pt),
});
}
@@ -8830,7 +8829,7 @@ fn zirEnumFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
return sema.failWithUseOfUndef(block, operand_src);
}
if (!(try sema.enumHasInt(dest_ty, int_val))) {
- return sema.fail(block, src, "enum '{}' has no tag with value '{}'", .{
+ return sema.fail(block, src, "enum '{f}' has no tag with value '{f}'", .{
dest_ty.fmt(pt), int_val.fmtValueSema(pt, sema),
});
}
@@ -9024,7 +9023,7 @@ fn zirErrUnionPayload(
const operand_src = src;
const err_union_ty = sema.typeOf(operand);
if (err_union_ty.zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, operand_src, "expected error union type, found '{}'", .{
+ return sema.fail(block, operand_src, "expected error union type, found '{f}'", .{
err_union_ty.fmt(pt),
});
}
@@ -9092,7 +9091,7 @@ fn analyzeErrUnionPayloadPtr(
assert(operand_ty.zigTypeTag(zcu) == .pointer);
if (operand_ty.childType(zcu).zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, src, "expected error union type, found '{}'", .{
+ return sema.fail(block, src, "expected error union type, found '{f}'", .{
operand_ty.childType(zcu).fmt(pt),
});
}
@@ -9169,7 +9168,7 @@ fn analyzeErrUnionCode(sema: *Sema, block: *Block, src: LazySrcLoc, operand: Air
const zcu = pt.zcu;
const operand_ty = sema.typeOf(operand);
if (operand_ty.zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, src, "expected error union type, found '{}'", .{
+ return sema.fail(block, src, "expected error union type, found '{f}'", .{
operand_ty.fmt(pt),
});
}
@@ -9205,7 +9204,7 @@ fn analyzeErrUnionCodePtr(sema: *Sema, block: *Block, src: LazySrcLoc, operand:
assert(operand_ty.zigTypeTag(zcu) == .pointer);
if (operand_ty.childType(zcu).zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, src, "expected error union type, found '{}'", .{
+ return sema.fail(block, src, "expected error union type, found '{f}'", .{
operand_ty.childType(zcu).fmt(pt),
});
}
@@ -9450,19 +9449,18 @@ fn callConvSupportsVarArgs(cc: std.builtin.CallingConvention.Tag) bool {
fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc: std.builtin.CallingConvention.Tag) CompileError!void {
const CallingConventionsSupportingVarArgsList = struct {
arch: std.Target.Cpu.Arch,
- pub fn format(ctx: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
- _ = fmt;
- _ = options;
+ pub fn format(ctx: @This(), w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
var first = true;
for (calling_conventions_supporting_var_args) |cc_inner| {
for (std.Target.Cpu.Arch.fromCallingConvention(cc_inner)) |supported_arch| {
if (supported_arch == ctx.arch) break;
} else continue; // callconv not supported by this arch
if (!first) {
- try writer.writeAll(", ");
+ try w.writeAll(", ");
}
first = false;
- try writer.print("'{s}'", .{@tagName(cc_inner)});
+ try w.print("'{s}'", .{@tagName(cc_inner)});
}
}
};
@@ -9472,7 +9470,7 @@ fn checkCallConvSupportsVarArgs(sema: *Sema, block: *Block, src: LazySrcLoc, cc:
const msg = try sema.errMsg(src, "variadic function does not support '{s}' calling convention", .{@tagName(cc)});
errdefer msg.destroy(sema.gpa);
const target = sema.pt.zcu.getTarget();
- try sema.errNote(src, msg, "supported calling conventions: {}", .{CallingConventionsSupportingVarArgsList{ .arch = target.cpu.arch }});
+ try sema.errNote(src, msg, "supported calling conventions: {f}", .{CallingConventionsSupportingVarArgsList{ .arch = target.cpu.arch }});
break :msg msg;
});
}
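
The inline formatter struct rewritten above (and ArchListFormatter further down) shows the method shape that '{f}' expects after this change: the writer comes first as *std.io.Writer, the error set is std.io.Writer.Error, and the unused format string is rejected at comptime rather than discarded at runtime. A sketch of the same separator-joining pattern, with a hypothetical NameList type that is not part of this commit:

const std = @import("std");
const assert = std.debug.assert;

// Hypothetical list formatter in the same shape as the struct above; a call
// site would pass it to a "{f}" placeholder, as the errNote call above does.
const NameList = struct {
    names: []const []const u8,

    pub fn format(list: @This(), w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
        comptime assert(fmt.len == 0);
        for (list.names, 0..) |name, i| {
            if (i != 0) try w.writeAll(", ");
            try w.print("'{s}'", .{name});
        }
    }
};

Asserting fmt.len == 0 at comptime turns a stray specifier into a compile error at the call site, which appears to be the point of replacing the old "_ = fmt; _ = options;" discards.
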
@@ -9520,7 +9518,7 @@ fn checkMergeAllowed(sema: *Sema, block: *Block, src: LazySrcLoc, peer_ty: Type)
}
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "value with non-mergable pointer type '{}' depends on runtime control flow", .{peer_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "value with non-mergable pointer type '{f}' depends on runtime control flow", .{peer_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
const runtime_src = block.runtime_cond orelse block.runtime_loop.?;
@@ -9598,13 +9596,13 @@ fn funcCommon(
}
if (!param_ty.isValidParamType(zcu)) {
const opaque_str = if (param_ty.zigTypeTag(zcu) == .@"opaque") "opaque " else "";
- return sema.fail(block, param_src, "parameter of {s}type '{}' not allowed", .{
+ return sema.fail(block, param_src, "parameter of {s}type '{f}' not allowed", .{
opaque_str, param_ty.fmt(pt),
});
}
if (!param_ty_generic and !target_util.fnCallConvAllowsZigTypes(cc) and !try sema.validateExternType(param_ty, .param_ty)) {
const msg = msg: {
- const msg = try sema.errMsg(param_src, "parameter of type '{}' not allowed in function with calling convention '{s}'", .{
+ const msg = try sema.errMsg(param_src, "parameter of type '{f}' not allowed in function with calling convention '{s}'", .{
param_ty.fmt(pt), @tagName(cc),
});
errdefer msg.destroy(sema.gpa);
@@ -9618,7 +9616,7 @@ fn funcCommon(
}
if (param_ty_comptime and !param_is_comptime and has_body and !block.isComptime()) {
const msg = msg: {
- const msg = try sema.errMsg(param_src, "parameter of type '{}' must be declared comptime", .{
+ const msg = try sema.errMsg(param_src, "parameter of type '{f}' must be declared comptime", .{
param_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -9798,7 +9796,7 @@ fn finishFunc(
if (!return_type.isValidReturnType(zcu)) {
const opaque_str = if (return_type.zigTypeTag(zcu) == .@"opaque") "opaque " else "";
- return sema.fail(block, ret_ty_src, "{s}return type '{}' not allowed", .{
+ return sema.fail(block, ret_ty_src, "{s}return type '{f}' not allowed", .{
opaque_str, return_type.fmt(pt),
});
}
@@ -9806,7 +9804,7 @@ fn finishFunc(
!try sema.validateExternType(return_type, .ret_ty))
{
const msg = msg: {
- const msg = try sema.errMsg(ret_ty_src, "return type '{}' not allowed in function with calling convention '{s}'", .{
+ const msg = try sema.errMsg(ret_ty_src, "return type '{f}' not allowed in function with calling convention '{s}'", .{
return_type.fmt(pt), @tagName(cc_resolved),
});
errdefer msg.destroy(gpa);
@@ -9828,7 +9826,7 @@ fn finishFunc(
const msg = try sema.errMsg(
ret_ty_src,
- "function with comptime-only return type '{}' requires all parameters to be comptime",
+ "function with comptime-only return type '{f}' requires all parameters to be comptime",
.{return_type.fmt(pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -9897,17 +9895,16 @@ fn finishFunc(
.bad_arch => |allowed_archs| {
const ArchListFormatter = struct {
archs: []const std.Target.Cpu.Arch,
- pub fn format(formatter: @This(), comptime fmt: []const u8, options: std.fmt.FormatOptions, writer: anytype) !void {
- _ = fmt;
- _ = options;
+ pub fn format(formatter: @This(), w: *std.io.Writer, comptime fmt: []const u8) std.io.Writer.Error!void {
+ comptime assert(fmt.len == 0);
for (formatter.archs, 0..) |arch, i| {
if (i != 0)
- try writer.writeAll(", ");
- try writer.print("'{s}'", .{@tagName(arch)});
+ try w.writeAll(", ");
+ try w.print("'{s}'", .{@tagName(arch)});
}
}
};
- return sema.fail(block, cc_src, "calling convention '{s}' only available on architectures {}", .{
+ return sema.fail(block, cc_src, "calling convention '{s}' only available on architectures {f}", .{
@tagName(cc_resolved),
ArchListFormatter{ .archs = allowed_archs },
});
@@ -10008,7 +10005,7 @@ fn analyzeAs(
const operand = try sema.resolveInst(zir_operand);
const dest_ty = try sema.resolveTypeOrPoison(block, src, zir_dest_type) orelse return operand;
switch (dest_ty.zigTypeTag(zcu)) {
- .@"opaque" => return sema.fail(block, src, "cannot cast to opaque type '{}'", .{dest_ty.fmt(pt)}),
+ .@"opaque" => return sema.fail(block, src, "cannot cast to opaque type '{f}'", .{dest_ty.fmt(pt)}),
.noreturn => return sema.fail(block, src, "cannot cast to noreturn", .{}),
else => {},
}
@@ -10036,12 +10033,12 @@ fn zirIntFromPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
const ptr_ty = operand_ty.scalarType(zcu);
const is_vector = operand_ty.zigTypeTag(zcu) == .vector;
if (!ptr_ty.isPtrAtRuntime(zcu)) {
- return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ty.fmt(pt)});
+ return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ty.fmt(pt)});
}
const pointee_ty = ptr_ty.childType(zcu);
if (try ptr_ty.comptimeOnlySema(pt)) {
const msg = msg: {
- const msg = try sema.errMsg(ptr_src, "comptime-only type '{}' has no pointer address", .{pointee_ty.fmt(pt)});
+ const msg = try sema.errMsg(ptr_src, "comptime-only type '{f}' has no pointer address", .{pointee_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsComptime(msg, ptr_src, pointee_ty);
break :msg msg;
@@ -10289,14 +10286,14 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.type,
.undefined,
.void,
- => return sema.fail(block, src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)}),
+ => return sema.fail(block, src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)}),
.@"enum" => {
const msg = msg: {
- const msg = try sema.errMsg(src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
switch (operand_ty.zigTypeTag(zcu)) {
- .int, .comptime_int => try sema.errNote(src, msg, "use @enumFromInt to cast from '{}'", .{operand_ty.fmt(pt)}),
+ .int, .comptime_int => try sema.errNote(src, msg, "use @enumFromInt to cast from '{f}'", .{operand_ty.fmt(pt)}),
else => {},
}
@@ -10307,11 +10304,11 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.pointer => {
const msg = msg: {
- const msg = try sema.errMsg(src, "cannot @bitCast to '{}'", .{dest_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "cannot @bitCast to '{f}'", .{dest_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
switch (operand_ty.zigTypeTag(zcu)) {
- .int, .comptime_int => try sema.errNote(src, msg, "use @ptrFromInt to cast from '{}'", .{operand_ty.fmt(pt)}),
- .pointer => try sema.errNote(src, msg, "use @ptrCast to cast from '{}'", .{operand_ty.fmt(pt)}),
+ .int, .comptime_int => try sema.errNote(src, msg, "use @ptrFromInt to cast from '{f}'", .{operand_ty.fmt(pt)}),
+ .pointer => try sema.errNote(src, msg, "use @ptrCast to cast from '{f}'", .{operand_ty.fmt(pt)}),
else => {},
}
@@ -10325,7 +10322,7 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.@"union" => "union",
else => unreachable,
};
- return sema.fail(block, src, "cannot @bitCast to '{}'; {s} does not have a guaranteed in-memory layout", .{
+ return sema.fail(block, src, "cannot @bitCast to '{f}'; {s} does not have a guaranteed in-memory layout", .{
dest_ty.fmt(pt), container,
});
},
@@ -10353,14 +10350,14 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.type,
.undefined,
.void,
- => return sema.fail(block, operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)}),
+ => return sema.fail(block, operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)}),
.@"enum" => {
const msg = msg: {
- const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)});
+ const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
switch (dest_ty.zigTypeTag(zcu)) {
- .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromEnum to cast to '{}'", .{dest_ty.fmt(pt)}),
+ .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromEnum to cast to '{f}'", .{dest_ty.fmt(pt)}),
else => {},
}
@@ -10370,11 +10367,11 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
},
.pointer => {
const msg = msg: {
- const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{}'", .{operand_ty.fmt(pt)});
+ const msg = try sema.errMsg(operand_src, "cannot @bitCast from '{f}'", .{operand_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
switch (dest_ty.zigTypeTag(zcu)) {
- .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromPtr to cast to '{}'", .{dest_ty.fmt(pt)}),
- .pointer => try sema.errNote(operand_src, msg, "use @ptrCast to cast to '{}'", .{dest_ty.fmt(pt)}),
+ .int, .comptime_int => try sema.errNote(operand_src, msg, "use @intFromPtr to cast to '{f}'", .{dest_ty.fmt(pt)}),
+ .pointer => try sema.errNote(operand_src, msg, "use @ptrCast to cast to '{f}'", .{dest_ty.fmt(pt)}),
else => {},
}
@@ -10388,7 +10385,7 @@ fn zirBitcast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
.@"union" => "union",
else => unreachable,
};
- return sema.fail(block, operand_src, "cannot @bitCast from '{}'; {s} does not have a guaranteed in-memory layout", .{
+ return sema.fail(block, operand_src, "cannot @bitCast from '{f}'; {s} does not have a guaranteed in-memory layout", .{
operand_ty.fmt(pt), container,
});
},
@@ -10431,7 +10428,7 @@ fn zirFloatCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
else => return sema.fail(
block,
src,
- "expected float or vector type, found '{}'",
+ "expected float or vector type, found '{f}'",
.{dest_ty.fmt(pt)},
),
};
@@ -10441,7 +10438,7 @@ fn zirFloatCast(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
else => return sema.fail(
block,
operand_src,
- "expected float or vector type, found '{}'",
+ "expected float or vector type, found '{f}'",
.{operand_ty.fmt(pt)},
),
}
@@ -10525,7 +10522,7 @@ fn zirElemPtr(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
if (indexable_ty.zigTypeTag(zcu) != .pointer) {
const capture_src = block.src(.{ .for_capture_from_input = inst_data.src_node });
const msg = msg: {
- const msg = try sema.errMsg(capture_src, "pointer capture of non pointer type '{}'", .{
+ const msg = try sema.errMsg(capture_src, "pointer capture of non pointer type '{f}'", .{
indexable_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -10667,7 +10664,7 @@ fn zirSliceSentinelTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
const lhs_ptr_ty = sema.typeOf(try sema.resolveInst(inst_data.operand));
const lhs_ty = switch (lhs_ptr_ty.zigTypeTag(zcu)) {
.pointer => lhs_ptr_ty.childType(zcu),
- else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{lhs_ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{lhs_ptr_ty.fmt(pt)}),
};
const sentinel_ty: Type = switch (lhs_ty.zigTypeTag(zcu)) {
@@ -10682,7 +10679,7 @@ fn zirSliceSentinelTy(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileE
};
},
},
- else => return sema.fail(block, src, "slice of non-array type '{}'", .{lhs_ty.fmt(pt)}),
+ else => return sema.fail(block, src, "slice of non-array type '{f}'", .{lhs_ty.fmt(pt)}),
};
return Air.internedToRef(sentinel_ty.toIntern());
@@ -10877,7 +10874,7 @@ const SwitchProngAnalysis = struct {
.base_node_inst = capture_src.base_node_inst,
.offset = .{ .switch_tag_capture = capture_src.offset.switch_capture },
};
- return sema.fail(block, tag_capture_src, "cannot capture tag of non-union type '{}'", .{
+ return sema.fail(block, tag_capture_src, "cannot capture tag of non-union type '{f}'", .{
operand_ty.fmt(pt),
});
}
@@ -11309,7 +11306,7 @@ fn switchCond(
.@"enum",
=> {
if (operand_ty.isSlice(zcu)) {
- return sema.fail(block, src, "switch on type '{}'", .{operand_ty.fmt(pt)});
+ return sema.fail(block, src, "switch on type '{f}'", .{operand_ty.fmt(pt)});
}
if ((try sema.typeHasOnePossibleValue(operand_ty))) |opv| {
return Air.internedToRef(opv.toIntern());
@@ -11344,7 +11341,7 @@ fn switchCond(
.vector,
.frame,
.@"anyframe",
- => return sema.fail(block, src, "switch on type '{}'", .{operand_ty.fmt(pt)}),
+ => return sema.fail(block, src, "switch on type '{f}'", .{operand_ty.fmt(pt)}),
}
}
@@ -11445,7 +11442,7 @@ fn zirSwitchBlockErrUnion(sema: *Sema, block: *Block, inst: Zir.Inst.Index) Comp
operand_ty;
if (operand_err_set.zigTypeTag(zcu) != .error_union) {
- return sema.fail(block, switch_src, "expected error union type, found '{}'", .{
+ return sema.fail(block, switch_src, "expected error union type, found '{f}'", .{
operand_ty.fmt(pt),
});
}
@@ -11699,7 +11696,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
// Even if the operand is comptime-known, this `switch` is runtime.
if (try operand_ty.comptimeOnlySema(pt)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(operand_src, "operand of switch loop has comptime-only type '{}'", .{operand_ty.fmt(pt)});
+ const msg = try sema.errMsg(operand_src, "operand of switch loop has comptime-only type '{f}'", .{operand_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.errNote(operand_src, msg, "switch loops are evaluated at runtime outside of comptime scopes", .{});
break :msg msg;
@@ -11923,14 +11920,14 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
cond_ty,
i,
msg,
- "unhandled enumeration value: '{}'",
+ "unhandled enumeration value: '{f}'",
.{field_name.fmt(&zcu.intern_pool)},
);
}
try sema.errNote(
cond_ty.srcLoc(zcu),
msg,
- "enum '{}' declared here",
+ "enum '{f}' declared here",
.{cond_ty.fmt(pt)},
);
break :msg msg;
@@ -12142,7 +12139,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
return sema.fail(
block,
src,
- "else prong required when switching on type '{}'",
+ "else prong required when switching on type '{f}'",
.{cond_ty.fmt(pt)},
);
}
@@ -12218,7 +12215,7 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index, operand_is_r
.@"anyframe",
.comptime_float,
.float,
- => return sema.fail(block, operand_src, "invalid switch operand type '{}'", .{
+ => return sema.fail(block, operand_src, "invalid switch operand type '{f}'", .{
raw_operand_ty.fmt(pt),
}),
}
@@ -12747,7 +12744,7 @@ fn analyzeSwitchRuntimeBlock(
if (special.is_inline) switch (operand_ty.zigTypeTag(zcu)) {
.@"enum" => {
if (operand_ty.isNonexhaustiveEnum(zcu) and !union_originally) {
- return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{
+ return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{
operand_ty.fmt(pt),
});
}
@@ -12803,7 +12800,7 @@ fn analyzeSwitchRuntimeBlock(
},
.error_set => {
if (operand_ty.isAnyError(zcu)) {
- return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{
+ return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{
operand_ty.fmt(pt),
});
}
@@ -12964,7 +12961,7 @@ fn analyzeSwitchRuntimeBlock(
cases_extra.appendSliceAssumeCapacity(@ptrCast(case_block.instructions.items));
}
},
- else => return sema.fail(block, special_prong_src, "cannot enumerate values of type '{}' for 'inline else'", .{
+ else => return sema.fail(block, special_prong_src, "cannot enumerate values of type '{f}' for 'inline else'", .{
operand_ty.fmt(pt),
}),
};
@@ -13478,7 +13475,7 @@ fn validateErrSetSwitch(
try sema.errNote(
src,
msg,
- "unhandled error value: 'error.{}'",
+ "unhandled error value: 'error.{f}'",
.{error_name.fmt(ip)},
);
}
@@ -13704,7 +13701,7 @@ fn validateSwitchNoRange(
const msg = msg: {
const msg = try sema.errMsg(
operand_src,
- "ranges not allowed when switching on type '{}'",
+ "ranges not allowed when switching on type '{f}'",
.{operand_ty.fmt(sema.pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -13862,7 +13859,7 @@ fn zirHasField(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
.array_type => break :hf field_name.eqlSlice("len", ip),
else => {},
}
- return sema.fail(block, ty_src, "type '{}' does not support '@hasField'", .{
+ return sema.fail(block, ty_src, "type '{f}' does not support '@hasField'", .{
ty.fmt(pt),
});
};
@@ -14050,7 +14047,7 @@ fn zirShl(
while (i < rhs_ty.vectorLen(zcu)) : (i += 1) {
const rhs_elem = try rhs_val.elemValue(pt, i);
if (rhs_elem.compareHetero(.gte, bit_value, zcu)) {
- return sema.fail(block, rhs_src, "shift amount '{}' at index '{d}' is too large for operand type '{}'", .{
+ return sema.fail(block, rhs_src, "shift amount '{f}' at index '{d}' is too large for operand type '{f}'", .{
rhs_elem.fmtValueSema(pt, sema),
i,
scalar_ty.fmt(pt),
@@ -14058,7 +14055,7 @@ fn zirShl(
}
}
} else if (rhs_val.compareHetero(.gte, bit_value, zcu)) {
- return sema.fail(block, rhs_src, "shift amount '{}' is too large for operand type '{}'", .{
+ return sema.fail(block, rhs_src, "shift amount '{f}' is too large for operand type '{f}'", .{
rhs_val.fmtValueSema(pt, sema),
scalar_ty.fmt(pt),
});
@@ -14069,14 +14066,14 @@ fn zirShl(
while (i < rhs_ty.vectorLen(zcu)) : (i += 1) {
const rhs_elem = try rhs_val.elemValue(pt, i);
if (rhs_elem.compareHetero(.lt, try pt.intValue(scalar_rhs_ty, 0), zcu)) {
- return sema.fail(block, rhs_src, "shift by negative amount '{}' at index '{d}'", .{
+ return sema.fail(block, rhs_src, "shift by negative amount '{f}' at index '{d}'", .{
rhs_elem.fmtValueSema(pt, sema),
i,
});
}
}
} else if (rhs_val.compareHetero(.lt, try pt.intValue(rhs_ty, 0), zcu)) {
- return sema.fail(block, rhs_src, "shift by negative amount '{}'", .{
+ return sema.fail(block, rhs_src, "shift by negative amount '{f}'", .{
rhs_val.fmtValueSema(pt, sema),
});
}
@@ -14231,7 +14228,7 @@ fn zirShr(
while (i < rhs_ty.vectorLen(zcu)) : (i += 1) {
const rhs_elem = try rhs_val.elemValue(pt, i);
if (rhs_elem.compareHetero(.gte, bit_value, zcu)) {
- return sema.fail(block, rhs_src, "shift amount '{}' at index '{d}' is too large for operand type '{}'", .{
+ return sema.fail(block, rhs_src, "shift amount '{f}' at index '{d}' is too large for operand type '{f}'", .{
rhs_elem.fmtValueSema(pt, sema),
i,
scalar_ty.fmt(pt),
@@ -14239,7 +14236,7 @@ fn zirShr(
}
}
} else if (rhs_val.compareHetero(.gte, bit_value, zcu)) {
- return sema.fail(block, rhs_src, "shift amount '{}' is too large for operand type '{}'", .{
+ return sema.fail(block, rhs_src, "shift amount '{f}' is too large for operand type '{f}'", .{
rhs_val.fmtValueSema(pt, sema),
scalar_ty.fmt(pt),
});
@@ -14250,14 +14247,14 @@ fn zirShr(
while (i < rhs_ty.vectorLen(zcu)) : (i += 1) {
const rhs_elem = try rhs_val.elemValue(pt, i);
if (rhs_elem.compareHetero(.lt, try pt.intValue(rhs_ty.childType(zcu), 0), zcu)) {
- return sema.fail(block, rhs_src, "shift by negative amount '{}' at index '{d}'", .{
+ return sema.fail(block, rhs_src, "shift by negative amount '{f}' at index '{d}'", .{
rhs_elem.fmtValueSema(pt, sema),
i,
});
}
}
} else if (rhs_val.compareHetero(.lt, try pt.intValue(rhs_ty, 0), zcu)) {
- return sema.fail(block, rhs_src, "shift by negative amount '{}'", .{
+ return sema.fail(block, rhs_src, "shift by negative amount '{f}'", .{
rhs_val.fmtValueSema(pt, sema),
});
}
@@ -14543,11 +14540,11 @@ fn zirArrayCat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const lhs_info = try sema.getArrayCatInfo(block, lhs_src, lhs, rhs_ty) orelse lhs_info: {
if (lhs_is_tuple) break :lhs_info undefined;
- return sema.fail(block, lhs_src, "expected indexable; found '{}'", .{lhs_ty.fmt(pt)});
+ return sema.fail(block, lhs_src, "expected indexable; found '{f}'", .{lhs_ty.fmt(pt)});
};
const rhs_info = try sema.getArrayCatInfo(block, rhs_src, rhs, lhs_ty) orelse {
assert(!rhs_is_tuple);
- return sema.fail(block, rhs_src, "expected indexable; found '{}'", .{rhs_ty.fmt(pt)});
+ return sema.fail(block, rhs_src, "expected indexable; found '{f}'", .{rhs_ty.fmt(pt)});
};
const resolved_elem_ty = t: {
@@ -15000,7 +14997,7 @@ fn zirArrayMul(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
// Analyze the lhs first, to catch the case that someone tried to do exponentiation
const lhs_info = try sema.getArrayCatInfo(block, lhs_src, lhs, lhs_ty) orelse {
const msg = msg: {
- const msg = try sema.errMsg(lhs_src, "expected indexable; found '{}'", .{lhs_ty.fmt(pt)});
+ const msg = try sema.errMsg(lhs_src, "expected indexable; found '{f}'", .{lhs_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
switch (lhs_ty.zigTypeTag(zcu)) {
.int, .float, .comptime_float, .comptime_int, .vector => {
@@ -15132,7 +15129,7 @@ fn zirNegate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
.int, .comptime_int, .float, .comptime_float => false,
else => true,
}) {
- return sema.fail(block, src, "negation of type '{}'", .{rhs_ty.fmt(pt)});
+ return sema.fail(block, src, "negation of type '{f}'", .{rhs_ty.fmt(pt)});
}
if (rhs_scalar_ty.isAnyFloat()) {
@@ -15163,7 +15160,7 @@ fn zirNegateWrap(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
switch (rhs_scalar_ty.zigTypeTag(zcu)) {
.int, .comptime_int, .float, .comptime_float => {},
- else => return sema.fail(block, src, "negation of type '{}'", .{rhs_ty.fmt(pt)}),
+ else => return sema.fail(block, src, "negation of type '{f}'", .{rhs_ty.fmt(pt)}),
}
const lhs = Air.internedToRef((try sema.splat(rhs_ty, try pt.intValue(rhs_scalar_ty, 0))).toIntern());
@@ -15237,7 +15234,7 @@ fn zirDiv(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
return sema.fail(
block,
src,
- "ambiguous coercion of division operands '{}' and '{}'; non-zero remainder '{}'",
+ "ambiguous coercion of division operands '{f}' and '{f}'; non-zero remainder '{f}'",
.{ lhs_ty.fmt(pt), rhs_ty.fmt(pt), rem.fmtValueSema(pt, sema) },
);
}
@@ -15289,7 +15286,7 @@ fn zirDiv(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.Ins
return sema.fail(
block,
src,
- "division with '{}' and '{}': signed integers must use @divTrunc, @divFloor, or @divExact",
+ "division with '{f}' and '{f}': signed integers must use @divTrunc, @divFloor, or @divExact",
.{ lhs_ty.fmt(pt), rhs_ty.fmt(pt) },
);
}
@@ -15951,7 +15948,7 @@ fn zirOverflowArithmetic(
const rhs = try sema.coerce(block, rhs_dest_ty, uncasted_rhs, rhs_src);
if (dest_ty.scalarType(zcu).zigTypeTag(zcu) != .int) {
- return sema.fail(block, src, "expected vector of integers or integer tag type, found '{}'", .{dest_ty.fmt(pt)});
+ return sema.fail(block, src, "expected vector of integers or integer tag type, found '{f}'", .{dest_ty.fmt(pt)});
}
const maybe_lhs_val = try sema.resolveValue(lhs);
@@ -16157,14 +16154,14 @@ fn analyzeArithmetic(
return sema.failWithInvalidPtrArithmetic(block, src, "pointer-pointer", "subtraction");
}
if (!lhs_ty.elemType2(zcu).eql(rhs_ty.elemType2(zcu), zcu)) {
- return sema.fail(block, src, "incompatible pointer arithmetic operands '{}' and '{}'", .{
+ return sema.fail(block, src, "incompatible pointer arithmetic operands '{f}' and '{f}'", .{
lhs_ty.fmt(pt), rhs_ty.fmt(pt),
});
}
const elem_size = lhs_ty.elemType2(zcu).abiSize(zcu);
if (elem_size == 0) {
- return sema.fail(block, src, "pointer arithmetic requires element type '{}' to have runtime bits", .{
+ return sema.fail(block, src, "pointer arithmetic requires element type '{f}' to have runtime bits", .{
lhs_ty.elemType2(zcu).fmt(pt),
});
}
@@ -16215,7 +16212,7 @@ fn analyzeArithmetic(
};
if (!try lhs_ty.elemType2(zcu).hasRuntimeBitsSema(pt)) {
- return sema.fail(block, src, "pointer arithmetic requires element type '{}' to have runtime bits", .{
+ return sema.fail(block, src, "pointer arithmetic requires element type '{f}' to have runtime bits", .{
lhs_ty.elemType2(zcu).fmt(pt),
});
}
@@ -16619,7 +16616,7 @@ fn zirCmpEq(
if (lhs_ty_tag == .null or rhs_ty_tag == .null) {
const non_null_type = if (lhs_ty_tag == .null) rhs_ty else lhs_ty;
- return sema.fail(block, src, "comparison of '{}' with null", .{non_null_type.fmt(pt)});
+ return sema.fail(block, src, "comparison of '{f}' with null", .{non_null_type.fmt(pt)});
}
if (lhs_ty_tag == .@"union" and (rhs_ty_tag == .enum_literal or rhs_ty_tag == .@"enum")) {
@@ -16676,7 +16673,7 @@ fn analyzeCmpUnionTag(
const msg = msg: {
const msg = try sema.errMsg(un_src, "comparison of union and enum literal is only valid for tagged union types", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(union_ty.srcLoc(zcu), msg, "union '{}' is not a tagged union", .{union_ty.fmt(pt)});
+ try sema.errNote(union_ty.srcLoc(zcu), msg, "union '{f}' is not a tagged union", .{union_ty.fmt(pt)});
break :msg msg;
};
return sema.failWithOwnedErrorMsg(block, msg);
@@ -16762,7 +16759,7 @@ fn analyzeCmp(
const instructions = &[_]Air.Inst.Ref{ lhs, rhs };
const resolved_type = try sema.resolvePeerTypes(block, src, instructions, .{ .override = &[_]?LazySrcLoc{ lhs_src, rhs_src } });
if (!resolved_type.isSelfComparable(zcu, is_equality_cmp)) {
- return sema.fail(block, src, "operator {s} not allowed for type '{}'", .{
+ return sema.fail(block, src, "operator {s} not allowed for type '{f}'", .{
compareOperatorName(op), resolved_type.fmt(pt),
});
}
@@ -16871,7 +16868,7 @@ fn zirSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
.undefined,
.null,
.@"opaque",
- => return sema.fail(block, operand_src, "no size available for type '{}'", .{ty.fmt(pt)}),
+ => return sema.fail(block, operand_src, "no size available for type '{f}'", .{ty.fmt(pt)}),
.type,
.enum_literal,
@@ -16912,7 +16909,7 @@ fn zirBitSizeOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
.undefined,
.null,
.@"opaque",
- => return sema.fail(block, operand_src, "no size available for type '{}'", .{operand_ty.fmt(pt)}),
+ => return sema.fail(block, operand_src, "no size available for type '{f}'", .{operand_ty.fmt(pt)}),
.type,
.enum_literal,
@@ -18212,7 +18209,7 @@ fn log2IntType(sema: *Sema, block: *Block, operand: Type, src: LazySrcLoc) Compi
return sema.fail(
block,
src,
- "bit shifting operation expected integer type, found '{}'",
+ "bit shifting operation expected integer type, found '{f}'",
.{operand.fmt(pt)},
);
}
@@ -18451,7 +18448,7 @@ fn checkSentinelType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !voi
const pt = sema.pt;
const zcu = pt.zcu;
if (!ty.isSelfComparable(zcu, true)) {
- return sema.fail(block, src, "non-scalar sentinel type '{}'", .{ty.fmt(pt)});
+ return sema.fail(block, src, "non-scalar sentinel type '{f}'", .{ty.fmt(pt)});
}
}
@@ -18501,7 +18498,7 @@ fn checkErrorType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void {
const zcu = pt.zcu;
switch (ty.zigTypeTag(zcu)) {
.error_set, .error_union, .undefined => return,
- else => return sema.fail(block, src, "expected error union type, found '{}'", .{
+ else => return sema.fail(block, src, "expected error union type, found '{f}'", .{
ty.fmt(pt),
}),
}
@@ -18645,7 +18642,7 @@ fn zirTry(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileError!
const pt = sema.pt;
const zcu = pt.zcu;
if (err_union_ty.zigTypeTag(zcu) != .error_union) {
- return sema.fail(parent_block, operand_src, "expected error union type, found '{}'", .{
+ return sema.fail(parent_block, operand_src, "expected error union type, found '{f}'", .{
err_union_ty.fmt(pt),
});
}
@@ -18705,7 +18702,7 @@ fn zirTryPtr(sema: *Sema, parent_block: *Block, inst: Zir.Inst.Index) CompileErr
const pt = sema.pt;
const zcu = pt.zcu;
if (err_union_ty.zigTypeTag(zcu) != .error_union) {
- return sema.fail(parent_block, operand_src, "expected error union type, found '{}'", .{
+ return sema.fail(parent_block, operand_src, "expected error union type, found '{f}'", .{
err_union_ty.fmt(pt),
});
}
@@ -18903,7 +18900,7 @@ fn zirRetImplicit(
const base_tag = sema.fn_ret_ty.baseZigTypeTag(zcu);
if (base_tag == .noreturn) {
const msg = msg: {
- const msg = try sema.errMsg(ret_ty_src, "function declared '{}' implicitly returns", .{
+ const msg = try sema.errMsg(ret_ty_src, "function declared '{f}' implicitly returns", .{
sema.fn_ret_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -18913,7 +18910,7 @@ fn zirRetImplicit(
return sema.failWithOwnedErrorMsg(block, msg);
} else if (base_tag != .void) {
const msg = msg: {
- const msg = try sema.errMsg(ret_ty_src, "function with non-void return type '{}' implicitly returns", .{
+ const msg = try sema.errMsg(ret_ty_src, "function with non-void return type '{f}' implicitly returns", .{
sema.fn_ret_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -19302,13 +19299,13 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
if (host_size != 0) {
if (bit_offset >= host_size * 8) {
- return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} starts {} bits after the end of a {} byte host integer", .{
+ return sema.fail(block, bitoffset_src, "packed type '{f}' at bit offset {} starts {} bits after the end of a {} byte host integer", .{
elem_ty.fmt(pt), bit_offset, bit_offset - host_size * 8, host_size,
});
}
const elem_bit_size = try elem_ty.bitSizeSema(pt);
if (elem_bit_size > host_size * 8 - bit_offset) {
- return sema.fail(block, bitoffset_src, "packed type '{}' at bit offset {} ends {} bits after the end of a {} byte host integer", .{
+ return sema.fail(block, bitoffset_src, "packed type '{f}' at bit offset {} ends {} bits after the end of a {} byte host integer", .{
elem_ty.fmt(pt), bit_offset, elem_bit_size - (host_size * 8 - bit_offset), host_size,
});
}
@@ -19323,7 +19320,7 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
} else if (inst_data.size == .c) {
if (!try sema.validateExternType(elem_ty, .other)) {
const msg = msg: {
- const msg = try sema.errMsg(elem_ty_src, "C pointers cannot point to non-C-ABI-compatible type '{}'", .{elem_ty.fmt(pt)});
+ const msg = try sema.errMsg(elem_ty_src, "C pointers cannot point to non-C-ABI-compatible type '{f}'", .{elem_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, elem_ty_src, elem_ty, .other);
@@ -19340,7 +19337,7 @@ fn zirPtrType(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
if (host_size != 0 and !try sema.validatePackedType(elem_ty)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(elem_ty_src, "bit-pointer cannot refer to value of type '{}'", .{elem_ty.fmt(pt)});
+ const msg = try sema.errMsg(elem_ty_src, "bit-pointer cannot refer to value of type '{f}'", .{elem_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotPacked(msg, elem_ty_src, elem_ty);
break :msg msg;
@@ -19509,7 +19506,7 @@ fn zirUnionInit(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!A
const extra = sema.code.extraData(Zir.Inst.UnionInit, inst_data.payload_index).data;
const union_ty = try sema.resolveType(block, ty_src, extra.union_type);
if (union_ty.zigTypeTag(pt.zcu) != .@"union") {
- return sema.fail(block, ty_src, "expected union type, found '{}'", .{union_ty.fmt(pt)});
+ return sema.fail(block, ty_src, "expected union type, found '{f}'", .{union_ty.fmt(pt)});
}
const field_name = try sema.resolveConstStringIntern(block, field_src, extra.field_name, .{ .simple = .union_field_name });
const init = try sema.resolveInst(extra.init);
@@ -19672,7 +19669,7 @@ fn zirStructInit(
const msg = try sema.errMsg(src, "cannot initialize 'noreturn' field of union", .{});
errdefer msg.destroy(sema.gpa);
- try sema.addFieldErrNote(resolved_ty, field_index, msg, "field '{}' declared here", .{
+ try sema.addFieldErrNote(resolved_ty, field_index, msg, "field '{f}' declared here", .{
field_name.fmt(ip),
});
try sema.addDeclaredHereNote(msg, resolved_ty);
@@ -19791,7 +19788,7 @@ fn finishStructInit(
const field_init = struct_type.fieldInit(ip, i);
if (field_init == .none) {
const field_name = struct_type.field_names.get(ip)[i];
- const template = "missing struct field: {}";
+ const template = "missing struct field: {f}";
const args = .{field_name.fmt(ip)};
if (root_msg) |msg| {
try sema.errNote(init_src, msg, template, args);
@@ -20406,7 +20403,7 @@ fn fieldType(
},
else => {},
}
- return sema.fail(block, ty_src, "expected struct or union; found '{}'", .{
+ return sema.fail(block, ty_src, "expected struct or union; found '{f}'", .{
cur_ty.fmt(pt),
});
}
@@ -20453,7 +20450,7 @@ fn zirAlignOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const operand_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const ty = try sema.resolveType(block, operand_src, inst_data.operand);
if (ty.isNoReturn(zcu)) {
- return sema.fail(block, operand_src, "no align available for type '{}'", .{ty.fmt(sema.pt)});
+ return sema.fail(block, operand_src, "no align available for type '{f}'", .{ty.fmt(sema.pt)});
}
const val = try ty.lazyAbiAlignment(sema.pt);
return Air.internedToRef(val.toIntern());
@@ -20531,7 +20528,7 @@ fn zirAbs(
else => return sema.fail(
block,
operand_src,
- "expected integer, float, or vector of either integers or floats, found '{}'",
+ "expected integer, float, or vector of either integers or floats, found '{f}'",
.{operand_ty.fmt(pt)},
),
};
@@ -20600,7 +20597,7 @@ fn zirUnaryMath(
else => return sema.fail(
block,
operand_src,
- "expected vector of floats or float type, found '{}'",
+ "expected vector of floats or float type, found '{f}'",
.{operand_ty.fmt(pt)},
),
}
@@ -20629,8 +20626,8 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
},
.@"enum" => operand_ty,
.@"union" => operand_ty.unionTagType(zcu) orelse
- return sema.fail(block, src, "union '{}' is untagged", .{operand_ty.fmt(pt)}),
- else => return sema.fail(block, operand_src, "expected enum or union; found '{}'", .{
+ return sema.fail(block, src, "union '{f}' is untagged", .{operand_ty.fmt(pt)}),
+ else => return sema.fail(block, operand_src, "expected enum or union; found '{f}'", .{
operand_ty.fmt(pt),
}),
};
@@ -20638,7 +20635,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
// TODO I don't think this is the correct way to handle this but
// it prevents a crash.
// https://github.com/ziglang/zig/issues/15909
- return sema.fail(block, operand_src, "cannot get @tagName of empty enum '{}'", .{
+ return sema.fail(block, operand_src, "cannot get @tagName of empty enum '{f}'", .{
enum_ty.fmt(pt),
});
}
@@ -20646,7 +20643,7 @@ fn zirTagName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
if (try sema.resolveDefinedValue(block, operand_src, casted_operand)) |val| {
const field_index = enum_ty.enumTagFieldIndex(val, zcu) orelse {
const msg = msg: {
- const msg = try sema.errMsg(src, "no field with value '{}' in enum '{}'", .{
+ const msg = try sema.errMsg(src, "no field with value '{f}' in enum '{f}'", .{
val.fmtValueSema(pt, sema), enum_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -20833,7 +20830,7 @@ fn zirReify(
} else if (ptr_size == .c) {
if (!try sema.validateExternType(elem_ty, .other)) {
const msg = msg: {
- const msg = try sema.errMsg(src, "C pointers cannot point to non-C-ABI-compatible type '{}'", .{elem_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "C pointers cannot point to non-C-ABI-compatible type '{f}'", .{elem_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, elem_ty, .other);
@@ -20946,7 +20943,7 @@ fn zirReify(
_ = try pt.getErrorValue(name);
const gop = names.getOrPutAssumeCapacity(name);
if (gop.found_existing) {
- return sema.fail(block, src, "duplicate error '{}'", .{
+ return sema.fail(block, src, "duplicate error '{f}'", .{
name.fmt(ip),
});
}
@@ -21294,7 +21291,7 @@ fn reifyEnum(
if (!try sema.intFitsInType(field_value_val, tag_ty, null)) {
// TODO: better source location
- return sema.fail(block, src, "field '{}' with enumeration value '{}' is too large for backing int type '{}'", .{
+ return sema.fail(block, src, "field '{f}' with enumeration value '{f}' is too large for backing int type '{f}'", .{
field_name.fmt(ip),
field_value_val.fmtValueSema(pt, sema),
tag_ty.fmt(pt),
@@ -21305,14 +21302,14 @@ fn reifyEnum(
if (wip_ty.nextField(ip, field_name, coerced_field_val.toIntern())) |conflict| {
return sema.failWithOwnedErrorMsg(block, switch (conflict.kind) {
.name => msg: {
- const msg = try sema.errMsg(src, "duplicate enum field '{}'", .{field_name.fmt(ip)});
+ const msg = try sema.errMsg(src, "duplicate enum field '{f}'", .{field_name.fmt(ip)});
errdefer msg.destroy(gpa);
_ = conflict.prev_field_idx; // TODO: this note is incorrect
try sema.errNote(src, msg, "other field here", .{});
break :msg msg;
},
.value => msg: {
- const msg = try sema.errMsg(src, "enum tag value {} already taken", .{field_value_val.fmtValueSema(pt, sema)});
+ const msg = try sema.errMsg(src, "enum tag value {f} already taken", .{field_value_val.fmtValueSema(pt, sema)});
errdefer msg.destroy(gpa);
_ = conflict.prev_field_idx; // TODO: this note is incorrect
try sema.errNote(src, msg, "other enum tag value here", .{});
@@ -21460,13 +21457,13 @@ fn reifyUnion(
const enum_index = enum_tag_ty.enumFieldIndex(field_name, zcu) orelse {
// TODO: better source location
- return sema.fail(block, src, "no field named '{}' in enum '{}'", .{
+ return sema.fail(block, src, "no field named '{f}' in enum '{f}'", .{
field_name.fmt(ip), enum_tag_ty.fmt(pt),
});
};
if (seen_tags.isSet(enum_index)) {
// TODO: better source location
- return sema.fail(block, src, "duplicate union field {}", .{field_name.fmt(ip)});
+ return sema.fail(block, src, "duplicate union field {f}", .{field_name.fmt(ip)});
}
seen_tags.set(enum_index);
@@ -21487,7 +21484,7 @@ fn reifyUnion(
var it = seen_tags.iterator(.{ .kind = .unset });
while (it.next()) |enum_index| {
const field_name = enum_tag_ty.enumFieldName(enum_index, zcu);
- try sema.addFieldErrNote(enum_tag_ty, enum_index, msg, "field '{}' missing, declared here", .{
+ try sema.addFieldErrNote(enum_tag_ty, enum_index, msg, "field '{f}' missing, declared here", .{
field_name.fmt(ip),
});
}
@@ -21512,7 +21509,7 @@ fn reifyUnion(
const gop = field_names.getOrPutAssumeCapacity(field_name);
if (gop.found_existing) {
// TODO: better source location
- return sema.fail(block, src, "duplicate union field {}", .{field_name.fmt(ip)});
+ return sema.fail(block, src, "duplicate union field {f}", .{field_name.fmt(ip)});
}
field_ty.* = field_type_val.toIntern();
@@ -21544,7 +21541,7 @@ fn reifyUnion(
}
if (layout == .@"extern" and !try sema.validateExternType(field_ty, .union_field)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "extern unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "extern unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, field_ty, .union_field);
@@ -21554,7 +21551,7 @@ fn reifyUnion(
});
} else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "packed unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "packed unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotPacked(msg, src, field_ty);
@@ -21636,7 +21633,7 @@ fn reifyTuple(
const field_name_index = field_name.toUnsigned(ip) orelse return sema.fail(
block,
src,
- "tuple cannot have non-numeric field '{}'",
+ "tuple cannot have non-numeric field '{f}'",
.{field_name.fmt(ip)},
);
if (field_name_index != field_idx) {
@@ -21814,7 +21811,7 @@ fn reifyStruct(
const field_name = try sema.sliceToIpString(block, src, field_name_val, undefined);
if (struct_type.addFieldName(ip, field_name)) |prev_index| {
_ = prev_index; // TODO: better source location
- return sema.fail(block, src, "duplicate struct field name {}", .{field_name.fmt(ip)});
+ return sema.fail(block, src, "duplicate struct field name {f}", .{field_name.fmt(ip)});
}
if (any_aligned_fields) {
@@ -21883,7 +21880,7 @@ fn reifyStruct(
}
if (layout == .@"extern" and !try sema.validateExternType(field_ty, .struct_field)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "extern structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "extern structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, field_ty, .struct_field);
@@ -21893,7 +21890,7 @@ fn reifyStruct(
});
} else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "packed structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "packed structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(gpa);
try sema.explainWhyTypeIsNotPacked(msg, src, field_ty);
@@ -21970,7 +21967,7 @@ fn zirCVaArg(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C
if (!try sema.validateExternType(arg_ty, .param_ty)) {
const msg = msg: {
- const msg = try sema.errMsg(ty_src, "cannot get '{}' from variadic argument", .{arg_ty.fmt(sema.pt)});
+ const msg = try sema.errMsg(ty_src, "cannot get '{f}' from variadic argument", .{arg_ty.fmt(sema.pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, ty_src, arg_ty, .param_ty);
@@ -22029,7 +22026,7 @@ fn zirTypeName(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const ty_src = block.builtinCallArgSrc(inst_data.src_node, 0);
const ty = try sema.resolveType(block, ty_src, inst_data.operand);
- const type_name = try ip.getOrPutStringFmt(sema.gpa, pt.tid, "{}", .{ty.fmt(pt)}, .no_embedded_nulls);
+ const type_name = try ip.getOrPutStringFmt(sema.gpa, pt.tid, "{f}", .{ty.fmt(pt)}, .no_embedded_nulls);
return sema.addNullTerminatedStrLit(type_name);
}
@@ -22157,7 +22154,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
if (ptr_ty.isSlice(zcu)) {
const msg = msg: {
- const msg = try sema.errMsg(src, "integer cannot be converted to slice type '{}'", .{ptr_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "integer cannot be converted to slice type '{f}'", .{ptr_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "slice length cannot be inferred from address", .{});
break :msg msg;
@@ -22184,7 +22181,7 @@ fn zirPtrFromInt(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!
}
if (try ptr_ty.comptimeOnlySema(pt)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "pointer to comptime-only type '{}' must be comptime-known, but operand is runtime-known", .{ptr_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "pointer to comptime-only type '{f}' must be comptime-known, but operand is runtime-known", .{ptr_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsComptime(msg, src, ptr_ty);
@@ -22241,7 +22238,7 @@ fn ptrFromIntVal(
}
const addr = try operand_val.toUnsignedIntSema(pt);
if (!ptr_ty.isAllowzeroPtr(zcu) and addr == 0)
- return sema.fail(block, operand_src, "pointer type '{}' does not allow address zero", .{ptr_ty.fmt(pt)});
+ return sema.fail(block, operand_src, "pointer type '{f}' does not allow address zero", .{ptr_ty.fmt(pt)});
if (addr != 0 and ptr_align != .none) {
const masked_addr = if (ptr_ty.childType(zcu).fnPtrMaskOrNull(zcu)) |mask|
addr & mask
@@ -22249,7 +22246,7 @@ fn ptrFromIntVal(
addr;
if (!ptr_align.check(masked_addr)) {
- return sema.fail(block, operand_src, "pointer type '{}' requires aligned address", .{ptr_ty.fmt(pt)});
+ return sema.fail(block, operand_src, "pointer type '{f}' requires aligned address", .{ptr_ty.fmt(pt)});
}
}
@@ -22294,8 +22291,8 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData
errdefer msg.destroy(sema.gpa);
const dest_payload_ty = dest_ty.errorUnionPayload(zcu);
const operand_payload_ty = operand_ty.errorUnionPayload(zcu);
- try sema.errNote(src, msg, "destination payload is '{}'", .{dest_payload_ty.fmt(pt)});
- try sema.errNote(src, msg, "operand payload is '{}'", .{operand_payload_ty.fmt(pt)});
+ try sema.errNote(src, msg, "destination payload is '{f}'", .{dest_payload_ty.fmt(pt)});
+ try sema.errNote(src, msg, "operand payload is '{f}'", .{operand_payload_ty.fmt(pt)});
try addDeclaredHereNote(sema, msg, dest_ty);
try addDeclaredHereNote(sema, msg, operand_ty);
break :msg msg;
@@ -22340,7 +22337,7 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData
break :disjoint true;
};
if (disjoint and !(operand_tag == .error_union and dest_tag == .error_union)) {
- return sema.fail(block, src, "error sets '{}' and '{}' have no common errors", .{
+ return sema.fail(block, src, "error sets '{f}' and '{f}' have no common errors", .{
operand_err_ty.fmt(pt), dest_err_ty.fmt(pt),
});
}
@@ -22360,7 +22357,7 @@ fn zirErrorCast(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData
};
if (!dest_err_ty.isAnyError(zcu) and !Type.errorSetHasFieldIp(ip, dest_err_ty.toIntern(), err_name)) {
- return sema.fail(block, src, "'error.{}' not a member of error set '{}'", .{
+ return sema.fail(block, src, "'error.{f}' not a member of error set '{f}'", .{
err_name.fmt(ip), dest_err_ty.fmt(pt),
});
}
@@ -22520,13 +22517,15 @@ fn ptrCastFull(
const src_elem_size = src_elem_ty.abiSize(zcu);
const dest_elem_size = dest_elem_ty.abiSize(zcu);
if (dest_elem_size == 0) {
- return sema.fail(block, src, "cannot infer length of slice of zero-bit '{}' from '{}'", .{ dest_elem_ty.fmt(pt), operand_ty.fmt(pt) });
+ return sema.fail(block, src, "cannot infer length of slice of zero-bit '{f}' from '{f}'", .{
+ dest_elem_ty.fmt(pt), operand_ty.fmt(pt),
+ });
}
if (opt_src_len) |src_len| {
const bytes = src_len * src_elem_size;
const dest_len = std.math.divExact(u64, bytes, dest_elem_size) catch switch (src_info.flags.size) {
.slice => return sema.fail(block, src, "slice length '{d}' does not divide exactly into destination elements", .{src_len}),
- .one => return sema.fail(block, src, "type '{}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}),
+ .one => return sema.fail(block, src, "type '{f}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}),
else => unreachable,
};
break :len .{ .constant = dest_len };
@@ -22544,7 +22543,9 @@ fn ptrCastFull(
// The source value has `src_len * src_base_per_elem` values of type `src_base_ty`.
// The result value will have `dest_len * dest_base_per_elem` values of type `dest_base_ty`.
if (dest_base_ty.toIntern() != src_base_ty.toIntern()) {
- return sema.fail(block, src, "cannot infer length of comptime-only '{}' from incompatible '{}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) });
+ return sema.fail(block, src, "cannot infer length of comptime-only '{f}' from incompatible '{f}'", .{
+ dest_ty.fmt(pt), operand_ty.fmt(pt),
+ });
}
// `src_base_ty` is comptime-only, so `src_elem_ty` is comptime-only, so `operand_ty` is
// comptime-only, so `operand` is comptime-known, so `opt_src_len` is non-`null`.
@@ -22552,7 +22553,7 @@ fn ptrCastFull(
const base_len = src_len * src_base_per_elem;
const dest_len = std.math.divExact(u64, base_len, dest_base_per_elem) catch switch (src_info.flags.size) {
.slice => return sema.fail(block, src, "slice length '{d}' does not divide exactly into destination elements", .{src_len}),
- .one => return sema.fail(block, src, "type '{}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}),
+ .one => return sema.fail(block, src, "type '{f}' does not divide exactly into destination elements", .{src_elem_ty.fmt(pt)}),
else => unreachable,
};
break :len .{ .constant = dest_len };
@@ -22613,7 +22614,7 @@ fn ptrCastFull(
);
if (imc_res == .ok) break :check_child;
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "pointer element type '{}' cannot coerce into element type '{}'", .{
+ const msg = try sema.errMsg(src, "pointer element type '{f}' cannot coerce into element type '{f}'", .{
src_child.fmt(pt), dest_child.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -22640,11 +22641,11 @@ fn ptrCastFull(
}
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = if (src_info.sentinel == .none) blk: {
- break :blk try sema.errMsg(src, "destination pointer requires '{}' sentinel", .{
+ break :blk try sema.errMsg(src, "destination pointer requires '{f}' sentinel", .{
Value.fromInterned(dest_info.sentinel).fmtValueSema(pt, sema),
});
} else blk: {
- break :blk try sema.errMsg(src, "pointer sentinel '{}' cannot coerce into pointer sentinel '{}'", .{
+ break :blk try sema.errMsg(src, "pointer sentinel '{f}' cannot coerce into pointer sentinel '{f}'", .{
Value.fromInterned(src_info.sentinel).fmtValueSema(pt, sema),
Value.fromInterned(dest_info.sentinel).fmtValueSema(pt, sema),
});
@@ -22686,7 +22687,7 @@ fn ptrCastFull(
if (dest_allows_zero) break :check_allowzero;
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "'{}' could have null values which are illegal in type '{}'", .{
+ const msg = try sema.errMsg(src, "'{f}' could have null values which are illegal in type '{f}'", .{
operand_ty.fmt(pt),
dest_ty.fmt(pt),
});
@@ -22714,10 +22715,10 @@ fn ptrCastFull(
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(src, "{s} increases pointer alignment", .{operation});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(operand_src, msg, "'{}' has alignment '{d}'", .{
+ try sema.errNote(operand_src, msg, "'{f}' has alignment '{d}'", .{
operand_ty.fmt(pt), src_align.toByteUnits() orelse 0,
});
- try sema.errNote(src, msg, "'{}' has alignment '{d}'", .{
+ try sema.errNote(src, msg, "'{f}' has alignment '{d}'", .{
dest_ty.fmt(pt), dest_align.toByteUnits() orelse 0,
});
try sema.errNote(src, msg, "use @alignCast to assert pointer alignment", .{});
@@ -22731,10 +22732,10 @@ fn ptrCastFull(
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(src, "{s} changes pointer address space", .{operation});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(operand_src, msg, "'{}' has address space '{s}'", .{
+ try sema.errNote(operand_src, msg, "'{f}' has address space '{s}'", .{
operand_ty.fmt(pt), @tagName(src_info.flags.address_space),
});
- try sema.errNote(src, msg, "'{}' has address space '{s}'", .{
+ try sema.errNote(src, msg, "'{f}' has address space '{s}'", .{
dest_ty.fmt(pt), @tagName(dest_info.flags.address_space),
});
try sema.errNote(src, msg, "use @addrSpaceCast to cast pointer address space", .{});
@@ -22801,7 +22802,7 @@ fn ptrCastFull(
if (operand_val.isNull(zcu)) {
if (!dest_ty.ptrAllowsZero(zcu)) {
- return sema.fail(block, operand_src, "null pointer casted to type '{}'", .{dest_ty.fmt(pt)});
+ return sema.fail(block, operand_src, "null pointer casted to type '{f}'", .{dest_ty.fmt(pt)});
}
if (dest_ty.zigTypeTag(zcu) == .optional) {
return Air.internedToRef((try pt.nullValue(dest_ty)).toIntern());
@@ -23092,7 +23093,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const operand_is_vector = operand_ty.zigTypeTag(zcu) == .vector;
const dest_is_vector = dest_ty.zigTypeTag(zcu) == .vector;
if (operand_is_vector != dest_is_vector) {
- return sema.fail(block, operand_src, "expected type '{}', found '{}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) });
+ return sema.fail(block, operand_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), operand_ty.fmt(pt) });
}
if (dest_scalar_ty.zigTypeTag(zcu) == .comptime_int) {
@@ -23112,7 +23113,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
}
if (operand_info.signedness != dest_info.signedness) {
- return sema.fail(block, operand_src, "expected {s} integer type, found '{}'", .{
+ return sema.fail(block, operand_src, "expected {s} integer type, found '{f}'", .{
@tagName(dest_info.signedness), operand_ty.fmt(pt),
});
}
@@ -23121,7 +23122,7 @@ fn zirTruncate(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
const msg = msg: {
const msg = try sema.errMsg(
src,
- "destination type '{}' has more bits than source type '{}'",
+ "destination type '{f}' has more bits than source type '{f}'",
.{ dest_ty.fmt(pt), operand_ty.fmt(pt) },
);
errdefer msg.destroy(sema.gpa);
@@ -23239,7 +23240,7 @@ fn zirByteSwap(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Ai
return sema.fail(
block,
operand_src,
- "@byteSwap requires the number of bits to be evenly divisible by 8, but {} has {} bits",
+ "@byteSwap requires the number of bits to be evenly divisible by 8, but {f} has {} bits",
.{ scalar_ty.fmt(pt), bits },
);
}
@@ -23359,7 +23360,7 @@ fn bitOffsetOf(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!u6
try ty.resolveLayout(pt);
switch (ty.zigTypeTag(zcu)) {
.@"struct" => {},
- else => return sema.fail(block, ty_src, "expected struct type, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, ty_src, "expected struct type, found '{f}'", .{ty.fmt(pt)}),
}
const field_index = if (ty.isTuple(zcu)) blk: {
@@ -23394,7 +23395,7 @@ fn checkNamespaceType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) Com
const zcu = pt.zcu;
switch (ty.zigTypeTag(zcu)) {
.@"struct", .@"enum", .@"union", .@"opaque" => return,
- else => return sema.fail(block, src, "expected struct, enum, union, or opaque; found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, src, "expected struct, enum, union, or opaque; found '{f}'", .{ty.fmt(pt)}),
}
}
@@ -23405,7 +23406,7 @@ fn checkIntType(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) CompileEr
switch (ty.zigTypeTag(zcu)) {
.comptime_int => return true,
.int => return false,
- else => return sema.fail(block, src, "expected integer type, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, src, "expected integer type, found '{f}'", .{ty.fmt(pt)}),
}
}
@@ -23459,7 +23460,7 @@ fn checkPtrOperand(
const msg = msg: {
const msg = try sema.errMsg(
ty_src,
- "expected pointer, found '{}'",
+ "expected pointer, found '{f}'",
.{ty.fmt(pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -23473,7 +23474,7 @@ fn checkPtrOperand(
.optional => if (ty.childType(zcu).zigTypeTag(zcu) == .pointer) return,
else => {},
}
- return sema.fail(block, ty_src, "expected pointer type, found '{}'", .{ty.fmt(pt)});
+ return sema.fail(block, ty_src, "expected pointer type, found '{f}'", .{ty.fmt(pt)});
}
fn checkPtrType(
@@ -23491,7 +23492,7 @@ fn checkPtrType(
const msg = msg: {
const msg = try sema.errMsg(
ty_src,
- "expected pointer type, found '{}'",
+ "expected pointer type, found '{f}'",
.{ty.fmt(pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -23505,7 +23506,7 @@ fn checkPtrType(
.optional => if (ty.childType(zcu).zigTypeTag(zcu) == .pointer) return,
else => {},
}
- return sema.fail(block, ty_src, "expected pointer type, found '{}'", .{ty.fmt(pt)});
+ return sema.fail(block, ty_src, "expected pointer type, found '{f}'", .{ty.fmt(pt)});
}
fn checkLogicalPtrOperation(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void {
@@ -23516,7 +23517,7 @@ fn checkLogicalPtrOperation(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Typ
const as = ty.ptrAddressSpace(zcu);
if (target_util.arePointersLogical(target, as)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "illegal operation on logical pointer of type '{}'", .{ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "illegal operation on logical pointer of type '{f}'", .{ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(
src,
@@ -23547,7 +23548,7 @@ fn checkVectorElemType(
.optional, .pointer => if (ty.isPtrAtRuntime(zcu)) return,
else => {},
}
- return sema.fail(block, ty_src, "expected integer, float, bool, or pointer for the vector element type; found '{}'", .{ty.fmt(pt)});
+ return sema.fail(block, ty_src, "expected integer, float, bool, or pointer for the vector element type; found '{f}'", .{ty.fmt(pt)});
}
fn checkFloatType(
@@ -23560,7 +23561,7 @@ fn checkFloatType(
const zcu = pt.zcu;
switch (ty.zigTypeTag(zcu)) {
.comptime_int, .comptime_float, .float => {},
- else => return sema.fail(block, ty_src, "expected float type, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, ty_src, "expected float type, found '{f}'", .{ty.fmt(pt)}),
}
}
@@ -23578,7 +23579,7 @@ fn checkNumericType(
.comptime_float, .float, .comptime_int, .int => {},
else => |t| return sema.fail(block, ty_src, "expected number, found '{}'", .{t}),
},
- else => return sema.fail(block, ty_src, "expected number, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, ty_src, "expected number, found '{f}'", .{ty.fmt(pt)}),
}
}
@@ -23612,7 +23613,7 @@ fn checkAtomicPtrOperand(
error.BadType => return sema.fail(
block,
elem_ty_src,
- "expected bool, integer, float, enum, packed struct, or pointer type; found '{}'",
+ "expected bool, integer, float, enum, packed struct, or pointer type; found '{f}'",
.{elem_ty.fmt(pt)},
),
};
@@ -23673,12 +23674,12 @@ fn checkIntOrVector(
const elem_ty = operand_ty.childType(zcu);
switch (elem_ty.zigTypeTag(zcu)) {
.int => return elem_ty,
- else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{}'", .{
+ else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{f}'", .{
elem_ty.fmt(pt),
}),
}
},
- else => return sema.fail(block, operand_src, "expected integer or vector, found '{}'", .{
+ else => return sema.fail(block, operand_src, "expected integer or vector, found '{f}'", .{
operand_ty.fmt(pt),
}),
}
@@ -23698,12 +23699,12 @@ fn checkIntOrVectorAllowComptime(
const elem_ty = operand_ty.childType(zcu);
switch (elem_ty.zigTypeTag(zcu)) {
.int, .comptime_int => return elem_ty,
- else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{}'", .{
+ else => return sema.fail(block, operand_src, "expected vector of integers; found vector of '{f}'", .{
elem_ty.fmt(pt),
}),
}
},
- else => return sema.fail(block, operand_src, "expected integer or vector, found '{}'", .{
+ else => return sema.fail(block, operand_src, "expected integer or vector, found '{f}'", .{
operand_ty.fmt(pt),
}),
}
@@ -23794,7 +23795,7 @@ fn checkVectorizableBinaryOperands(
}
} else {
const msg = msg: {
- const msg = try sema.errMsg(src, "mixed scalar and vector operands: '{}' and '{}'", .{
+ const msg = try sema.errMsg(src, "mixed scalar and vector operands: '{f}' and '{f}'", .{
lhs_ty.fmt(pt), rhs_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -23928,7 +23929,7 @@ fn zirCmpxchg(
return sema.fail(
block,
elem_ty_src,
- "expected bool, integer, enum, packed struct, or pointer type; found '{}'",
+ "expected bool, integer, enum, packed struct, or pointer type; found '{f}'",
.{elem_ty.fmt(pt)},
);
}
@@ -24012,7 +24013,7 @@ fn zirSplat(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.I
switch (dest_ty.zigTypeTag(zcu)) {
.array, .vector => {},
- else => return sema.fail(block, src, "expected array or vector type, found '{}'", .{dest_ty.fmt(pt)}),
+ else => return sema.fail(block, src, "expected array or vector type, found '{f}'", .{dest_ty.fmt(pt)}),
}
const operand = try sema.resolveInst(extra.rhs);
@@ -24088,7 +24089,7 @@ fn zirReduce(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
const zcu = pt.zcu;
if (operand_ty.zigTypeTag(zcu) != .vector) {
- return sema.fail(block, operand_src, "expected vector, found '{}'", .{operand_ty.fmt(pt)});
+ return sema.fail(block, operand_src, "expected vector, found '{f}'", .{operand_ty.fmt(pt)});
}
const scalar_ty = operand_ty.childType(zcu);
@@ -24097,13 +24098,13 @@ fn zirReduce(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
switch (operation) {
.And, .Or, .Xor => switch (scalar_ty.zigTypeTag(zcu)) {
.int, .bool => {},
- else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or boolean operand; found '{}'", .{
+ else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or boolean operand; found '{f}'", .{
@tagName(operation), operand_ty.fmt(pt),
}),
},
.Min, .Max, .Add, .Mul => switch (scalar_ty.zigTypeTag(zcu)) {
.int, .float => {},
- else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or float operand; found '{}'", .{
+ else => return sema.fail(block, operand_src, "@reduce operation '{s}' requires integer or float operand; found '{f}'", .{
@tagName(operation), operand_ty.fmt(pt),
}),
},
@@ -24157,7 +24158,7 @@ fn zirShuffle(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air
const mask_len = switch (sema.typeOf(mask).zigTypeTag(zcu)) {
.array, .vector => sema.typeOf(mask).arrayLen(zcu),
- else => return sema.fail(block, mask_src, "expected vector or array, found '{}'", .{sema.typeOf(mask).fmt(pt)}),
+ else => return sema.fail(block, mask_src, "expected vector or array, found '{f}'", .{sema.typeOf(mask).fmt(pt)}),
};
mask_ty = try pt.vectorType(.{
.len = @intCast(mask_len),
@@ -24184,11 +24185,14 @@ fn analyzeShuffle(
const b_src = block.builtinCallArgSrc(src_node, 2);
const mask_src = block.builtinCallArgSrc(src_node, 3);
- // If the type of `a` is `@Type(.undefined)`, i.e. the argument is untyped, this is 0, because it is an error to index into this vector.
+ // If the type of `a` is `@Type(.undefined)`, i.e. the argument is untyped,
+ // this is 0, because it is an error to index into this vector.
const a_len: u32 = switch (sema.typeOf(a_uncoerced).zigTypeTag(zcu)) {
.array, .vector => @intCast(sema.typeOf(a_uncoerced).arrayLen(zcu)),
.undefined => 0,
- else => return sema.fail(block, a_src, "expected vector of '{}', found '{}'", .{ elem_ty.fmt(pt), sema.typeOf(a_uncoerced).fmt(pt) }),
+ else => return sema.fail(block, a_src, "expected vector of '{f}', found '{f}'", .{
+ elem_ty.fmt(pt), sema.typeOf(a_uncoerced).fmt(pt),
+ }),
};
const a_ty = try pt.vectorType(.{ .len = a_len, .child = elem_ty.toIntern() });
const a_coerced = try sema.coerce(block, a_ty, a_uncoerced, a_src);
@@ -24197,7 +24201,9 @@ fn analyzeShuffle(
const b_len: u32 = switch (sema.typeOf(b_uncoerced).zigTypeTag(zcu)) {
.array, .vector => @intCast(sema.typeOf(b_uncoerced).arrayLen(zcu)),
.undefined => 0,
- else => return sema.fail(block, b_src, "expected vector of '{}', found '{}'", .{ elem_ty.fmt(pt), sema.typeOf(b_uncoerced).fmt(pt) }),
+ else => return sema.fail(block, b_src, "expected vector of '{f}', found '{f}'", .{
+ elem_ty.fmt(pt), sema.typeOf(b_uncoerced).fmt(pt),
+ }),
};
const b_ty = try pt.vectorType(.{ .len = b_len, .child = elem_ty.toIntern() });
const b_coerced = try sema.coerce(block, b_ty, b_uncoerced, b_src);
@@ -24235,7 +24241,7 @@ fn analyzeShuffle(
if (idx >= a_len) return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(mask_src, "mask element at index '{d}' selects out-of-bounds index", .{mask_idx});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(a_src, msg, "index '{d}' exceeds bounds of '{}' given here", .{ idx, a_ty.fmt(pt) });
+ try sema.errNote(a_src, msg, "index '{d}' exceeds bounds of '{f}' given here", .{ idx, a_ty.fmt(pt) });
if (idx < b_len) {
try sema.errNote(b_src, msg, "use '~@as(u32, {d})' to index into second vector given here", .{idx});
}
@@ -24351,7 +24357,7 @@ fn zirSelect(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.InstData) C
const vec_len_u64 = switch (pred_ty.zigTypeTag(zcu)) {
.vector, .array => pred_ty.arrayLen(zcu),
- else => return sema.fail(block, pred_src, "expected vector or array, found '{}'", .{pred_ty.fmt(pt)}),
+ else => return sema.fail(block, pred_src, "expected vector or array, found '{f}'", .{pred_ty.fmt(pt)}),
};
const vec_len: u32 = @intCast(try sema.usizeCast(block, pred_src, vec_len_u64));
@@ -24611,7 +24617,7 @@ fn zirMulAdd(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!Air.
switch (ty.scalarType(zcu).zigTypeTag(zcu)) {
.comptime_float, .float => {},
- else => return sema.fail(block, src, "expected vector of floats or float type, found '{}'", .{ty.fmt(pt)}),
+ else => return sema.fail(block, src, "expected vector of floats or float type, found '{f}'", .{ty.fmt(pt)}),
}
const runtime_src = if (maybe_mulend1) |mulend1_val| rs: {
@@ -24712,7 +24718,7 @@ fn zirBuiltinCall(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
const args_ty = sema.typeOf(args);
if (!args_ty.isTuple(zcu)) {
- return sema.fail(block, args_src, "expected a tuple, found '{}'", .{args_ty.fmt(pt)});
+ return sema.fail(block, args_src, "expected a tuple, found '{f}'", .{args_ty.fmt(pt)});
}
const resolved_args: []Air.Inst.Ref = try sema.arena.alloc(Air.Inst.Ref, args_ty.structFieldCount(zcu));
@@ -24757,12 +24763,12 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Ins
try sema.checkPtrType(block, inst_src, parent_ptr_ty, true);
const parent_ptr_info = parent_ptr_ty.ptrInfo(zcu);
if (parent_ptr_info.flags.size != .one) {
- return sema.fail(block, inst_src, "expected single pointer type, found '{}'", .{parent_ptr_ty.fmt(pt)});
+ return sema.fail(block, inst_src, "expected single pointer type, found '{f}'", .{parent_ptr_ty.fmt(pt)});
}
const parent_ty: Type = .fromInterned(parent_ptr_info.child);
switch (parent_ty.zigTypeTag(zcu)) {
.@"struct", .@"union" => {},
- else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{}'", .{parent_ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, inst_src, "expected pointer to struct or union type, found '{f}'", .{parent_ptr_ty.fmt(pt)}),
}
try parent_ty.resolveLayout(pt);
@@ -24912,7 +24918,7 @@ fn zirFieldParentPtr(sema: *Sema, block: *Block, extended: Zir.Inst.Extended.Ins
}
if (field.index != field_index) {
- return sema.fail(block, inst_src, "field '{}' has index '{d}' but pointer value is index '{d}' of struct '{}'", .{
+ return sema.fail(block, inst_src, "field '{f}' has index '{d}' but pointer value is index '{d}' of struct '{f}'", .{
field_name.fmt(ip), field_index, field.index, parent_ty.fmt(pt),
});
}
@@ -25371,10 +25377,10 @@ fn zirMemcpy(
const msg = msg: {
const msg = try sema.errMsg(src, "unknown copy length", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(dest_src, msg, "destination type '{}' provides no length", .{
+ try sema.errNote(dest_src, msg, "destination type '{f}' provides no length", .{
dest_ty.fmt(pt),
});
- try sema.errNote(src_src, msg, "source type '{}' provides no length", .{
+ try sema.errNote(src_src, msg, "source type '{f}' provides no length", .{
src_ty.fmt(pt),
});
break :msg msg;
@@ -25398,7 +25404,7 @@ fn zirMemcpy(
if (imc != .ok) return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(
src,
- "pointer element type '{}' cannot coerce into element type '{}'",
+ "pointer element type '{f}' cannot coerce into element type '{f}'",
.{ src_elem_ty.fmt(pt), dest_elem_ty.fmt(pt) },
);
errdefer msg.destroy(sema.gpa);
@@ -25417,10 +25423,10 @@ fn zirMemcpy(
const msg = msg: {
const msg = try sema.errMsg(src, "non-matching copy lengths", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(dest_src, msg, "length {} here", .{
+ try sema.errNote(dest_src, msg, "length {f} here", .{
dest_len_val.fmtValueSema(pt, sema),
});
- try sema.errNote(src_src, msg, "length {} here", .{
+ try sema.errNote(src_src, msg, "length {f} here", .{
src_len_val.fmtValueSema(pt, sema),
});
break :msg msg;
@@ -25635,7 +25641,7 @@ fn zirMemset(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError!void
return sema.failWithOwnedErrorMsg(block, msg: {
const msg = try sema.errMsg(src, "unknown @memset length", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(dest_src, msg, "destination type '{}' provides no length", .{
+ try sema.errNote(dest_src, msg, "destination type '{f}' provides no length", .{
dest_ptr_ty.fmt(pt),
});
break :msg msg;
@@ -25815,7 +25821,7 @@ fn zirCUndef(
const src = block.builtinCallArgSrc(extra.node, 0);
const name = try sema.resolveConstString(block, src, extra.operand, .{ .simple = .operand_cUndef_macro_name });
- try block.c_import_buf.?.writer().print("#undef {s}\n", .{name});
+ try block.c_import_buf.?.print("#undef {s}\n", .{name});
return .void_value;
}
@@ -25828,7 +25834,7 @@ fn zirCInclude(
const src = block.builtinCallArgSrc(extra.node, 0);
const name = try sema.resolveConstString(block, src, extra.operand, .{ .simple = .operand_cInclude_file_name });
- try block.c_import_buf.?.writer().print("#include <{s}>\n", .{name});
+ try block.c_import_buf.?.print("#include <{s}>\n", .{name});
return .void_value;
}
@@ -25847,9 +25853,9 @@ fn zirCDefine(
const rhs = try sema.resolveInst(extra.rhs);
if (sema.typeOf(rhs).zigTypeTag(zcu) != .void) {
const value = try sema.resolveConstString(block, val_src, extra.rhs, .{ .simple = .operand_cDefine_macro_value });
- try block.c_import_buf.?.writer().print("#define {s} {s}\n", .{ name, value });
+ try block.c_import_buf.?.print("#define {s} {s}\n", .{ name, value });
} else {
- try block.c_import_buf.?.writer().print("#define {s}\n", .{name});
+ try block.c_import_buf.?.print("#define {s}\n", .{name});
}
return .void_value;
}
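The zirCUndef/zirCInclude/zirCDefine hunks above drop the intermediate .writer() adapter and call .print on the C-import buffer directly, i.e. the buffer is now treated as a writer in its own right rather than as an ArrayList(u8). Below is a hedged sketch of that calling convention, not code from this commit; std.io.Writer.fixed and buffered() in the test harness are assumptions about the new writer interface, and emitDefine is a hypothetical helper.

const std = @import("std");

// Emit a C '#define' line through the new writer interface directly,
// mirroring the shape of the zirCDefine change (illustrative only).
fn emitDefine(w: *std.io.Writer, name: []const u8, value: ?[]const u8) std.io.Writer.Error!void {
    if (value) |v| {
        try w.print("#define {s} {s}\n", .{ name, v });
    } else {
        try w.print("#define {s}\n", .{name});
    }
}

test emitDefine {
    var buf: [64]u8 = undefined;
    // A fixed-buffer writer stands in for the C-import buffer here.
    var w = std.io.Writer.fixed(&buf);
    try emitDefine(&w, "FOO", "1");
    try emitDefine(&w, "BAR", null);
    try std.testing.expectEqualStrings("#define FOO 1\n#define BAR\n", w.buffered());
}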
@@ -26067,7 +26073,7 @@ fn zirBuiltinExtern(
}
if (!try sema.validateExternType(ty, .other)) {
const msg = msg: {
- const msg = try sema.errMsg(ty_src, "extern symbol cannot have type '{}'", .{ty.fmt(pt)});
+ const msg = try sema.errMsg(ty_src, "extern symbol cannot have type '{f}'", .{ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, ty_src, ty, .other);
break :msg msg;
@@ -26307,7 +26313,7 @@ pub fn validateVarType(
if (is_extern) {
if (!try sema.validateExternType(var_ty, .other)) {
const msg = msg: {
- const msg = try sema.errMsg(src, "extern variable cannot have type '{}'", .{var_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "extern variable cannot have type '{f}'", .{var_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, src, var_ty, .other);
break :msg msg;
@@ -26319,7 +26325,7 @@ pub fn validateVarType(
return sema.fail(
block,
src,
- "non-extern variable with opaque type '{}'",
+ "non-extern variable with opaque type '{f}'",
.{var_ty.fmt(pt)},
);
}
@@ -26328,7 +26334,7 @@ pub fn validateVarType(
if (!try var_ty.comptimeOnlySema(pt)) return;
const msg = msg: {
- const msg = try sema.errMsg(src, "variable of type '{}' must be const or comptime", .{var_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "variable of type '{f}' must be const or comptime", .{var_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsComptime(msg, src, var_ty);
@@ -26378,7 +26384,7 @@ fn explainWhyTypeIsComptimeInner(
=> return,
.@"fn" => {
- try sema.errNote(src_loc, msg, "use '*const {}' for a function pointer type", .{ty.fmt(pt)});
+ try sema.errNote(src_loc, msg, "use '*const {f}' for a function pointer type", .{ty.fmt(pt)});
},
.type => {
@@ -26394,7 +26400,7 @@ fn explainWhyTypeIsComptimeInner(
=> return,
.@"opaque" => {
- try sema.errNote(src_loc, msg, "opaque type '{}' has undefined size", .{ty.fmt(pt)});
+ try sema.errNote(src_loc, msg, "opaque type '{f}' has undefined size", .{ty.fmt(pt)});
},
.array, .vector => {
@@ -26581,7 +26587,7 @@ fn explainWhyTypeIsNotExtern(
if (!ty.isConstPtr(zcu) and pointee_ty.zigTypeTag(zcu) == .@"fn") {
try sema.errNote(src_loc, msg, "pointer to extern function must be 'const'", .{});
} else if (try ty.comptimeOnlySema(pt)) {
- try sema.errNote(src_loc, msg, "pointer to comptime-only type '{}'", .{pointee_ty.fmt(pt)});
+ try sema.errNote(src_loc, msg, "pointer to comptime-only type '{f}'", .{pointee_ty.fmt(pt)});
try sema.explainWhyTypeIsComptime(msg, src_loc, ty);
}
try sema.explainWhyTypeIsNotExtern(msg, src_loc, pointee_ty, .other);
@@ -26609,7 +26615,7 @@ fn explainWhyTypeIsNotExtern(
},
.@"enum" => {
const tag_ty = ty.intTagType(zcu);
- try sema.errNote(src_loc, msg, "enum tag type '{}' is not extern compatible", .{tag_ty.fmt(pt)});
+ try sema.errNote(src_loc, msg, "enum tag type '{f}' is not extern compatible", .{tag_ty.fmt(pt)});
try sema.explainWhyTypeIsNotExtern(msg, src_loc, tag_ty, position);
},
.@"struct" => try sema.errNote(src_loc, msg, "only extern structs and ABI sized packed structs are extern compatible", .{}),
@@ -27045,7 +27051,7 @@ fn fieldVal(
return sema.fail(
block,
field_name_src,
- "no member named '{}' in '{}'",
+ "no member named '{f}' in '{f}'",
.{ field_name.fmt(ip), object_ty.fmt(pt) },
);
}
@@ -27069,7 +27075,7 @@ fn fieldVal(
return sema.fail(
block,
field_name_src,
- "no member named '{}' in '{}'",
+ "no member named '{f}' in '{f}'",
.{ field_name.fmt(ip), object_ty.fmt(pt) },
);
}
@@ -27089,7 +27095,7 @@ fn fieldVal(
switch (ip.indexToKey(child_type.toIntern())) {
.error_set_type => |error_set_type| blk: {
if (error_set_type.nameIndex(ip, field_name) != null) break :blk;
- return sema.fail(block, src, "no error named '{}' in '{}'", .{
+ return sema.fail(block, src, "no error named '{f}' in '{f}'", .{
field_name.fmt(ip), child_type.fmt(pt),
});
},
@@ -27144,7 +27150,7 @@ fn fieldVal(
return sema.failWithBadMemberAccess(block, child_type, src, field_name);
},
else => return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "type '{}' has no members", .{child_type.fmt(pt)});
+ const msg = try sema.errMsg(src, "type '{f}' has no members", .{child_type.fmt(pt)});
errdefer msg.destroy(sema.gpa);
if (child_type.isSlice(zcu)) try sema.errNote(src, msg, "slice values have 'len' and 'ptr' members", .{});
if (child_type.zigTypeTag(zcu) == .array) try sema.errNote(src, msg, "array values have 'len' member", .{});
@@ -27190,7 +27196,7 @@ fn fieldPtr(
const object_ptr_ty = sema.typeOf(object_ptr);
const object_ty = switch (object_ptr_ty.zigTypeTag(zcu)) {
.pointer => object_ptr_ty.childType(zcu),
- else => return sema.fail(block, object_ptr_src, "expected pointer, found '{}'", .{object_ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, object_ptr_src, "expected pointer, found '{f}'", .{object_ptr_ty.fmt(pt)}),
};
// Zig allows dereferencing a single pointer during field lookup. Note that
@@ -27243,7 +27249,7 @@ fn fieldPtr(
return sema.fail(
block,
field_name_src,
- "no member named '{}' in '{}'",
+ "no member named '{f}' in '{f}'",
.{ field_name.fmt(ip), object_ty.fmt(pt) },
);
}
@@ -27298,7 +27304,7 @@ fn fieldPtr(
return sema.fail(
block,
field_name_src,
- "no member named '{}' in '{}'",
+ "no member named '{f}' in '{f}'",
.{ field_name.fmt(ip), object_ty.fmt(pt) },
);
}
@@ -27321,7 +27327,7 @@ fn fieldPtr(
if (error_set_type.nameIndex(ip, field_name) != null) {
break :blk;
}
- return sema.fail(block, src, "no error named '{}' in '{}'", .{
+ return sema.fail(block, src, "no error named '{f}' in '{f}'", .{
field_name.fmt(ip), child_type.fmt(pt),
});
},
@@ -27375,7 +27381,7 @@ fn fieldPtr(
}
return sema.failWithBadMemberAccess(block, child_type, field_name_src, field_name);
},
- else => return sema.fail(block, src, "type '{}' has no members", .{child_type.fmt(pt)}),
+ else => return sema.fail(block, src, "type '{f}' has no members", .{child_type.fmt(pt)}),
}
},
.@"struct" => {
@@ -27430,7 +27436,7 @@ fn fieldCallBind(
const inner_ty = if (raw_ptr_ty.zigTypeTag(zcu) == .pointer and (raw_ptr_ty.ptrSize(zcu) == .one or raw_ptr_ty.ptrSize(zcu) == .c))
raw_ptr_ty.childType(zcu)
else
- return sema.fail(block, raw_ptr_src, "expected single pointer, found '{}'", .{raw_ptr_ty.fmt(pt)});
+ return sema.fail(block, raw_ptr_src, "expected single pointer, found '{f}'", .{raw_ptr_ty.fmt(pt)});
// Optionally dereference a second pointer to get the concrete type.
const is_double_ptr = inner_ty.zigTypeTag(zcu) == .pointer and inner_ty.ptrSize(zcu) == .one;
@@ -27549,7 +27555,7 @@ fn fieldCallBind(
};
const msg = msg: {
- const msg = try sema.errMsg(src, "no field or member function named '{}' in '{}'", .{
+ const msg = try sema.errMsg(src, "no field or member function named '{f}' in '{f}'", .{
field_name.fmt(ip),
concrete_ty.fmt(pt),
});
@@ -27559,7 +27565,7 @@ fn fieldCallBind(
try sema.errNote(
zcu.navSrcLoc(nav_index),
msg,
- "'{}' is not a member function",
+ "'{f}' is not a member function",
.{field_name.fmt(ip)},
);
}
@@ -27627,7 +27633,7 @@ fn namespaceLookup(
if (try sema.lookupInNamespace(block, namespace, decl_name)) |lookup| {
if (!lookup.accessible) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "'{}' is not marked 'pub'", .{
+ const msg = try sema.errMsg(src, "'{f}' is not marked 'pub'", .{
decl_name.fmt(&zcu.intern_pool),
});
errdefer msg.destroy(gpa);
@@ -27865,12 +27871,12 @@ fn tupleFieldIndex(
assert(!field_name.eqlSlice("len", ip));
if (field_name.toUnsigned(ip)) |field_index| {
if (field_index < tuple_ty.structFieldCount(pt.zcu)) return field_index;
- return sema.fail(block, field_name_src, "index '{}' out of bounds of tuple '{}'", .{
+ return sema.fail(block, field_name_src, "index '{f}' out of bounds of tuple '{f}'", .{
field_name.fmt(ip), tuple_ty.fmt(pt),
});
}
- return sema.fail(block, field_name_src, "no field named '{}' in tuple '{}'", .{
+ return sema.fail(block, field_name_src, "no field named '{f}' in tuple '{f}'", .{
field_name.fmt(ip), tuple_ty.fmt(pt),
});
}
@@ -27957,7 +27963,7 @@ fn unionFieldPtr(
const msg = try sema.errMsg(src, "cannot initialize 'noreturn' field of union", .{});
errdefer msg.destroy(sema.gpa);
- try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared here", .{
+ try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{
field_name.fmt(ip),
});
try sema.addDeclaredHereNote(msg, union_ty);
@@ -27991,7 +27997,7 @@ fn unionFieldPtr(
const msg = msg: {
const active_index = Type.fromInterned(union_obj.enum_tag_ty).enumTagFieldIndex(Value.fromInterned(un.tag), zcu).?;
const active_field_name = Type.fromInterned(union_obj.enum_tag_ty).enumFieldName(active_index, zcu);
- const msg = try sema.errMsg(src, "access of union field '{}' while field '{}' is active", .{
+ const msg = try sema.errMsg(src, "access of union field '{f}' while field '{f}' is active", .{
field_name.fmt(ip),
active_field_name.fmt(ip),
});
@@ -28059,7 +28065,7 @@ fn unionFieldVal(
const msg = msg: {
const active_index = Type.fromInterned(union_obj.enum_tag_ty).enumTagFieldIndex(Value.fromInterned(un.tag), zcu).?;
const active_field_name = Type.fromInterned(union_obj.enum_tag_ty).enumFieldName(active_index, zcu);
- const msg = try sema.errMsg(src, "access of union field '{}' while field '{}' is active", .{
+ const msg = try sema.errMsg(src, "access of union field '{f}' while field '{f}' is active", .{
field_name.fmt(ip), active_field_name.fmt(ip),
});
errdefer msg.destroy(sema.gpa);
@@ -28117,7 +28123,7 @@ fn elemPtr(
const indexable_ty = switch (indexable_ptr_ty.zigTypeTag(zcu)) {
.pointer => indexable_ptr_ty.childType(zcu),
- else => return sema.fail(block, indexable_ptr_src, "expected pointer, found '{}'", .{indexable_ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, indexable_ptr_src, "expected pointer, found '{f}'", .{indexable_ptr_ty.fmt(pt)}),
};
try sema.checkIndexable(block, src, indexable_ty);
@@ -28288,7 +28294,7 @@ fn validateRuntimeElemAccess(
const msg = msg: {
const msg = try sema.errMsg(
elem_index_src,
- "values of type '{}' must be comptime-known, but index value is runtime-known",
+ "values of type '{f}' must be comptime-known, but index value is runtime-known",
.{parent_ty.fmt(sema.pt)},
);
errdefer msg.destroy(sema.gpa);
@@ -28304,7 +28310,7 @@ fn validateRuntimeElemAccess(
const target = zcu.getTarget();
const as = parent_ty.ptrAddressSpace(zcu);
if (target_util.arePointersLogical(target, as)) {
- return sema.fail(block, elem_index_src, "cannot access element of logical pointer '{}'", .{parent_ty.fmt(pt)});
+ return sema.fail(block, elem_index_src, "cannot access element of logical pointer '{f}'", .{parent_ty.fmt(pt)});
}
}
}
@@ -29000,7 +29006,7 @@ fn coerceExtra(
return sema.fail(
block,
inst_src,
- "array literal requires address-of operator (&) to coerce to slice type '{}'",
+ "array literal requires address-of operator (&) to coerce to slice type '{f}'",
.{dest_ty.fmt(pt)},
);
}
@@ -29027,7 +29033,7 @@ fn coerceExtra(
// pointer to tuple to slice
if (!dest_info.flags.is_const) {
const err_msg = err_msg: {
- const err_msg = try sema.errMsg(inst_src, "cannot cast pointer to tuple to '{}'", .{dest_ty.fmt(pt)});
+ const err_msg = try sema.errMsg(inst_src, "cannot cast pointer to tuple to '{f}'", .{dest_ty.fmt(pt)});
errdefer err_msg.destroy(sema.gpa);
try sema.errNote(dest_ty_src, err_msg, "pointers to tuples can only coerce to constant pointers", .{});
break :err_msg err_msg;
@@ -29082,7 +29088,7 @@ fn coerceExtra(
// comptime-known integer to other number
if (!(try sema.intFitsInType(val, dest_ty, null))) {
if (!opts.report_err) return error.NotCoercible;
- return sema.fail(block, inst_src, "type '{}' cannot represent integer value '{}'", .{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) });
+ return sema.fail(block, inst_src, "type '{f}' cannot represent integer value '{f}'", .{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) });
}
return switch (zcu.intern_pool.indexToKey(val.toIntern())) {
.undef => try pt.undefRef(dest_ty),
@@ -29124,7 +29130,7 @@ fn coerceExtra(
return sema.fail(
block,
inst_src,
- "type '{}' cannot represent float value '{}'",
+ "type '{f}' cannot represent float value '{f}'",
.{ dest_ty.fmt(pt), val.fmtValueSema(pt, sema) },
);
}
@@ -29157,7 +29163,7 @@ fn coerceExtra(
// return sema.fail(
// block,
// inst_src,
- // "type '{}' cannot represent integer value '{}'",
+ // "type '{f}' cannot represent integer value '{}'",
// .{ dest_ty.fmt(pt), val },
// );
//}
@@ -29171,7 +29177,7 @@ fn coerceExtra(
const val = try sema.resolveConstDefinedValue(block, LazySrcLoc.unneeded, inst, undefined);
const string = zcu.intern_pool.indexToKey(val.toIntern()).enum_literal;
const field_index = dest_ty.enumFieldIndex(string, zcu) orelse {
- return sema.fail(block, inst_src, "no field named '{}' in enum '{}'", .{
+ return sema.fail(block, inst_src, "no field named '{f}' in enum '{f}'", .{
string.fmt(&zcu.intern_pool), dest_ty.fmt(pt),
});
};
@@ -29320,11 +29326,11 @@ fn coerceExtra(
}
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt) });
+ const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{ dest_ty.fmt(pt), inst_ty.fmt(pt) });
errdefer msg.destroy(sema.gpa);
if (!can_coerce_to) {
- try sema.errNote(inst_src, msg, "cannot coerce to '{}'", .{dest_ty.fmt(pt)});
+ try sema.errNote(inst_src, msg, "cannot coerce to '{f}'", .{dest_ty.fmt(pt)});
}
// E!T to T
@@ -29513,13 +29519,13 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.comptime_int_not_coercible => |int| {
- try sema.errNote(src, msg, "type '{}' cannot represent value '{}'", .{
+ try sema.errNote(src, msg, "type '{f}' cannot represent value '{f}'", .{
int.wanted.fmt(pt), int.actual.fmtValueSema(pt, sema),
});
break;
},
.error_union_payload => |pair| {
- try sema.errNote(src, msg, "error union payload '{}' cannot cast into error union payload '{}'", .{
+ try sema.errNote(src, msg, "error union payload '{f}' cannot cast into error union payload '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
@@ -29532,18 +29538,18 @@ const InMemoryCoercionResult = union(enum) {
},
.array_sentinel => |sentinel| {
if (sentinel.actual.toIntern() != .unreachable_value) {
- try sema.errNote(src, msg, "array sentinel '{}' cannot cast into array sentinel '{}'", .{
+ try sema.errNote(src, msg, "array sentinel '{f}' cannot cast into array sentinel '{f}'", .{
sentinel.actual.fmtValueSema(pt, sema), sentinel.wanted.fmtValueSema(pt, sema),
});
} else {
- try sema.errNote(src, msg, "destination array requires '{}' sentinel", .{
+ try sema.errNote(src, msg, "destination array requires '{f}' sentinel", .{
sentinel.wanted.fmtValueSema(pt, sema),
});
}
break;
},
.array_elem => |pair| {
- try sema.errNote(src, msg, "array element type '{}' cannot cast into array element type '{}'", .{
+ try sema.errNote(src, msg, "array element type '{f}' cannot cast into array element type '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
@@ -29555,19 +29561,19 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.vector_elem => |pair| {
- try sema.errNote(src, msg, "vector element type '{}' cannot cast into vector element type '{}'", .{
+ try sema.errNote(src, msg, "vector element type '{f}' cannot cast into vector element type '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
},
.optional_shape => |pair| {
- try sema.errNote(src, msg, "optional type child '{}' cannot cast into optional type child '{}'", .{
+ try sema.errNote(src, msg, "optional type child '{f}' cannot cast into optional type child '{f}'", .{
pair.actual.optionalChild(pt.zcu).fmt(pt), pair.wanted.optionalChild(pt.zcu).fmt(pt),
});
break;
},
.optional_child => |pair| {
- try sema.errNote(src, msg, "optional type child '{}' cannot cast into optional type child '{}'", .{
+ try sema.errNote(src, msg, "optional type child '{f}' cannot cast into optional type child '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
@@ -29578,7 +29584,7 @@ const InMemoryCoercionResult = union(enum) {
},
.missing_error => |missing_errors| {
for (missing_errors) |err| {
- try sema.errNote(src, msg, "'error.{}' not a member of destination error set", .{err.fmt(&pt.zcu.intern_pool)});
+ try sema.errNote(src, msg, "'error.{f}' not a member of destination error set", .{err.fmt(&pt.zcu.intern_pool)});
}
break;
},
@@ -29631,7 +29637,7 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.fn_param => |param| {
- try sema.errNote(src, msg, "parameter {d} '{}' cannot cast into '{}'", .{
+ try sema.errNote(src, msg, "parameter {d} '{f}' cannot cast into '{f}'", .{
param.index, param.actual.fmt(pt), param.wanted.fmt(pt),
});
cur = param.child;
@@ -29641,13 +29647,13 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.fn_return_type => |pair| {
- try sema.errNote(src, msg, "return type '{}' cannot cast into return type '{}'", .{
+ try sema.errNote(src, msg, "return type '{f}' cannot cast into return type '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
},
.ptr_child => |pair| {
- try sema.errNote(src, msg, "pointer type child '{}' cannot cast into pointer type child '{}'", .{
+ try sema.errNote(src, msg, "pointer type child '{f}' cannot cast into pointer type child '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
cur = pair.child;
@@ -29658,11 +29664,11 @@ const InMemoryCoercionResult = union(enum) {
},
.ptr_sentinel => |sentinel| {
if (sentinel.actual.toIntern() != .unreachable_value) {
- try sema.errNote(src, msg, "pointer sentinel '{}' cannot cast into pointer sentinel '{}'", .{
+ try sema.errNote(src, msg, "pointer sentinel '{f}' cannot cast into pointer sentinel '{f}'", .{
sentinel.actual.fmtValueSema(pt, sema), sentinel.wanted.fmtValueSema(pt, sema),
});
} else {
- try sema.errNote(src, msg, "destination pointer requires '{}' sentinel", .{
+ try sema.errNote(src, msg, "destination pointer requires '{f}' sentinel", .{
sentinel.wanted.fmtValueSema(pt, sema),
});
}
@@ -29676,11 +29682,11 @@ const InMemoryCoercionResult = union(enum) {
const wanted_allow_zero = pair.wanted.ptrAllowsZero(pt.zcu);
const actual_allow_zero = pair.actual.ptrAllowsZero(pt.zcu);
if (actual_allow_zero and !wanted_allow_zero) {
- try sema.errNote(src, msg, "'{}' could have null values which are illegal in type '{}'", .{
+ try sema.errNote(src, msg, "'{f}' could have null values which are illegal in type '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
} else {
- try sema.errNote(src, msg, "mutable '{}' would allow illegal null values stored to type '{}'", .{
+ try sema.errNote(src, msg, "mutable '{f}' would allow illegal null values stored to type '{f}'", .{
pair.wanted.fmt(pt), pair.actual.fmt(pt),
});
}
@@ -29692,7 +29698,7 @@ const InMemoryCoercionResult = union(enum) {
if (actual_const and !wanted_const) {
try sema.errNote(src, msg, "cast discards const qualifier", .{});
} else {
- try sema.errNote(src, msg, "mutable '{}' would allow illegal const pointers stored to type '{}'", .{
+ try sema.errNote(src, msg, "mutable '{f}' would allow illegal const pointers stored to type '{f}'", .{
pair.wanted.fmt(pt), pair.actual.fmt(pt),
});
}
@@ -29704,7 +29710,7 @@ const InMemoryCoercionResult = union(enum) {
if (actual_volatile and !wanted_volatile) {
try sema.errNote(src, msg, "cast discards volatile qualifier", .{});
} else {
- try sema.errNote(src, msg, "mutable '{}' would allow illegal volatile pointers stored to type '{}'", .{
+ try sema.errNote(src, msg, "mutable '{f}' would allow illegal volatile pointers stored to type '{f}'", .{
pair.wanted.fmt(pt), pair.actual.fmt(pt),
});
}
@@ -29730,13 +29736,13 @@ const InMemoryCoercionResult = union(enum) {
break;
},
.double_ptr_to_anyopaque => |pair| {
- try sema.errNote(src, msg, "cannot implicitly cast double pointer '{}' to anyopaque pointer '{}'", .{
+ try sema.errNote(src, msg, "cannot implicitly cast double pointer '{f}' to anyopaque pointer '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
break;
},
.slice_to_anyopaque => |pair| {
- try sema.errNote(src, msg, "cannot implicitly cast slice '{}' to anyopaque pointer '{}'", .{
+ try sema.errNote(src, msg, "cannot implicitly cast slice '{f}' to anyopaque pointer '{f}'", .{
pair.actual.fmt(pt), pair.wanted.fmt(pt),
});
try sema.errNote(src, msg, "consider using '.ptr'", .{});
@@ -30510,7 +30516,7 @@ fn coerceVarArgParam(
const coerced_ty = sema.typeOf(coerced);
if (!try sema.validateExternType(coerced_ty, .param_ty)) {
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "cannot pass '{}' to variadic function", .{coerced_ty.fmt(pt)});
+ const msg = try sema.errMsg(inst_src, "cannot pass '{f}' to variadic function", .{coerced_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, inst_src, coerced_ty, .param_ty);
@@ -30613,7 +30619,7 @@ fn storePtr2(
// is not comptime-only. We can hit this case with a `@ptrFromInt` pointer.
if (try elem_ty.comptimeOnlySema(pt)) {
return sema.failWithOwnedErrorMsg(block, msg: {
- const msg = try sema.errMsg(src, "cannot store comptime-only type '{}' at runtime", .{elem_ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "cannot store comptime-only type '{f}' at runtime", .{elem_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(ptr_src, msg, "operation is runtime due to this pointer", .{});
break :msg msg;
@@ -30646,7 +30652,7 @@ fn storePtr2(
});
return;
}
- return sema.fail(block, ptr_src, "unable to determine vector element index of type '{}'", .{
+ return sema.fail(block, ptr_src, "unable to determine vector element index of type '{f}'", .{
ptr_ty.fmt(pt),
});
}
@@ -30815,19 +30821,19 @@ fn storePtrVal(
.{},
),
.undef => return sema.failWithUseOfUndef(block, src),
- .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {}", .{err_name.fmt(ip)}),
+ .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {f}", .{err_name.fmt(ip)}),
.null_payload => return sema.fail(block, src, "attempt to use null value", .{}),
.inactive_union_field => return sema.fail(block, src, "access of inactive union field", .{}),
.needed_well_defined => |ty| return sema.fail(
block,
src,
- "comptime dereference requires '{}' to have a well-defined layout",
+ "comptime dereference requires '{f}' to have a well-defined layout",
.{ty.fmt(pt)},
),
.out_of_bounds => |ty| return sema.fail(
block,
src,
- "dereference of '{}' exceeds bounds of containing decl of type '{}'",
+ "dereference of '{f}' exceeds bounds of containing decl of type '{f}'",
.{ ptr_ty.fmt(pt), ty.fmt(pt) },
),
.exceeds_host_size => return sema.fail(block, src, "bit-pointer target exceeds host size", .{}),
@@ -30853,7 +30859,7 @@ fn bitCast(
const old_bits = old_ty.bitSize(zcu);
if (old_bits != dest_bits) {
- return sema.fail(block, inst_src, "@bitCast size mismatch: destination type '{}' has {d} bits but source type '{}' has {d} bits", .{
+ return sema.fail(block, inst_src, "@bitCast size mismatch: destination type '{f}' has {d} bits but source type '{f}' has {d} bits", .{
dest_ty.fmt(pt),
dest_bits,
old_ty.fmt(pt),
@@ -30971,7 +30977,7 @@ fn coerceCompatiblePtrs(
const inst_ty = sema.typeOf(inst);
if (try sema.resolveValue(inst)) |val| {
if (!val.isUndef(zcu) and val.isNull(zcu) and !dest_ty.isAllowzeroPtr(zcu)) {
- return sema.fail(block, inst_src, "null pointer casted to type '{}'", .{dest_ty.fmt(pt)});
+ return sema.fail(block, inst_src, "null pointer casted to type '{f}'", .{dest_ty.fmt(pt)});
}
// The comptime Value representation is compatible with both types.
return Air.internedToRef(
@@ -31017,7 +31023,7 @@ fn coerceEnumToUnion(
const tag_ty = union_ty.unionTagType(zcu) orelse {
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{
+ const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{
union_ty.fmt(pt), inst_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -31031,7 +31037,7 @@ fn coerceEnumToUnion(
const enum_tag = try sema.coerce(block, tag_ty, inst, inst_src);
if (try sema.resolveDefinedValue(block, inst_src, enum_tag)) |val| {
const field_index = union_ty.unionTagFieldIndex(val, pt.zcu) orelse {
- return sema.fail(block, inst_src, "union '{}' has no tag with value '{}'", .{
+ return sema.fail(block, inst_src, "union '{f}' has no tag with value '{f}'", .{
union_ty.fmt(pt), val.fmtValueSema(pt, sema),
});
};
@@ -31045,7 +31051,7 @@ fn coerceEnumToUnion(
errdefer msg.destroy(sema.gpa);
const field_name = union_obj.loadTagType(ip).names.get(ip)[field_index];
- try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared here", .{
+ try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{
field_name.fmt(ip),
});
try sema.addDeclaredHereNote(msg, union_ty);
@@ -31056,13 +31062,13 @@ fn coerceEnumToUnion(
const opv = (try sema.typeHasOnePossibleValue(field_ty)) orelse {
const msg = msg: {
const field_name = union_obj.loadTagType(ip).names.get(ip)[field_index];
- const msg = try sema.errMsg(inst_src, "coercion from enum '{}' to union '{}' must initialize '{}' field '{}'", .{
+ const msg = try sema.errMsg(inst_src, "coercion from enum '{f}' to union '{f}' must initialize '{f}' field '{f}'", .{
inst_ty.fmt(pt), union_ty.fmt(pt),
field_ty.fmt(pt), field_name.fmt(ip),
});
errdefer msg.destroy(sema.gpa);
- try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' declared here", .{
+ try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' declared here", .{
field_name.fmt(ip),
});
try sema.addDeclaredHereNote(msg, union_ty);
@@ -31078,7 +31084,7 @@ fn coerceEnumToUnion(
if (tag_ty.isNonexhaustiveEnum(zcu)) {
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "runtime coercion to union '{}' from non-exhaustive enum", .{
+ const msg = try sema.errMsg(inst_src, "runtime coercion to union '{f}' from non-exhaustive enum", .{
union_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -31097,7 +31103,7 @@ fn coerceEnumToUnion(
if (Type.fromInterned(field_ty).zigTypeTag(zcu) == .noreturn) {
const err_msg = msg orelse try sema.errMsg(
inst_src,
- "runtime coercion from enum '{}' to union '{}' which has a 'noreturn' field",
+ "runtime coercion from enum '{f}' to union '{f}' which has a 'noreturn' field",
.{ tag_ty.fmt(pt), union_ty.fmt(pt) },
);
msg = err_msg;
@@ -31120,7 +31126,7 @@ fn coerceEnumToUnion(
const msg = msg: {
const msg = try sema.errMsg(
inst_src,
- "runtime coercion from enum '{}' to union '{}' which has non-void fields",
+ "runtime coercion from enum '{f}' to union '{f}' which has non-void fields",
.{ tag_ty.fmt(pt), union_ty.fmt(pt) },
);
errdefer msg.destroy(sema.gpa);
@@ -31129,7 +31135,7 @@ fn coerceEnumToUnion(
const field_name = union_obj.loadTagType(ip).names.get(ip)[field_index];
const field_ty: Type = .fromInterned(union_obj.field_types.get(ip)[field_index]);
if (!(try field_ty.hasRuntimeBitsSema(pt))) continue;
- try sema.addFieldErrNote(union_ty, field_index, msg, "field '{}' has type '{}'", .{
+ try sema.addFieldErrNote(union_ty, field_index, msg, "field '{f}' has type '{f}'", .{
field_name.fmt(ip),
field_ty.fmt(pt),
});
@@ -31170,7 +31176,7 @@ fn coerceArrayLike(
const dest_len = try sema.usizeCast(block, dest_ty_src, dest_ty.arrayLen(zcu));
if (dest_len != inst_len) {
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{
+ const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{
dest_ty.fmt(pt), inst_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -31258,7 +31264,7 @@ fn coerceTupleToArray(
if (dest_len != inst_len) {
const msg = msg: {
- const msg = try sema.errMsg(inst_src, "expected type '{}', found '{}'", .{
+ const msg = try sema.errMsg(inst_src, "expected type '{f}', found '{f}'", .{
dest_ty.fmt(pt), inst_ty.fmt(pt),
});
errdefer msg.destroy(sema.gpa);
@@ -31734,10 +31740,10 @@ fn analyzeLoad(
const ptr_ty = sema.typeOf(ptr);
const elem_ty = switch (ptr_ty.zigTypeTag(zcu)) {
.pointer => ptr_ty.childType(zcu),
- else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ty.fmt(pt)}),
};
if (elem_ty.zigTypeTag(zcu) == .@"opaque") {
- return sema.fail(block, ptr_src, "cannot load opaque type '{}'", .{elem_ty.fmt(pt)});
+ return sema.fail(block, ptr_src, "cannot load opaque type '{f}'", .{elem_ty.fmt(pt)});
}
if (try sema.typeHasOnePossibleValue(elem_ty)) |opv| {
@@ -31758,7 +31764,7 @@ fn analyzeLoad(
const bin_op = sema.getTmpAir().extraData(Air.Bin, ty_pl.payload).data;
return block.addBinOp(.ptr_elem_val, bin_op.lhs, bin_op.rhs);
}
- return sema.fail(block, ptr_src, "unable to determine vector element index of type '{}'", .{
+ return sema.fail(block, ptr_src, "unable to determine vector element index of type '{f}'", .{
ptr_ty.fmt(pt),
});
}
@@ -32046,7 +32052,7 @@ fn analyzeSlice(
const ptr_ptr_ty = sema.typeOf(ptr_ptr);
const ptr_ptr_child_ty = switch (ptr_ptr_ty.zigTypeTag(zcu)) {
.pointer => ptr_ptr_ty.childType(zcu),
- else => return sema.fail(block, ptr_src, "expected pointer, found '{}'", .{ptr_ptr_ty.fmt(pt)}),
+ else => return sema.fail(block, ptr_src, "expected pointer, found '{f}'", .{ptr_ptr_ty.fmt(pt)}),
};
var array_ty = ptr_ptr_child_ty;
@@ -32095,7 +32101,7 @@ fn analyzeSlice(
try sema.errNote(
start_src,
msg,
- "expected '{}', found '{}'",
+ "expected '{f}', found '{f}'",
.{
Value.zero_comptime_int.fmtValueSema(pt, sema),
start_value.fmtValueSema(pt, sema),
@@ -32111,7 +32117,7 @@ fn analyzeSlice(
try sema.errNote(
end_src,
msg,
- "expected '{}', found '{}'",
+ "expected '{f}', found '{f}'",
.{
Value.one_comptime_int.fmtValueSema(pt, sema),
end_value.fmtValueSema(pt, sema),
@@ -32126,7 +32132,7 @@ fn analyzeSlice(
return sema.fail(
block,
end_src,
- "end index {} out of bounds for slice of single-item pointer",
+ "end index {f} out of bounds for slice of single-item pointer",
.{end_value.fmtValueSema(pt, sema)},
);
}
@@ -32173,7 +32179,7 @@ fn analyzeSlice(
elem_ty = ptr_ptr_child_ty.childType(zcu);
},
},
- else => return sema.fail(block, src, "slice of non-array type '{}'", .{ptr_ptr_child_ty.fmt(pt)}),
+ else => return sema.fail(block, src, "slice of non-array type '{f}'", .{ptr_ptr_child_ty.fmt(pt)}),
}
const ptr = if (slice_ty.isSlice(zcu))
@@ -32220,7 +32226,7 @@ fn analyzeSlice(
return sema.fail(
block,
end_src,
- "end index {} out of bounds for array of length {}{s}",
+ "end index {f} out of bounds for array of length {f}{s}",
.{
end_val.fmtValueSema(pt, sema),
len_val.fmtValueSema(pt, sema),
@@ -32265,7 +32271,7 @@ fn analyzeSlice(
return sema.fail(
block,
end_src,
- "end index {} out of bounds for slice of length {d}{s}",
+ "end index {f} out of bounds for slice of length {d}{s}",
.{
end_val.fmtValueSema(pt, sema),
try slice_val.sliceLen(pt),
@@ -32324,7 +32330,7 @@ fn analyzeSlice(
return sema.fail(
block,
start_src,
- "start index {} is larger than end index {}",
+ "start index {f} is larger than end index {f}",
.{
start_val.fmtValueSema(pt, sema),
end_val.fmtValueSema(pt, sema),
@@ -32348,13 +32354,13 @@ fn analyzeSlice(
.needed_well_defined => |ty| return sema.fail(
block,
src,
- "comptime dereference requires '{}' to have a well-defined layout",
+ "comptime dereference requires '{f}' to have a well-defined layout",
.{ty.fmt(pt)},
),
.out_of_bounds => |ty| return sema.fail(
block,
end_src,
- "slice end index {d} exceeds bounds of containing decl of type '{}'",
+ "slice end index {d} exceeds bounds of containing decl of type '{f}'",
.{ end_int, ty.fmt(pt) },
),
};
@@ -32363,7 +32369,7 @@ fn analyzeSlice(
const msg = msg: {
const msg = try sema.errMsg(src, "value in memory does not match slice sentinel", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(src, msg, "expected '{}', found '{}'", .{
+ try sema.errNote(src, msg, "expected '{f}', found '{f}'", .{
expected_sentinel.fmtValueSema(pt, sema),
actual_sentinel.fmtValueSema(pt, sema),
});
@@ -33251,7 +33257,7 @@ const PeerResolveResult = union(enum) {
};
},
.field_error => |field_error| {
- const fmt = "struct field '{}' has conflicting types";
+ const fmt = "struct field '{f}' has conflicting types";
const args = .{field_error.field_name.fmt(&pt.zcu.intern_pool)};
if (opt_msg) |msg| {
try sema.errNote(src, msg, fmt, args);
@@ -33282,7 +33288,7 @@ const PeerResolveResult = union(enum) {
candidate_srcs.resolve(block, conflict_idx[1]),
};
- const fmt = "incompatible types: '{}' and '{}'";
+ const fmt = "incompatible types: '{f}' and '{f}'";
const args = .{
conflict_tys[0].fmt(pt),
conflict_tys[1].fmt(pt),
@@ -33296,8 +33302,8 @@ const PeerResolveResult = union(enum) {
break :msg msg;
};
- if (conflict_srcs[0]) |src_loc| try sema.errNote(src_loc, msg, "type '{}' here", .{conflict_tys[0].fmt(pt)});
- if (conflict_srcs[1]) |src_loc| try sema.errNote(src_loc, msg, "type '{}' here", .{conflict_tys[1].fmt(pt)});
+ if (conflict_srcs[0]) |src_loc| try sema.errNote(src_loc, msg, "type '{f}' here", .{conflict_tys[0].fmt(pt)});
+ if (conflict_srcs[1]) |src_loc| try sema.errNote(src_loc, msg, "type '{f}' here", .{conflict_tys[1].fmt(pt)});
// No child error
break;
@@ -34609,7 +34615,7 @@ pub fn resolveStructLayout(sema: *Sema, ty: Type) SemaError!void {
if (struct_type.setLayoutWip(ip)) {
const msg = try sema.errMsg(
ty.srcLoc(zcu),
- "struct '{}' depends on itself",
+ "struct '{f}' depends on itself",
.{ty.fmt(pt)},
);
return sema.failWithOwnedErrorMsg(null, msg);
@@ -34828,13 +34834,13 @@ fn checkBackingIntType(sema: *Sema, block: *Block, src: LazySrcLoc, backing_int_
const zcu = pt.zcu;
if (!backing_int_ty.isInt(zcu)) {
- return sema.fail(block, src, "expected backing integer type, found '{}'", .{backing_int_ty.fmt(pt)});
+ return sema.fail(block, src, "expected backing integer type, found '{f}'", .{backing_int_ty.fmt(pt)});
}
if (backing_int_ty.bitSize(zcu) != fields_bit_sum) {
return sema.fail(
block,
src,
- "backing integer type '{}' has bit size {} but the struct fields have a total bit size of {}",
+ "backing integer type '{f}' has bit size {} but the struct fields have a total bit size of {}",
.{ backing_int_ty.fmt(pt), backing_int_ty.bitSize(zcu), fields_bit_sum },
);
}
@@ -34844,7 +34850,7 @@ fn checkIndexable(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void {
const pt = sema.pt;
if (!ty.isIndexable(pt.zcu)) {
const msg = msg: {
- const msg = try sema.errMsg(src, "type '{}' does not support indexing", .{ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "type '{f}' does not support indexing", .{ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "operand must be an array, slice, tuple, or vector", .{});
break :msg msg;
@@ -34868,7 +34874,7 @@ fn checkMemOperand(sema: *Sema, block: *Block, src: LazySrcLoc, ty: Type) !void
}
}
const msg = msg: {
- const msg = try sema.errMsg(src, "type '{}' is not an indexable pointer", .{ty.fmt(pt)});
+ const msg = try sema.errMsg(src, "type '{f}' is not an indexable pointer", .{ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.errNote(src, msg, "operand must be a slice, a many pointer or a pointer to an array", .{});
break :msg msg;
@@ -34936,7 +34942,7 @@ pub fn resolveUnionLayout(sema: *Sema, ty: Type) SemaError!void {
.field_types_wip, .layout_wip => {
const msg = try sema.errMsg(
ty.srcLoc(pt.zcu),
- "union '{}' depends on itself",
+ "union '{f}' depends on itself",
.{ty.fmt(pt)},
);
return sema.failWithOwnedErrorMsg(null, msg);
@@ -35124,7 +35130,7 @@ pub fn resolveStructFieldTypes(
if (struct_type.setFieldTypesWip(ip)) {
const msg = try sema.errMsg(
Type.fromInterned(ty).srcLoc(zcu),
- "struct '{}' depends on itself",
+ "struct '{f}' depends on itself",
.{Type.fromInterned(ty).fmt(pt)},
);
return sema.failWithOwnedErrorMsg(null, msg);
@@ -35153,7 +35159,7 @@ pub fn resolveStructFieldInits(sema: *Sema, ty: Type) SemaError!void {
if (struct_type.setInitsWip(ip)) {
const msg = try sema.errMsg(
ty.srcLoc(zcu),
- "struct '{}' depends on itself",
+ "struct '{f}' depends on itself",
.{ty.fmt(pt)},
);
return sema.failWithOwnedErrorMsg(null, msg);
@@ -35179,7 +35185,7 @@ pub fn resolveUnionFieldTypes(sema: *Sema, ty: Type, union_type: InternPool.Load
.field_types_wip => {
const msg = try sema.errMsg(
ty.srcLoc(zcu),
- "union '{}' depends on itself",
+ "union '{f}' depends on itself",
.{ty.fmt(pt)},
);
return sema.failWithOwnedErrorMsg(null, msg);
@@ -35549,7 +35555,7 @@ fn structFields(
switch (struct_type.layout) {
.@"extern" => if (!try sema.validateExternType(field_ty, .struct_field)) {
const msg = msg: {
- const msg = try sema.errMsg(ty_src, "extern structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(ty_src, "extern structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, ty_src, field_ty, .struct_field);
@@ -35561,7 +35567,7 @@ fn structFields(
},
.@"packed" => if (!try sema.validatePackedType(field_ty)) {
const msg = msg: {
- const msg = try sema.errMsg(ty_src, "packed structs cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(ty_src, "packed structs cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotPacked(msg, ty_src, field_ty);
@@ -35808,7 +35814,7 @@ fn unionFields(
// The provided type is an integer type and we must construct the enum tag type here.
int_tag_ty = provided_ty;
if (int_tag_ty.zigTypeTag(zcu) != .int and int_tag_ty.zigTypeTag(zcu) != .comptime_int) {
- return sema.fail(&block_scope, tag_ty_src, "expected integer tag type, found '{}'", .{int_tag_ty.fmt(pt)});
+ return sema.fail(&block_scope, tag_ty_src, "expected integer tag type, found '{f}'", .{int_tag_ty.fmt(pt)});
}
if (fields_len > 0) {
@@ -35817,7 +35823,7 @@ fn unionFields(
const msg = msg: {
const msg = try sema.errMsg(tag_ty_src, "specified integer tag type cannot represent every field", .{});
errdefer msg.destroy(sema.gpa);
- try sema.errNote(tag_ty_src, msg, "type '{}' cannot fit values in range 0...{d}", .{
+ try sema.errNote(tag_ty_src, msg, "type '{f}' cannot fit values in range 0...{d}", .{
int_tag_ty.fmt(pt),
fields_len - 1,
});
@@ -35832,7 +35838,7 @@ fn unionFields(
// The provided type is the enum tag type.
const enum_type = switch (ip.indexToKey(provided_ty.toIntern())) {
.enum_type => ip.loadEnumType(provided_ty.toIntern()),
- else => return sema.fail(&block_scope, tag_ty_src, "expected enum tag type, found '{}'", .{provided_ty.fmt(pt)}),
+ else => return sema.fail(&block_scope, tag_ty_src, "expected enum tag type, found '{f}'", .{provided_ty.fmt(pt)}),
};
union_type.setTagType(ip, provided_ty.toIntern());
// The fields of the union must match the enum exactly.
@@ -35929,7 +35935,7 @@ fn unionFields(
if (result.overflow) return sema.fail(
&block_scope,
value_src,
- "enumeration value '{}' too large for type '{}'",
+ "enumeration value '{f}' too large for type '{f}'",
.{ result.val.fmtValueSema(pt, sema), int_tag_ty.fmt(pt) },
);
last_tag_val = result.val;
@@ -35947,7 +35953,7 @@ fn unionFields(
const msg = msg: {
const msg = try sema.errMsg(
value_src,
- "enum tag value {} already taken",
+ "enum tag value {f} already taken",
.{enum_tag_val.fmtValueSema(pt, sema)},
);
errdefer msg.destroy(gpa);
@@ -35975,7 +35981,7 @@ fn unionFields(
const tag_ty = union_type.tagTypeUnordered(ip);
const tag_info = ip.loadEnumType(tag_ty);
const enum_index = tag_info.nameIndex(ip, field_name) orelse {
- return sema.fail(&block_scope, name_src, "no field named '{}' in enum '{}'", .{
+ return sema.fail(&block_scope, name_src, "no field named '{f}' in enum '{f}'", .{
field_name.fmt(ip), Type.fromInterned(tag_ty).fmt(pt),
});
};
@@ -35992,7 +35998,7 @@ fn unionFields(
.base_node_inst = Type.fromInterned(tag_ty).typeDeclInstAllowGeneratedTag(zcu).?,
.offset = .{ .container_field_name = enum_index },
};
- const msg = try sema.errMsg(name_src, "union field '{}' ordered differently than corresponding enum field", .{
+ const msg = try sema.errMsg(name_src, "union field '{f}' ordered differently than corresponding enum field", .{
field_name.fmt(ip),
});
errdefer msg.destroy(sema.gpa);
@@ -36018,7 +36024,7 @@ fn unionFields(
!try sema.validateExternType(field_ty, .union_field))
{
const msg = msg: {
- const msg = try sema.errMsg(type_src, "extern unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(type_src, "extern unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotExtern(msg, type_src, field_ty, .union_field);
@@ -36029,7 +36035,7 @@ fn unionFields(
return sema.failWithOwnedErrorMsg(&block_scope, msg);
} else if (layout == .@"packed" and !try sema.validatePackedType(field_ty)) {
const msg = msg: {
- const msg = try sema.errMsg(type_src, "packed unions cannot contain fields of type '{}'", .{field_ty.fmt(pt)});
+ const msg = try sema.errMsg(type_src, "packed unions cannot contain fields of type '{f}'", .{field_ty.fmt(pt)});
errdefer msg.destroy(sema.gpa);
try sema.explainWhyTypeIsNotPacked(msg, type_src, field_ty);
@@ -36065,7 +36071,7 @@ fn unionFields(
for (tag_info.names.get(ip), 0..) |field_name, field_index| {
if (explicit_tags_seen[field_index]) continue;
- try sema.addFieldErrNote(.fromInterned(tag_ty), field_index, msg, "field '{}' missing, declared here", .{
+ try sema.addFieldErrNote(.fromInterned(tag_ty), field_index, msg, "field '{f}' missing, declared here", .{
field_name.fmt(ip),
});
}
@@ -36101,7 +36107,7 @@ fn generateUnionTagTypeNumbered(
const name = try ip.getOrPutStringFmt(
gpa,
pt.tid,
- "@typeInfo({}).@\"union\".tag_type.?",
+ "@typeInfo({f}).@\"union\".tag_type.?",
.{union_name.fmt(ip)},
.no_embedded_nulls,
);
@@ -36137,7 +36143,7 @@ fn generateUnionTagTypeSimple(
const name = try ip.getOrPutStringFmt(
gpa,
pt.tid,
- "@typeInfo({}).@\"union\".tag_type.?",
+ "@typeInfo({f}).@\"union\".tag_type.?",
.{union_name.fmt(ip)},
.no_embedded_nulls,
);
@@ -36671,13 +36677,13 @@ fn pointerDeref(sema: *Sema, block: *Block, src: LazySrcLoc, ptr_val: Value, ptr
.needed_well_defined => |ty| return sema.fail(
block,
src,
- "comptime dereference requires '{}' to have a well-defined layout",
+ "comptime dereference requires '{f}' to have a well-defined layout",
.{ty.fmt(pt)},
),
.out_of_bounds => |ty| return sema.fail(
block,
src,
- "dereference of '{}' exceeds bounds of containing decl of type '{}'",
+ "dereference of '{f}' exceeds bounds of containing decl of type '{f}'",
.{ ptr_ty.fmt(pt), ty.fmt(pt) },
),
}
@@ -36697,7 +36703,7 @@ fn pointerDerefExtra(sema: *Sema, block: *Block, src: LazySrcLoc, ptr_val: Value
.success => |mv| return .{ .val = try mv.intern(pt, sema.arena) },
.runtime_load => return .runtime_load,
.undef => return sema.failWithUseOfUndef(block, src),
- .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {}", .{err_name.fmt(ip)}),
+ .err_payload => |err_name| return sema.fail(block, src, "attempt to unwrap error: {f}", .{err_name.fmt(ip)}),
.null_payload => return sema.fail(block, src, "attempt to use null value", .{}),
.inactive_union_field => return sema.fail(block, src, "access of inactive union field", .{}),
.needed_well_defined => |ty| return .{ .needed_well_defined = ty },
@@ -36822,12 +36828,12 @@ fn intFromFloatScalar(
const float = val.toFloat(f128, zcu);
if (std.math.isNan(float)) {
- return sema.fail(block, src, "float value NaN cannot be stored in integer type '{}'", .{
+ return sema.fail(block, src, "float value NaN cannot be stored in integer type '{f}'", .{
int_ty.fmt(pt),
});
}
if (std.math.isInf(float)) {
- return sema.fail(block, src, "float value Inf cannot be stored in integer type '{}'", .{
+ return sema.fail(block, src, "float value Inf cannot be stored in integer type '{f}'", .{
int_ty.fmt(pt),
});
}
@@ -36842,7 +36848,7 @@ fn intFromFloatScalar(
.exact => return sema.fail(
block,
src,
- "fractional component prevents float value '{}' from coercion to type '{}'",
+ "fractional component prevents float value '{f}' from coercion to type '{f}'",
.{ val.fmtValueSema(pt, sema), int_ty.fmt(pt) },
),
.truncate => {},
@@ -36854,7 +36860,7 @@ fn intFromFloatScalar(
const int_info = int_ty.intInfo(zcu);
if (!big_int.toConst().fitsInTwosComp(int_info.signedness, int_info.bits)) {
- return sema.fail(block, src, "float value '{}' cannot be stored in integer type '{}'", .{
+ return sema.fail(block, src, "float value '{f}' cannot be stored in integer type '{f}'", .{
val.fmtValueSema(pt, sema), int_ty.fmt(pt),
});
}
@@ -37186,9 +37192,9 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc,
var first_path: std.ArrayListUnmanaged(u8) = .empty;
if (intermediate_value_count == 0) {
- try first_path.writer(arena).print("{i}", .{start_value_name.fmt(ip)});
+ try first_path.print(arena, "{fi}", .{start_value_name.fmt(ip)});
} else {
- try first_path.writer(arena).print("v{}", .{intermediate_value_count - 1});
+ try first_path.print(arena, "v{}", .{intermediate_value_count - 1});
}
const comptime_ptr = try sema.notePathToComptimeAllocPtrInner(val, &first_path);
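
Two smaller changes ride along in this hunk: `std.ArrayListUnmanaged(u8)` is printed into directly via its `print(allocator, fmt, args)` helper instead of going through `.writer(arena)`, and the identifier-style placeholder keeps its `i` option while gaining the explicit `f` prefix (`{i}` becomes `{fi}`). A sketch of the list-printing side, assuming only what the hunk itself uses:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;

        var path: std.ArrayListUnmanaged(u8) = .empty;
        defer path.deinit(gpa);

        // Assumed from the hunk above: byte lists expose print() directly,
        // replacing the old path.writer(gpa).print(...) dance.
        try path.print(gpa, "v{d}", .{3});
        try path.print(gpa, "[{d}]", .{0});

        std.debug.print("{s}\n", .{path.items}); // v3[0]
    }
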
@@ -37213,30 +37219,26 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc,
error.AnalysisFail => unreachable,
};
- var second_path: std.ArrayListUnmanaged(u8) = .empty;
+ var second_path_aw: std.io.Writer.Allocating = .init(arena);
+ defer second_path_aw.deinit();
const inter_name = try std.fmt.allocPrint(arena, "v{d}", .{intermediate_value_count});
const deriv_start = @import("print_value.zig").printPtrDerivation(
derivation,
- second_path.writer(arena),
+ &second_path_aw.interface,
pt,
.lvalue,
.{ .str = inter_name },
20,
- ) catch |err| switch (err) {
- error.OutOfMemory => |e| return e,
- error.AnalysisFail => unreachable,
- error.ComptimeReturn => unreachable,
- error.ComptimeBreak => unreachable,
- };
+ ) catch return error.OutOfMemory;
switch (deriv_start) {
.int, .nav_ptr => unreachable,
.uav_ptr => |uav| {
- try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items });
+ try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() });
return .{ .new_val = .fromInterned(uav.val) };
},
.comptime_alloc_ptr => |cta_info| {
- try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items });
+ try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() });
const cta = sema.getComptimeAlloc(cta_info.idx);
if (cta.is_const) {
return .{ .new_val = cta_info.val };
@@ -37246,7 +37248,7 @@ fn notePathToComptimeAllocPtr(sema: *Sema, msg: *Zcu.ErrorMsg, src: LazySrcLoc,
}
},
.comptime_field_ptr => {
- try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path.items });
+ try sema.errNote(src, msg, "'{s}' points to '{s}', where", .{ first_path.items, second_path_aw.getWritten() });
try sema.errNote(src, msg, "'{s}' is a comptime field", .{inter_name});
return .done;
},
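
Here the scratch buffer for `printPtrDerivation` moves from an `ArrayListUnmanaged(u8)` plus a generated writer to `std.io.Writer.Allocating`, which owns the growable buffer and exposes the concrete writer as its `.interface` field; the call site reads the result back with `getWritten()` and folds any write failure into `error.OutOfMemory`. A standalone sketch of that pattern, using only the calls visible in the hunk:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;

        // Allocating writer: owns the buffer, hands out a *std.io.Writer.
        var aw: std.io.Writer.Allocating = .init(gpa);
        defer aw.deinit();

        try aw.interface.print("v{d}", .{7});
        try aw.interface.writeAll(".ptr");

        // getWritten() replaces reading .items off the old ArrayList.
        std.debug.print("{s}\n", .{aw.getWritten()}); // v7.ptr
    }
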
@@ -37286,7 +37288,7 @@ fn notePathToComptimeAllocPtrInner(sema: *Sema, val: Value, path: *std.ArrayList
const backing_enum = union_ty.unionTagTypeHypothetical(zcu);
const field_idx = backing_enum.enumTagFieldIndex(.fromInterned(un.tag), zcu).?;
const field_name = backing_enum.enumFieldName(field_idx, zcu);
- try path.writer(arena).print(".{i}", .{field_name.fmt(ip)});
+ try path.print(arena, ".{fi}", .{field_name.fmt(ip)});
return sema.notePathToComptimeAllocPtrInner(.fromInterned(un.val), path);
},
.aggregate => |agg| {
@@ -37301,17 +37303,17 @@ fn notePathToComptimeAllocPtrInner(sema: *Sema, val: Value, path: *std.ArrayList
};
const agg_ty: Type = .fromInterned(agg.ty);
switch (agg_ty.zigTypeTag(zcu)) {
- .array, .vector => try path.writer(arena).print("[{d}]", .{elem_idx}),
+ .array, .vector => try path.print(arena, "[{d}]", .{elem_idx}),
.pointer => switch (elem_idx) {
Value.slice_ptr_index => try path.appendSlice(arena, ".ptr"),
Value.slice_len_index => try path.appendSlice(arena, ".len"),
else => unreachable,
},
.@"struct" => if (agg_ty.isTuple(zcu)) {
- try path.writer(arena).print("[{d}]", .{elem_idx});
+ try path.print(arena, "[{d}]", .{elem_idx});
} else {
const name = agg_ty.structFieldName(elem_idx, zcu).unwrap().?;
- try path.writer(arena).print(".{i}", .{name.fmt(ip)});
+ try path.print(arena, ".{fi}", .{name.fmt(ip)});
},
else => unreachable,
}
@@ -37588,7 +37590,7 @@ fn resolveDeclaredEnumInner(
if (tag_type_ref != .none) {
const ty = try sema.resolveType(block, tag_ty_src, tag_type_ref);
if (ty.zigTypeTag(zcu) != .int and ty.zigTypeTag(zcu) != .comptime_int) {
- return sema.fail(block, tag_ty_src, "expected integer tag type, found '{}'", .{ty.fmt(pt)});
+ return sema.fail(block, tag_ty_src, "expected integer tag type, found '{f}'", .{ty.fmt(pt)});
}
break :ty ty;
} else if (fields_len == 0) {
@@ -37642,7 +37644,7 @@ fn resolveDeclaredEnumInner(
.offset = .{ .container_field_value = conflict.prev_field_idx },
};
const msg = msg: {
- const msg = try sema.errMsg(value_src, "enum tag value {} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)});
+ const msg = try sema.errMsg(value_src, "enum tag value {f} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)});
errdefer msg.destroy(gpa);
try sema.errNote(other_field_src, msg, "other occurrence here", .{});
break :msg msg;
@@ -37665,7 +37667,7 @@ fn resolveDeclaredEnumInner(
.offset = .{ .container_field_value = conflict.prev_field_idx },
};
const msg = msg: {
- const msg = try sema.errMsg(value_src, "enum tag value {} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)});
+ const msg = try sema.errMsg(value_src, "enum tag value {f} already taken", .{last_tag_val.?.fmtValueSema(pt, sema)});
errdefer msg.destroy(gpa);
try sema.errNote(other_field_src, msg, "other occurrence here", .{});
break :msg msg;
@@ -37682,7 +37684,7 @@ fn resolveDeclaredEnumInner(
};
if (tag_overflow) {
- const msg = try sema.errMsg(value_src, "enumeration value '{}' too large for type '{}'", .{
+ const msg = try sema.errMsg(value_src, "enumeration value '{f}' too large for type '{f}'", .{
last_tag_val.?.fmtValueSema(pt, sema), int_tag_ty.fmt(pt),
});
return sema.failWithOwnedErrorMsg(block, msg);
src/translate_c.zig
@@ -357,7 +357,7 @@ fn transFileScopeAsm(c: *Context, scope: *Scope, file_scope_asm: *const clang.Fi
var len: usize = undefined;
const bytes_ptr = asm_string.getString_bytes_begin_size(&len);
- const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])});
+ const str = try std.fmt.allocPrint(c.arena, "\"{f}\"", .{std.zig.fmtString(bytes_ptr[0..len])});
const str_node = try Tag.string_literal.create(c.arena, str);
const asm_node = try Tag.asm_simple.create(c.arena, str_node);
@@ -2276,7 +2276,7 @@ fn transNarrowStringLiteral(
var len: usize = undefined;
const bytes_ptr = stmt.getString_bytes_begin_size(&len);
- const str = try std.fmt.allocPrint(c.arena, "\"{}\"", .{std.zig.fmtEscapes(bytes_ptr[0..len])});
+ const str = try std.fmt.allocPrint(c.arena, "\"{f}\"", .{std.zig.fmtString(bytes_ptr[0..len])});
const node = try Tag.string_literal.create(c.arena, str);
return maybeSuppressResult(c, result_used, node);
}
@@ -3338,7 +3338,7 @@ fn transPredefinedExpr(c: *Context, scope: *Scope, expr: *const clang.Predefined
fn transCreateCharLitNode(c: *Context, narrow: bool, val: u32) TransError!Node {
return Tag.char_literal.create(c.arena, if (narrow)
- try std.fmt.allocPrint(c.arena, "'{'}'", .{std.zig.fmtEscapes(&.{@as(u8, @intCast(val))})})
+ try std.fmt.allocPrint(c.arena, "'{f}'", .{std.zig.fmtChar(&.{@as(u8, @intCast(val))})})
else
try std.fmt.allocPrint(c.arena, "'\\u{{{x}}}'", .{val}));
}
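
translate_c switches from `std.zig.fmtEscapes` to `std.zig.fmtString` for string literals and to `std.zig.fmtChar` for narrow char literals, and both are now printed through `{f}` (the old `{'}'` char-escaping specifier goes away). A hedged sketch of both helpers as they are used above; the expected outputs in the comments are my reading of the escaping rules, not output captured from this commit:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;

        const s = try std.fmt.allocPrint(gpa, "\"{f}\"", .{std.zig.fmtString("a\"b\n")});
        defer gpa.free(s);
        std.debug.print("{s}\n", .{s}); // expected roughly: "a\"b\n"

        const c = try std.fmt.allocPrint(gpa, "'{f}'", .{std.zig.fmtChar(&.{@as(u8, '\n')})});
        defer gpa.free(c);
        std.debug.print("{s}\n", .{c}); // expected roughly: '\n'
    }
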
@@ -5832,7 +5832,7 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
num += c - 'A' + 10;
},
else => {
- i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
+ i += std.fmt.printInt(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
num = 0;
if (c == '\\')
state = .escape
@@ -5858,7 +5858,7 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
};
num += c - '0';
} else {
- i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
+ i += std.fmt.printInt(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
num = 0;
count = 0;
if (c == '\\')
@@ -5872,21 +5872,21 @@ fn zigifyEscapeSequences(ctx: *Context, m: *MacroCtx) ![]const u8 {
}
}
if (state == .hex or state == .octal)
- i += std.fmt.formatIntBuf(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
+ i += std.fmt.printInt(bytes[i..], num, 16, .lower, std.fmt.FormatOptions{ .fill = '0', .width = 2 });
return bytes[0..i];
}
-/// non-ASCII characters (c > 127) are also treated as non-printable by fmtSliceEscapeLower.
+/// non-ASCII characters (c > 127) are also treated as non-printable by ascii.hexEscape.
/// If a C string literal or char literal in a macro is not valid UTF-8, we need to escape
/// non-ASCII characters so that the Zig source we output will itself be UTF-8.
fn escapeUnprintables(ctx: *Context, m: *MacroCtx) ![]const u8 {
const zigified = try zigifyEscapeSequences(ctx, m);
if (std.unicode.utf8ValidateSlice(zigified)) return zigified;
- const formatter = std.fmt.fmtSliceEscapeLower(zigified);
- const encoded_size = @as(usize, @intCast(std.fmt.count("{s}", .{formatter})));
+ const formatter = std.ascii.hexEscape(zigified, .lower);
+ const encoded_size = @as(usize, @intCast(std.fmt.count("{fs}", .{formatter})));
const output = try ctx.arena.alloc(u8, encoded_size);
- return std.fmt.bufPrint(output, "{s}", .{formatter}) catch |err| switch (err) {
+ return std.fmt.bufPrint(output, "{fs}", .{formatter}) catch |err| switch (err) {
error.NoSpaceLeft => unreachable,
else => |e| return e,
};
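
`std.fmt.fmtSliceEscapeLower` is replaced by `std.ascii.hexEscape(bytes, .lower)`; the count-then-bufPrint structure of `escapeUnprintables` is unchanged, only the formatter and its `{fs}` specifier are new (both taken verbatim from the hunk). A sketch under that assumption:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;
        const bytes = "caf\xe9"; // not valid UTF-8

        const esc = std.ascii.hexEscape(bytes, .lower);
        const size: usize = @intCast(std.fmt.count("{fs}", .{esc}));
        const out = try gpa.alloc(u8, size);
        defer gpa.free(out);
        _ = try std.fmt.bufPrint(out, "{fs}", .{esc});

        std.debug.print("{s}\n", .{out}); // expected roughly: caf\xe9
    }
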
@@ -5905,7 +5905,7 @@ fn parseCPrimaryExpr(c: *Context, m: *MacroCtx, scope: *Scope) ParseError!Node {
if (slice[0] != '\'' or slice[1] == '\\' or slice.len == 3) {
return Tag.char_literal.create(c.arena, try escapeUnprintables(c, m));
} else {
- const str = try std.fmt.allocPrint(c.arena, "0x{s}", .{std.fmt.fmtSliceHexLower(slice[1 .. slice.len - 1])});
+ const str = try std.fmt.allocPrint(c.arena, "0x{x}", .{slice[1 .. slice.len - 1]});
return Tag.integer_literal.create(c.arena, str);
}
},
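
The last translate_c hunk drops `std.fmt.fmtSliceHexLower` entirely: under the new `std.fmt`, applying `{x}` to a `[]const u8` hex-dumps the bytes directly. A minimal sketch of that assumption:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;
        const bytes: []const u8 = "ab";

        // "{x}" on a byte slice formats each byte as lowercase hex.
        const hex = try std.fmt.allocPrint(gpa, "0x{x}", .{bytes});
        defer gpa.free(hex);
        std.debug.print("{s}\n", .{hex}); // 0x6162
    }
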
src/Type.zig
@@ -382,7 +382,9 @@ pub fn print(ty: Type, writer: anytype, pt: Zcu.PerThread) @TypeOf(writer).Error
}
}
switch (fn_info.cc) {
- .auto, .async, .naked, .@"inline" => try writer.print("callconv(.{}) ", .{std.zig.fmtId(@tagName(fn_info.cc))}),
+ .auto, .async, .naked, .@"inline" => try writer.print("callconv(.{f}) ", .{
+ std.zig.fmtId(@tagName(fn_info.cc)),
+ }),
else => try writer.print("callconv({any}) ", .{fn_info.cc}),
}
}
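
`std.zig.fmtId` follows the same rule as the other formatter helpers: it is now selected with `{f}`. The point of using it for `callconv` printing is keyword quoting, sketched below; the expected output is my reading of fmtId's behavior, not captured output:

    const std = @import("std");

    pub fn main() !void {
        const gpa = std.heap.page_allocator;

        const cc = try std.fmt.allocPrint(gpa, "callconv(.{f})", .{std.zig.fmtId("inline")});
        defer gpa.free(cc);
        std.debug.print("{s}\n", .{cc}); // expected: callconv(.@"inline")
    }
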
src/Zcu.zig
@@ -2811,7 +2811,7 @@ comptime {
}
pub fn loadZirCache(gpa: Allocator, cache_file: std.fs.File) !Zir {
- return loadZirCacheBody(gpa, try cache_file.reader().readStruct(Zir.Header), cache_file);
+ return loadZirCacheBody(gpa, try cache_file.deprecatedReader().readStruct(Zir.Header), cache_file);
}
pub fn loadZirCacheBody(gpa: Allocator, header: Zir.Header, cache_file: std.fs.File) !Zir {
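
As elsewhere in this commit, `std.fs.File.reader()` call sites that still want the old streaming interface are renamed to `deprecatedReader()` (and `writer()` to `deprecatedWriter()`), presumably so the plain names can be reclaimed by the new `std.io.Reader`/`Writer` API. A sketch of the `readStruct` pattern used by `loadZirCache`; the file name and header layout here are placeholders, not the real ZIR cache format:

    const std = @import("std");

    const Header = extern struct { magic: u32, len: u32 };

    pub fn main() !void {
        // "cache.bin" is a hypothetical input file for illustration only.
        var file = try std.fs.cwd().openFile("cache.bin", .{});
        defer file.close();

        // Old GenericReader behavior, now reached through deprecatedReader().
        const header = try file.deprecatedReader().readStruct(Header);
        std.debug.print("magic=0x{x} len={d}\n", .{ header.magic, header.len });
    }
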
test/behavior/union_with_members.zig
@@ -10,8 +10,8 @@ const ET = union(enum) {
pub fn print(a: *const ET, buf: []u8) anyerror!usize {
return switch (a.*) {
- ET.SINT => |x| fmt.formatIntBuf(buf, x, 10, .lower, fmt.FormatOptions{}),
- ET.UINT => |x| fmt.formatIntBuf(buf, x, 10, .lower, fmt.FormatOptions{}),
+ ET.SINT => |x| fmt.printInt(buf, x, 10, .lower, fmt.FormatOptions{}),
+ ET.UINT => |x| fmt.printInt(buf, x, 10, .lower, fmt.FormatOptions{}),
};
}
};
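
`std.fmt.formatIntBuf` is renamed to `std.fmt.printInt`; judging by the call sites above it keeps the same shape, writing into a caller-provided buffer and returning the byte count. A sketch mirroring those call sites:

    const std = @import("std");

    pub fn main() void {
        var buf: [8]u8 = undefined;
        const n = std.fmt.printInt(&buf, @as(u8, 0xA), 16, .lower, std.fmt.FormatOptions{
            .fill = '0',
            .width = 2,
        });
        std.debug.print("{s}\n", .{buf[0..n]}); // 0a
    }
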
test/link/elf.zig
@@ -1316,7 +1316,7 @@ fn testGcSectionsZig(b: *Build, opts: Options) *Step {
\\extern fn live_fn2() void;
\\pub fn main() void {
\\ const stdout = std.io.getStdOut();
- \\ stdout.writer().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable;
+ \\ stdout.deprecatedWriter().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable;
\\ live_fn2();
\\}
,
@@ -1358,7 +1358,7 @@ fn testGcSectionsZig(b: *Build, opts: Options) *Step {
\\extern fn live_fn2() void;
\\pub fn main() void {
\\ const stdout = std.io.getStdOut();
- \\ stdout.writer().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable;
+ \\ stdout.deprecatedWriter().print("{d} {d}\n", .{ live_var1, live_var2 }) catch unreachable;
\\ live_fn2();
\\}
,
test/standalone/run_output_paths/create_file.zig
@@ -10,7 +10,7 @@ pub fn main() !void {
dir_name, .{});
const file_name = args.next().?;
const file = try dir.createFile(file_name, .{});
- try file.writer().print(
+ try file.deprecatedWriter().print(
\\{s}
\\{s}
\\Hello, world!
test/standalone/simple/brace_expansion.zig
@@ -228,7 +228,7 @@ pub fn main() !void {
const stdin_file = io.getStdIn();
const stdout_file = io.getStdOut();
- const stdin = try stdin_file.reader().readAllAlloc(global_allocator, std.math.maxInt(usize));
+ const stdin = try stdin_file.deprecatedReader().readAllAlloc(global_allocator, std.math.maxInt(usize));
defer global_allocator.free(stdin);
var result_buf = ArrayList(u8).init(global_allocator);
test/standalone/windows_argv/fuzz.zig
@@ -58,7 +58,7 @@ pub fn main() !void {
std.debug.print(">>> found discrepancy <<<\n", .{});
const cmd_line_wtf8 = try std.unicode.wtf16LeToWtf8Alloc(allocator, cmd_line_w);
defer allocator.free(cmd_line_wtf8);
- std.debug.print("\"{}\"\n\n", .{std.zig.fmtEscapes(cmd_line_wtf8)});
+ std.debug.print("\"{f}\"\n\n", .{std.zig.fmtString(cmd_line_wtf8)});
errors += 1;
}
test/standalone/windows_argv/lib.zig
@@ -27,8 +27,8 @@ fn testArgv(expected_args: []const [*:0]const u16) !void {
wtf8_buf.clearRetainingCapacity();
try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(expected_arg));
if (!std.mem.eql(u8, wtf8_buf.items, arg_wtf8)) {
- std.debug.print("{}: expected: \"{}\"\n", .{ i, std.zig.fmtEscapes(wtf8_buf.items) });
- std.debug.print("{}: actual: \"{}\"\n", .{ i, std.zig.fmtEscapes(arg_wtf8) });
+ std.debug.print("{}: expected: \"{f}\"\n", .{ i, std.zig.fmtString(wtf8_buf.items) });
+ std.debug.print("{}: actual: \"{f}\"\n", .{ i, std.zig.fmtString(arg_wtf8) });
eql = false;
}
}
@@ -36,22 +36,22 @@ fn testArgv(expected_args: []const [*:0]const u16) !void {
for (expected_args[min_len..], min_len..) |arg, i| {
wtf8_buf.clearRetainingCapacity();
try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(arg));
- std.debug.print("{}: expected: \"{}\"\n", .{ i, std.zig.fmtEscapes(wtf8_buf.items) });
+ std.debug.print("{}: expected: \"{f}\"\n", .{ i, std.zig.fmtString(wtf8_buf.items) });
}
for (args[min_len..], min_len..) |arg, i| {
- std.debug.print("{}: actual: \"{}\"\n", .{ i, std.zig.fmtEscapes(arg) });
+ std.debug.print("{}: actual: \"{f}\"\n", .{ i, std.zig.fmtString(arg) });
}
const peb = std.os.windows.peb();
const lpCmdLine: [*:0]u16 = @ptrCast(peb.ProcessParameters.CommandLine.Buffer);
wtf8_buf.clearRetainingCapacity();
try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(lpCmdLine));
- std.debug.print("command line: \"{}\"\n", .{std.zig.fmtEscapes(wtf8_buf.items)});
+ std.debug.print("command line: \"{f}\"\n", .{std.zig.fmtString(wtf8_buf.items)});
std.debug.print("expected argv:\n", .{});
std.debug.print("&.{{\n", .{});
for (expected_args) |arg| {
wtf8_buf.clearRetainingCapacity();
try std.unicode.wtf16LeToWtf8ArrayList(&wtf8_buf, std.mem.span(arg));
- std.debug.print(" \"{}\",\n", .{std.zig.fmtEscapes(wtf8_buf.items)});
+ std.debug.print(" \"{f}\",\n", .{std.zig.fmtString(wtf8_buf.items)});
}
std.debug.print("}}\n", .{});
return error.ArgvMismatch;
test/tests.zig
@@ -2753,7 +2753,7 @@ pub fn addIncrementalTests(b: *std.Build, test_step: *Step) !void {
run.addArg(b.graph.zig_exe);
run.addFileArg(b.path("test/incremental/").path(b, entry.path));
- run.addArgs(&.{ "--zig-lib-dir", b.fmt("{}", .{b.graph.zig_lib_directory}) });
+ run.addArgs(&.{ "--zig-lib-dir", b.fmt("{f}", .{b.graph.zig_lib_directory}) });
run.addCheck(.{ .expect_term = .{ .Exited = 0 } });
build.zig
@@ -279,7 +279,7 @@ pub fn build(b: *std.Build) !void {
const ancestor_ver = try std.SemanticVersion.parse(tagged_ancestor);
if (zig_version.order(ancestor_ver) != .gt) {
- std.debug.print("Zig version '{}' must be greater than tagged ancestor '{}'\n", .{ zig_version, ancestor_ver });
+ std.debug.print("Zig version '{f}' must be greater than tagged ancestor '{f}'\n", .{ zig_version, ancestor_ver });
std.process.exit(1);
}
@@ -1449,7 +1449,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath {
}
var dir = b.build_root.handle.openDir("doc/langref", .{ .iterate = true }) catch |err| {
- std.debug.panic("unable to open '{}doc/langref' directory: {s}", .{
+ std.debug.panic("unable to open '{f}doc/langref' directory: {s}", .{
b.build_root, @errorName(err),
});
};
@@ -1470,7 +1470,7 @@ fn generateLangRef(b: *std.Build) std.Build.LazyPath {
// in a temporary directory
"--cache-root", b.cache_root.path orelse ".",
});
- cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{}", .{b.graph.zig_lib_directory}) });
+ cmd.addArgs(&.{ "--zig-lib-dir", b.fmt("{f}", .{b.graph.zig_lib_directory}) });
cmd.addArgs(&.{"-i"});
cmd.addFileArg(b.path(b.fmt("doc/langref/{s}", .{entry.name})));
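
The build script picks up the same specifier change: `std.SemanticVersion` and `std.Build.Cache.Directory` values are rendered with `{f}` once their formatting goes through an explicit `format` method. A small sketch for the version case; the version string is an arbitrary example:

    const std = @import("std");

    pub fn main() !void {
        const ver = try std.SemanticVersion.parse("0.15.0-dev.123+abcdef1");
        // "{}" would no longer pick up SemanticVersion's formatter; "{f}" does.
        std.debug.print("building with {f}\n", .{ver});
    }
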
CMakeLists.txt
@@ -436,7 +436,6 @@ set(ZIG_STAGE2_SOURCES
lib/std/elf.zig
lib/std/fifo.zig
lib/std/fmt.zig
- lib/std/fmt/format_float.zig
lib/std/fmt/parse_float.zig
lib/std/fs.zig
lib/std/fs/AtomicFile.zig