Commit 18f1fef142
Changed files (22)
lib/std/atomic/queue.zig
@@ -104,21 +104,17 @@ pub fn Queue(comptime T: type) type {
}
pub fn dump(self: *Self) void {
- var stderr_file = std.io.getStdErr() catch return;
- const stderr = &stderr_file.outStream().stream;
- const Error = @typeInfo(@TypeOf(stderr)).Pointer.child.Error;
-
- self.dumpToStream(Error, stderr) catch return;
+ self.dumpToStream(std.io.getStdErr().outStream()) catch return;
}
- pub fn dumpToStream(self: *Self, comptime Error: type, stream: *std.io.OutStream(Error)) Error!void {
+ pub fn dumpToStream(self: *Self, stream: var) !void {
const S = struct {
fn dumpRecursive(
- s: *std.io.OutStream(Error),
+ s: var,
optional_node: ?*Node,
indent: usize,
comptime depth: comptime_int,
- ) Error!void {
+ ) !void {
try s.writeByteNTimes(' ', indent);
if (optional_node) |node| {
try s.print("0x{x}={}\n", .{ @ptrToInt(node), node.data });
@@ -326,17 +322,16 @@ test "std.atomic.Queue single-threaded" {
test "std.atomic.Queue dump" {
const mem = std.mem;
- const SliceOutStream = std.io.SliceOutStream;
var buffer: [1024]u8 = undefined;
var expected_buffer: [1024]u8 = undefined;
- var sos = SliceOutStream.init(buffer[0..]);
+ var fbs = std.io.fixedBufferStream(&buffer);
var queue = Queue(i32).init();
// Test empty stream
- sos.reset();
- try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
- expect(mem.eql(u8, buffer[0..sos.pos],
+ fbs.reset();
+ try queue.dumpToStream(fbs.outStream());
+ expect(mem.eql(u8, buffer[0..fbs.pos],
\\head: (null)
\\tail: (null)
\\
@@ -350,8 +345,8 @@ test "std.atomic.Queue dump" {
};
queue.put(&node_0);
- sos.reset();
- try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
+ fbs.reset();
+ try queue.dumpToStream(fbs.outStream());
var expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1
@@ -360,7 +355,7 @@ test "std.atomic.Queue dump" {
\\ (null)
\\
, .{ @ptrToInt(queue.head), @ptrToInt(queue.tail) });
- expect(mem.eql(u8, buffer[0..sos.pos], expected));
+ expect(mem.eql(u8, buffer[0..fbs.pos], expected));
// Test a stream with two elements
var node_1 = Queue(i32).Node{
@@ -370,8 +365,8 @@ test "std.atomic.Queue dump" {
};
queue.put(&node_1);
- sos.reset();
- try queue.dumpToStream(SliceOutStream.Error, &sos.stream);
+ fbs.reset();
+ try queue.dumpToStream(fbs.outStream());
expected = try std.fmt.bufPrint(expected_buffer[0..],
\\head: 0x{x}=1
@@ -381,5 +376,5 @@ test "std.atomic.Queue dump" {
\\ (null)
\\
, .{ @ptrToInt(queue.head), @ptrToInt(queue.head.?.next), @ptrToInt(queue.tail) });
- expect(mem.eql(u8, buffer[0..sos.pos], expected));
+ expect(mem.eql(u8, buffer[0..fbs.pos], expected));
}
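
The signature change above, from `dumpToStream(comptime Error: type, stream: *std.io.OutStream(Error))` to `dumpToStream(stream: var)`, is the core pattern of this commit: stream parameters become duck-typed and the error set is inferred. A minimal sketch of that pattern, using only the `fixedBufferStream` API shown in the test; `writeGreeting` is a hypothetical helper invented for illustration, not part of the commit:

    const std = @import("std");

    // Any value implementing the OutStream interface can be passed via `var`;
    // the error set of `writeGreeting` is inferred from the stream it receives.
    fn writeGreeting(stream: var, name: []const u8) !void {
        try stream.print("hello, {}!\n", .{name});
    }

    test "writeGreeting accepts any out stream" {
        var buf: [64]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buf);
        try writeGreeting(fbs.outStream(), "queue");
        std.testing.expectEqualSlices(u8, "hello, queue!\n", fbs.getWritten());
    }
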
lib/std/debug/leb128.zig
@@ -121,18 +121,18 @@ pub fn readILEB128Mem(comptime T: type, ptr: *[*]const u8) !T {
}
fn test_read_stream_ileb128(comptime T: type, encoded: []const u8) !T {
- var in_stream = std.io.SliceInStream.init(encoded);
- return try readILEB128(T, &in_stream.stream);
+ var in_stream = std.io.fixedBufferStream(encoded);
+ return try readILEB128(T, in_stream.inStream());
}
fn test_read_stream_uleb128(comptime T: type, encoded: []const u8) !T {
- var in_stream = std.io.SliceInStream.init(encoded);
- return try readULEB128(T, &in_stream.stream);
+ var in_stream = std.io.fixedBufferStream(encoded);
+ return try readULEB128(T, in_stream.inStream());
}
fn test_read_ileb128(comptime T: type, encoded: []const u8) !T {
- var in_stream = std.io.SliceInStream.init(encoded);
- const v1 = readILEB128(T, &in_stream.stream);
+ var in_stream = std.io.fixedBufferStream(encoded);
+ const v1 = readILEB128(T, in_stream.inStream());
var in_ptr = encoded.ptr;
const v2 = readILEB128Mem(T, &in_ptr);
testing.expectEqual(v1, v2);
@@ -140,8 +140,8 @@ fn test_read_ileb128(comptime T: type, encoded: []const u8) !T {
}
fn test_read_uleb128(comptime T: type, encoded: []const u8) !T {
- var in_stream = std.io.SliceInStream.init(encoded);
- const v1 = readULEB128(T, &in_stream.stream);
+ var in_stream = std.io.fixedBufferStream(encoded);
+ const v1 = readULEB128(T, in_stream.inStream());
var in_ptr = encoded.ptr;
const v2 = readULEB128Mem(T, &in_ptr);
testing.expectEqual(v1, v2);
@@ -149,22 +149,22 @@ fn test_read_uleb128(comptime T: type, encoded: []const u8) !T {
}
fn test_read_ileb128_seq(comptime T: type, comptime N: usize, encoded: []const u8) void {
- var in_stream = std.io.SliceInStream.init(encoded);
+ var in_stream = std.io.fixedBufferStream(encoded);
var in_ptr = encoded.ptr;
var i: usize = 0;
while (i < N) : (i += 1) {
- const v1 = readILEB128(T, &in_stream.stream);
+ const v1 = readILEB128(T, in_stream.inStream());
const v2 = readILEB128Mem(T, &in_ptr);
testing.expectEqual(v1, v2);
}
}
fn test_read_uleb128_seq(comptime T: type, comptime N: usize, encoded: []const u8) void {
- var in_stream = std.io.SliceInStream.init(encoded);
+ var in_stream = std.io.fixedBufferStream(encoded);
var in_ptr = encoded.ptr;
var i: usize = 0;
while (i < N) : (i += 1) {
- const v1 = readULEB128(T, &in_stream.stream);
+ const v1 = readULEB128(T, in_stream.inStream());
const v2 = readULEB128Mem(T, &in_ptr);
testing.expectEqual(v1, v2);
}
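
The tests above only swap `SliceInStream` for `fixedBufferStream`; the read pattern itself is unchanged. For reference, a minimal sketch of reading raw bytes through the new in-stream interface; the byte values are the standard ULEB128 example encoding of 624485 and are used only as test data here:

    const std = @import("std");

    test "read bytes from a fixed buffer stream" {
        const encoded = [_]u8{ 0xE5, 0x8E, 0x26 }; // ULEB128 encoding of 624485
        var fbs = std.io.fixedBufferStream(&encoded);
        const in = fbs.inStream();

        std.testing.expect((try in.readByte()) == 0xE5);
        std.testing.expect((try in.readByte()) == 0x8E);
        std.testing.expect((try in.readByte()) == 0x26);
        std.testing.expectError(error.EndOfStream, in.readByte());
    }
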
lib/std/heap/logging_allocator.zig
@@ -1,63 +1,69 @@
const std = @import("../std.zig");
const Allocator = std.mem.Allocator;
-const AnyErrorOutStream = std.io.OutStream(anyerror);
-
/// This allocator is used in front of another allocator and logs to the provided stream
/// on every call to the allocator. Stream errors are ignored.
/// If https://github.com/ziglang/zig/issues/2586 is implemented, this API can be improved.
-pub const LoggingAllocator = struct {
- allocator: Allocator,
- parent_allocator: *Allocator,
- out_stream: *AnyErrorOutStream,
+pub fn LoggingAllocator(comptime OutStreamType: type) type {
+ return struct {
+ allocator: Allocator,
+ parent_allocator: *Allocator,
+ out_stream: OutStreamType,
- const Self = @This();
+ const Self = @This();
- pub fn init(parent_allocator: *Allocator, out_stream: *AnyErrorOutStream) Self {
- return Self{
- .allocator = Allocator{
- .reallocFn = realloc,
- .shrinkFn = shrink,
- },
- .parent_allocator = parent_allocator,
- .out_stream = out_stream,
- };
- }
-
- fn realloc(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) ![]u8 {
- const self = @fieldParentPtr(Self, "allocator", allocator);
- if (old_mem.len == 0) {
- self.out_stream.print("allocation of {} ", .{new_size}) catch {};
- } else {
- self.out_stream.print("resize from {} to {} ", .{ old_mem.len, new_size }) catch {};
+ pub fn init(parent_allocator: *Allocator, out_stream: OutStreamType) Self {
+ return Self{
+ .allocator = Allocator{
+ .reallocFn = realloc,
+ .shrinkFn = shrink,
+ },
+ .parent_allocator = parent_allocator,
+ .out_stream = out_stream,
+ };
}
- const result = self.parent_allocator.reallocFn(self.parent_allocator, old_mem, old_align, new_size, new_align);
- if (result) |buff| {
- self.out_stream.print("success!\n", .{}) catch {};
- } else |err| {
- self.out_stream.print("failure!\n", .{}) catch {};
+
+ fn realloc(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) ![]u8 {
+ const self = @fieldParentPtr(Self, "allocator", allocator);
+ if (old_mem.len == 0) {
+ self.out_stream.print("allocation of {} ", .{new_size}) catch {};
+ } else {
+ self.out_stream.print("resize from {} to {} ", .{ old_mem.len, new_size }) catch {};
+ }
+ const result = self.parent_allocator.reallocFn(self.parent_allocator, old_mem, old_align, new_size, new_align);
+ if (result) |buff| {
+ self.out_stream.print("success!\n", .{}) catch {};
+ } else |err| {
+ self.out_stream.print("failure!\n", .{}) catch {};
+ }
+ return result;
}
- return result;
- }
- fn shrink(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) []u8 {
- const self = @fieldParentPtr(Self, "allocator", allocator);
- const result = self.parent_allocator.shrinkFn(self.parent_allocator, old_mem, old_align, new_size, new_align);
- if (new_size == 0) {
- self.out_stream.print("free of {} bytes success!\n", .{old_mem.len}) catch {};
- } else {
- self.out_stream.print("shrink from {} bytes to {} bytes success!\n", .{ old_mem.len, new_size }) catch {};
+ fn shrink(allocator: *Allocator, old_mem: []u8, old_align: u29, new_size: usize, new_align: u29) []u8 {
+ const self = @fieldParentPtr(Self, "allocator", allocator);
+ const result = self.parent_allocator.shrinkFn(self.parent_allocator, old_mem, old_align, new_size, new_align);
+ if (new_size == 0) {
+ self.out_stream.print("free of {} bytes success!\n", .{old_mem.len}) catch {};
+ } else {
+ self.out_stream.print("shrink from {} bytes to {} bytes success!\n", .{ old_mem.len, new_size }) catch {};
+ }
+ return result;
}
- return result;
- }
-};
+ };
+}
+
+pub fn loggingAllocator(
+ parent_allocator: *Allocator,
+ out_stream: var,
+) LoggingAllocator(@TypeOf(out_stream)) {
+ return LoggingAllocator(@TypeOf(out_stream)).init(parent_allocator, out_stream);
+}
test "LoggingAllocator" {
var buf: [255]u8 = undefined;
- var slice_stream = std.io.SliceOutStream.init(buf[0..]);
- const stream = &slice_stream.stream;
+ var fbs = std.io.fixedBufferStream(&buf);
- const allocator = &LoggingAllocator.init(std.testing.allocator, @ptrCast(*AnyErrorOutStream, stream)).allocator;
+ const allocator = &loggingAllocator(std.testing.allocator, fbs.outStream()).allocator;
const ptr = try allocator.alloc(u8, 10);
allocator.free(ptr);
@@ -66,5 +72,5 @@ test "LoggingAllocator" {
\\allocation of 10 success!
\\free of 10 bytes success!
\\
- , slice_stream.getWritten());
+ , fbs.getWritten());
}
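
A usage sketch of the now-generic allocator wrapper, mirroring the test above. It assumes the helper is re-exported as `std.heap.loggingAllocator`; storing the wrapper in a local variable (rather than taking the address of a temporary) keeps the `@fieldParentPtr` back-reference valid for as long as the allocator is used:

    const std = @import("std");

    test "log allocations into a fixed buffer" {
        var log_buf: [256]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&log_buf);

        // Assumed re-export path; inside the std tree the plain
        // `loggingAllocator` helper from this file is used instead.
        var logging = std.heap.loggingAllocator(std.testing.allocator, fbs.outStream());
        const allocator = &logging.allocator;

        const slice = try allocator.alloc(u8, 16);
        allocator.free(slice);

        // Every call was logged through the generic out_stream field.
        std.testing.expect(fbs.getWritten().len != 0);
    }
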
lib/std/io/bit_in_stream.zig
@@ -0,0 +1,237 @@
+const std = @import("../std.zig");
+const builtin = std.builtin;
+const io = std.io;
+const assert = std.debug.assert;
+const testing = std.testing;
+const trait = std.meta.trait;
+const meta = std.meta;
+const math = std.math;
+
+/// Creates a stream which allows for reading bit fields from another stream
+pub fn BitInStream(endian: builtin.Endian, comptime InStreamType: type) type {
+ return struct {
+ in_stream: InStreamType,
+ bit_buffer: u7,
+ bit_count: u3,
+
+ pub const Error = InStreamType.Error;
+ pub const InStream = io.InStream(*Self, Error, read);
+
+ const Self = @This();
+ const u8_bit_count = comptime meta.bitCount(u8);
+ const u7_bit_count = comptime meta.bitCount(u7);
+ const u4_bit_count = comptime meta.bitCount(u4);
+
+ pub fn init(in_stream: InStreamType) Self {
+ return Self{
+ .in_stream = in_stream,
+ .bit_buffer = 0,
+ .bit_count = 0,
+ };
+ }
+
+ /// Reads `bits` bits from the stream and returns a specified unsigned int type
+ /// containing them in the least significant end, returning an error if the
+ /// specified number of bits could not be read.
+ pub fn readBitsNoEof(self: *Self, comptime U: type, bits: usize) !U {
+ var n: usize = undefined;
+ const result = try self.readBits(U, bits, &n);
+ if (n < bits) return error.EndOfStream;
+ return result;
+ }
+
+ /// Reads `bits` bits from the stream and returns a specified unsigned int type
+ /// containing them in the least significant end. The number of bits successfully
+ /// read is placed in `out_bits`, as reaching the end of the stream is not an error.
+ pub fn readBits(self: *Self, comptime U: type, bits: usize, out_bits: *usize) Error!U {
+ comptime assert(trait.isUnsignedInt(U));
+
+ //by extending the buffer to a minimum of u8 we can cover a number of edge cases
+ // related to shifting and casting.
+ const u_bit_count = comptime meta.bitCount(U);
+ const buf_bit_count = bc: {
+ assert(u_bit_count >= bits);
+ break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
+ };
+ const Buf = std.meta.IntType(false, buf_bit_count);
+ const BufShift = math.Log2Int(Buf);
+
+ out_bits.* = @as(usize, 0);
+ if (U == u0 or bits == 0) return 0;
+ var out_buffer = @as(Buf, 0);
+
+ if (self.bit_count > 0) {
+ const n = if (self.bit_count >= bits) @intCast(u3, bits) else self.bit_count;
+ const shift = u7_bit_count - n;
+ switch (endian) {
+ .Big => {
+ out_buffer = @as(Buf, self.bit_buffer >> shift);
+ self.bit_buffer <<= n;
+ },
+ .Little => {
+ const value = (self.bit_buffer << shift) >> shift;
+ out_buffer = @as(Buf, value);
+ self.bit_buffer >>= n;
+ },
+ }
+ self.bit_count -= n;
+ out_bits.* = n;
+ }
+ //at this point we know bit_buffer is empty
+
+ //copy bytes until we have enough bits, then leave the rest in bit_buffer
+ while (out_bits.* < bits) {
+ const n = bits - out_bits.*;
+ const next_byte = self.in_stream.readByte() catch |err| {
+ if (err == error.EndOfStream) {
+ return @intCast(U, out_buffer);
+ }
+ //@BUG: See #1810. Not sure if the bug is that I have to do this for some
+ // streams, or that I don't for streams with empty error sets.
+ return @errSetCast(Error, err);
+ };
+
+ switch (endian) {
+ .Big => {
+ if (n >= u8_bit_count) {
+ out_buffer <<= @intCast(u3, u8_bit_count - 1);
+ out_buffer <<= 1;
+ out_buffer |= @as(Buf, next_byte);
+ out_bits.* += u8_bit_count;
+ continue;
+ }
+
+ const shift = @intCast(u3, u8_bit_count - n);
+ out_buffer <<= @intCast(BufShift, n);
+ out_buffer |= @as(Buf, next_byte >> shift);
+ out_bits.* += n;
+ self.bit_buffer = @truncate(u7, next_byte << @intCast(u3, n - 1));
+ self.bit_count = shift;
+ },
+ .Little => {
+ if (n >= u8_bit_count) {
+ out_buffer |= @as(Buf, next_byte) << @intCast(BufShift, out_bits.*);
+ out_bits.* += u8_bit_count;
+ continue;
+ }
+
+ const shift = @intCast(u3, u8_bit_count - n);
+ const value = (next_byte << shift) >> shift;
+ out_buffer |= @as(Buf, value) << @intCast(BufShift, out_bits.*);
+ out_bits.* += n;
+ self.bit_buffer = @truncate(u7, next_byte >> @intCast(u3, n));
+ self.bit_count = shift;
+ },
+ }
+ }
+
+ return @intCast(U, out_buffer);
+ }
+
+ pub fn alignToByte(self: *Self) void {
+ self.bit_buffer = 0;
+ self.bit_count = 0;
+ }
+
+ pub fn read(self: *Self, buffer: []u8) Error!usize {
+ var out_bits: usize = undefined;
+ var out_bits_total = @as(usize, 0);
+ //@NOTE: I'm not sure this is a good idea, maybe alignToByte should be forced
+ if (self.bit_count > 0) {
+ for (buffer) |*b, i| {
+ b.* = try self.readBits(u8, u8_bit_count, &out_bits);
+ out_bits_total += out_bits;
+ }
+ const incomplete_byte = @boolToInt(out_bits_total % u8_bit_count > 0);
+ return (out_bits_total / u8_bit_count) + incomplete_byte;
+ }
+
+ return self.in_stream.read(buffer);
+ }
+
+ pub fn inStream(self: *Self) InStream {
+ return .{ .context = self };
+ }
+ };
+}
+
+pub fn bitInStream(
+ comptime endian: builtin.Endian,
+ underlying_stream: var,
+) BitInStream(endian, @TypeOf(underlying_stream)) {
+ return BitInStream(endian, @TypeOf(underlying_stream)).init(underlying_stream);
+}
+
+test "api coverage" {
+ const mem_be = [_]u8{ 0b11001101, 0b00001011 };
+ const mem_le = [_]u8{ 0b00011101, 0b10010101 };
+
+ var mem_in_be = io.fixedBufferStream(&mem_be);
+ var bit_stream_be = bitInStream(.Big, mem_in_be.inStream());
+
+ var out_bits: usize = undefined;
+
+ const expect = testing.expect;
+ const expectError = testing.expectError;
+
+ expect(1 == try bit_stream_be.readBits(u2, 1, &out_bits));
+ expect(out_bits == 1);
+ expect(2 == try bit_stream_be.readBits(u5, 2, &out_bits));
+ expect(out_bits == 2);
+ expect(3 == try bit_stream_be.readBits(u128, 3, &out_bits));
+ expect(out_bits == 3);
+ expect(4 == try bit_stream_be.readBits(u8, 4, &out_bits));
+ expect(out_bits == 4);
+ expect(5 == try bit_stream_be.readBits(u9, 5, &out_bits));
+ expect(out_bits == 5);
+ expect(1 == try bit_stream_be.readBits(u1, 1, &out_bits));
+ expect(out_bits == 1);
+
+ mem_in_be.pos = 0;
+ bit_stream_be.bit_count = 0;
+ expect(0b110011010000101 == try bit_stream_be.readBits(u15, 15, &out_bits));
+ expect(out_bits == 15);
+
+ mem_in_be.pos = 0;
+ bit_stream_be.bit_count = 0;
+ expect(0b1100110100001011 == try bit_stream_be.readBits(u16, 16, &out_bits));
+ expect(out_bits == 16);
+
+ _ = try bit_stream_be.readBits(u0, 0, &out_bits);
+
+ expect(0 == try bit_stream_be.readBits(u1, 1, &out_bits));
+ expect(out_bits == 0);
+ expectError(error.EndOfStream, bit_stream_be.readBitsNoEof(u1, 1));
+
+ var mem_in_le = io.fixedBufferStream(&mem_le);
+ var bit_stream_le = bitInStream(.Little, mem_in_le.inStream());
+
+ expect(1 == try bit_stream_le.readBits(u2, 1, &out_bits));
+ expect(out_bits == 1);
+ expect(2 == try bit_stream_le.readBits(u5, 2, &out_bits));
+ expect(out_bits == 2);
+ expect(3 == try bit_stream_le.readBits(u128, 3, &out_bits));
+ expect(out_bits == 3);
+ expect(4 == try bit_stream_le.readBits(u8, 4, &out_bits));
+ expect(out_bits == 4);
+ expect(5 == try bit_stream_le.readBits(u9, 5, &out_bits));
+ expect(out_bits == 5);
+ expect(1 == try bit_stream_le.readBits(u1, 1, &out_bits));
+ expect(out_bits == 1);
+
+ mem_in_le.pos = 0;
+ bit_stream_le.bit_count = 0;
+ expect(0b001010100011101 == try bit_stream_le.readBits(u15, 15, &out_bits));
+ expect(out_bits == 15);
+
+ mem_in_le.pos = 0;
+ bit_stream_le.bit_count = 0;
+ expect(0b1001010100011101 == try bit_stream_le.readBits(u16, 16, &out_bits));
+ expect(out_bits == 16);
+
+ _ = try bit_stream_le.readBits(u0, 0, &out_bits);
+
+ expect(0 == try bit_stream_le.readBits(u1, 1, &out_bits));
+ expect(out_bits == 0);
+ expectError(error.EndOfStream, bit_stream_le.readBitsNoEof(u1, 1));
+}
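
A smaller sketch of the same reader, showing the common case of pulling two fixed-width fields out of a single byte (big-endian bit order, most significant bits first); it assumes the `std.io.bitInStream` re-export used elsewhere in this commit:

    const std = @import("std");

    test "read a 3-bit and a 5-bit field from one byte" {
        const bytes = [_]u8{0b10111010};
        var fbs = std.io.fixedBufferStream(&bytes);
        var bits = std.io.bitInStream(.Big, fbs.inStream());

        // readBitsNoEof errors out if fewer bits than requested remain.
        std.testing.expect((try bits.readBitsNoEof(u3, 3)) == 0b101);
        std.testing.expect((try bits.readBitsNoEof(u5, 5)) == 0b11010);
    }
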
lib/std/io/bit_out_stream.zig
@@ -0,0 +1,197 @@
+const std = @import("../std.zig");
+const builtin = std.builtin;
+const io = std.io;
+const testing = std.testing;
+const assert = std.debug.assert;
+const trait = std.meta.trait;
+const meta = std.meta;
+const math = std.math;
+
+/// Creates a stream which allows for writing bit fields to another stream
+pub fn BitOutStream(endian: builtin.Endian, comptime OutStreamType: type) type {
+ return struct {
+ out_stream: OutStreamType,
+ bit_buffer: u8,
+ bit_count: u4,
+
+ pub const Error = OutStreamType.Error;
+ pub const OutStream = io.OutStream(*Self, Error, write);
+
+ const Self = @This();
+ const u8_bit_count = comptime meta.bitCount(u8);
+ const u4_bit_count = comptime meta.bitCount(u4);
+
+ pub fn init(out_stream: OutStreamType) Self {
+ return Self{
+ .out_stream = out_stream,
+ .bit_buffer = 0,
+ .bit_count = 0,
+ };
+ }
+
+ /// Write the specified number of bits to the stream from the least significant bits of
+ /// the specified unsigned int value. Bits will only be written to the stream when there
+ /// are enough to fill a byte.
+ pub fn writeBits(self: *Self, value: var, bits: usize) Error!void {
+ if (bits == 0) return;
+
+ const U = @TypeOf(value);
+ comptime assert(trait.isUnsignedInt(U));
+
+ //by extending the buffer to a minimum of u8 we can cover a number of edge cases
+ // related to shifting and casting.
+ const u_bit_count = comptime meta.bitCount(U);
+ const buf_bit_count = bc: {
+ assert(u_bit_count >= bits);
+ break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
+ };
+ const Buf = std.meta.IntType(false, buf_bit_count);
+ const BufShift = math.Log2Int(Buf);
+
+ const buf_value = @intCast(Buf, value);
+
+ const high_byte_shift = @intCast(BufShift, buf_bit_count - u8_bit_count);
+ var in_buffer = switch (endian) {
+ .Big => buf_value << @intCast(BufShift, buf_bit_count - bits),
+ .Little => buf_value,
+ };
+ var in_bits = bits;
+
+ if (self.bit_count > 0) {
+ const bits_remaining = u8_bit_count - self.bit_count;
+ const n = @intCast(u3, if (bits_remaining > bits) bits else bits_remaining);
+ switch (endian) {
+ .Big => {
+ const shift = @intCast(BufShift, high_byte_shift + self.bit_count);
+ const v = @intCast(u8, in_buffer >> shift);
+ self.bit_buffer |= v;
+ in_buffer <<= n;
+ },
+ .Little => {
+ const v = @truncate(u8, in_buffer) << @intCast(u3, self.bit_count);
+ self.bit_buffer |= v;
+ in_buffer >>= n;
+ },
+ }
+ self.bit_count += n;
+ in_bits -= n;
+
+ //if we didn't fill the buffer, it's because bits < bits_remaining;
+ if (self.bit_count != u8_bit_count) return;
+ try self.out_stream.writeByte(self.bit_buffer);
+ self.bit_buffer = 0;
+ self.bit_count = 0;
+ }
+ //at this point we know bit_buffer is empty
+
+ //copy bytes until we can't fill one anymore, then leave the rest in bit_buffer
+ while (in_bits >= u8_bit_count) {
+ switch (endian) {
+ .Big => {
+ const v = @intCast(u8, in_buffer >> high_byte_shift);
+ try self.out_stream.writeByte(v);
+ in_buffer <<= @intCast(u3, u8_bit_count - 1);
+ in_buffer <<= 1;
+ },
+ .Little => {
+ const v = @truncate(u8, in_buffer);
+ try self.out_stream.writeByte(v);
+ in_buffer >>= @intCast(u3, u8_bit_count - 1);
+ in_buffer >>= 1;
+ },
+ }
+ in_bits -= u8_bit_count;
+ }
+
+ if (in_bits > 0) {
+ self.bit_count = @intCast(u4, in_bits);
+ self.bit_buffer = switch (endian) {
+ .Big => @truncate(u8, in_buffer >> high_byte_shift),
+ .Little => @truncate(u8, in_buffer),
+ };
+ }
+ }
+
+ /// Flush any remaining bits to the stream.
+ pub fn flushBits(self: *Self) Error!void {
+ if (self.bit_count == 0) return;
+ try self.out_stream.writeByte(self.bit_buffer);
+ self.bit_buffer = 0;
+ self.bit_count = 0;
+ }
+
+ pub fn write(self: *Self, buffer: []const u8) Error!usize {
+ // TODO: I'm not sure this is a good idea, maybe flushBits should be forced
+ if (self.bit_count > 0) {
+ for (buffer) |b, i|
+ try self.writeBits(b, u8_bit_count);
+ return buffer.len;
+ }
+
+ return self.out_stream.write(buffer);
+ }
+
+ pub fn outStream(self: *Self) OutStream {
+ return .{ .context = self };
+ }
+ };
+}
+
+pub fn bitOutStream(
+ comptime endian: builtin.Endian,
+ underlying_stream: var,
+) BitOutStream(endian, @TypeOf(underlying_stream)) {
+ return BitOutStream(endian, @TypeOf(underlying_stream)).init(underlying_stream);
+}
+
+test "api coverage" {
+ var mem_be = [_]u8{0} ** 2;
+ var mem_le = [_]u8{0} ** 2;
+
+ var mem_out_be = io.fixedBufferStream(&mem_be);
+ var bit_stream_be = bitOutStream(.Big, mem_out_be.outStream());
+
+ try bit_stream_be.writeBits(@as(u2, 1), 1);
+ try bit_stream_be.writeBits(@as(u5, 2), 2);
+ try bit_stream_be.writeBits(@as(u128, 3), 3);
+ try bit_stream_be.writeBits(@as(u8, 4), 4);
+ try bit_stream_be.writeBits(@as(u9, 5), 5);
+ try bit_stream_be.writeBits(@as(u1, 1), 1);
+
+ testing.expect(mem_be[0] == 0b11001101 and mem_be[1] == 0b00001011);
+
+ mem_out_be.pos = 0;
+
+ try bit_stream_be.writeBits(@as(u15, 0b110011010000101), 15);
+ try bit_stream_be.flushBits();
+ testing.expect(mem_be[0] == 0b11001101 and mem_be[1] == 0b00001010);
+
+ mem_out_be.pos = 0;
+ try bit_stream_be.writeBits(@as(u32, 0b110011010000101), 16);
+ testing.expect(mem_be[0] == 0b01100110 and mem_be[1] == 0b10000101);
+
+ try bit_stream_be.writeBits(@as(u0, 0), 0);
+
+ var mem_out_le = io.fixedBufferStream(&mem_le);
+ var bit_stream_le = bitOutStream(.Little, mem_out_le.outStream());
+
+ try bit_stream_le.writeBits(@as(u2, 1), 1);
+ try bit_stream_le.writeBits(@as(u5, 2), 2);
+ try bit_stream_le.writeBits(@as(u128, 3), 3);
+ try bit_stream_le.writeBits(@as(u8, 4), 4);
+ try bit_stream_le.writeBits(@as(u9, 5), 5);
+ try bit_stream_le.writeBits(@as(u1, 1), 1);
+
+ testing.expect(mem_le[0] == 0b00011101 and mem_le[1] == 0b10010101);
+
+ mem_out_le.pos = 0;
+ try bit_stream_le.writeBits(@as(u15, 0b110011010000101), 15);
+ try bit_stream_le.flushBits();
+ testing.expect(mem_le[0] == 0b10000101 and mem_le[1] == 0b01100110);
+
+ mem_out_le.pos = 0;
+ try bit_stream_le.writeBits(@as(u32, 0b1100110100001011), 16);
+ testing.expect(mem_le[0] == 0b00001011 and mem_le[1] == 0b11001101);
+
+ try bit_stream_le.writeBits(@as(u0, 0), 0);
+}
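
And the mirror image for writing, packing the same two fields back into one byte. `flushBits` is what commits any trailing partial byte; here it is a no-op because exactly eight bits were written. Assumes the `std.io.bitOutStream` re-export used elsewhere in this commit:

    const std = @import("std");

    test "pack a 3-bit and a 5-bit field into one byte" {
        var byte = [_]u8{0};
        var fbs = std.io.fixedBufferStream(&byte);
        var bits = std.io.bitOutStream(.Big, fbs.outStream());

        try bits.writeBits(@as(u3, 0b101), 3);
        try bits.writeBits(@as(u5, 0b11010), 5);
        try bits.flushBits(); // nothing pending: the byte was emitted on the second call

        std.testing.expect(byte[0] == 0b10111010);
    }
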
lib/std/io/buffered_in_stream.zig
@@ -1,7 +1,9 @@
const std = @import("../std.zig");
const io = std.io;
+const assert = std.debug.assert;
+const testing = std.testing;
-pub fn BufferedInStream(comptime buffer_size: usize, comptime InStreamType) type {
+pub fn BufferedInStream(comptime buffer_size: usize, comptime InStreamType: type) type {
return struct {
unbuffered_in_stream: InStreamType,
fifo: FifoType = FifoType.init(),
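
Only the missing `: type` on the `InStreamType` parameter changes here. For context, a minimal sketch of the buffered reader in the new style, wrapping a fixed buffer stream purely so the example is self-contained (it assumes the `std.io.bufferedInStream` helper used later in this commit's io/test.zig changes):

    const std = @import("std");

    test "buffered reads pass through unchanged" {
        var fbs = std.io.fixedBufferStream("abc");
        var buffered = std.io.bufferedInStream(fbs.inStream());
        const in = buffered.inStream();

        std.testing.expect((try in.readByte()) == 'a');
        std.testing.expect((try in.readByte()) == 'b');
        std.testing.expect((try in.readByte()) == 'c');
        std.testing.expectError(error.EndOfStream, in.readByte());
    }
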
lib/std/io/c_out_stream.zig
@@ -0,0 +1,44 @@
+const std = @import("../std.zig");
+const builtin = std.builtin;
+const io = std.io;
+const os = std.os;
+const fs = std.fs;
+const testing = std.testing;
+
+pub const COutStream = io.OutStream(*std.c.FILE, std.fs.File.WriteError, cOutStreamWrite);
+
+pub fn cOutStream(c_file: *std.c.FILE) COutStream {
+ return .{ .context = c_file };
+}
+
+fn cOutStreamWrite(c_file: *std.c.FILE, bytes: []const u8) std.fs.File.WriteError!usize {
+ const amt_written = std.c.fwrite(bytes.ptr, 1, bytes.len, c_file);
+ if (amt_written >= 0) return amt_written;
+ switch (std.c._errno().*) {
+ 0 => unreachable,
+ os.EINVAL => unreachable,
+ os.EFAULT => unreachable,
+ os.EAGAIN => unreachable, // this is a blocking API
+ os.EBADF => unreachable, // always a race condition
+ os.EDESTADDRREQ => unreachable, // connect was never called
+ os.EDQUOT => return error.DiskQuota,
+ os.EFBIG => return error.FileTooBig,
+ os.EIO => return error.InputOutput,
+ os.ENOSPC => return error.NoSpaceLeft,
+ os.EPERM => return error.AccessDenied,
+ os.EPIPE => return error.BrokenPipe,
+ else => |err| return os.unexpectedErrno(@intCast(usize, err)),
+ }
+}
+
+test "" {
+ if (!builtin.link_libc) return error.SkipZigTest;
+
+ const filename = "tmp_io_test_file.txt";
+ const out_file = std.c.fopen(filename, "w") orelse return error.UnableToOpenTestFile;
+ defer {
+ _ = std.c.fclose(out_file);
+ fs.cwd().deleteFileC(filename) catch {};
+ }
+
+ const out_stream = cOutStream(out_file);
+ try out_stream.print("hi: {}\n", .{@as(i32, 123)});
+}
lib/std/io/counting_out_stream.zig
@@ -1,5 +1,6 @@
const std = @import("../std.zig");
const io = std.io;
+const testing = std.testing;
/// An OutStream that counts how many bytes has been written to it.
pub fn CountingOutStream(comptime OutStreamType: type) type {
@@ -12,13 +13,6 @@ pub fn CountingOutStream(comptime OutStreamType: type) type {
const Self = @This();
- pub fn init(child_stream: OutStreamType) Self {
- return Self{
- .bytes_written = 0,
- .child_stream = child_stream,
- };
- }
-
pub fn write(self: *Self, bytes: []const u8) Error!usize {
const amt = try self.child_stream.write(bytes);
self.bytes_written += amt;
@@ -31,12 +25,15 @@ pub fn CountingOutStream(comptime OutStreamType: type) type {
};
}
+pub fn countingOutStream(child_stream: var) CountingOutStream(@TypeOf(child_stream)) {
+ return .{ .bytes_written = 0, .child_stream = child_stream };
+}
+
test "io.CountingOutStream" {
- var counting_stream = CountingOutStream(NullOutStream.Error).init(std.io.null_out_stream);
- const stream = &counting_stream.stream;
+ var counting_stream = countingOutStream(std.io.null_out_stream);
+ const stream = counting_stream.outStream();
- const bytes = "yay" ** 10000;
- stream.write(bytes) catch unreachable;
+ const bytes = "yay" ** 100;
+ stream.writeAll(bytes) catch unreachable;
testing.expect(counting_stream.bytes_written == bytes.len);
}
-
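
A sketch of the typical use of the counting wrapper: measuring how many bytes a formatted write would produce without storing them, by pairing the counter with `std.io.null_out_stream` (it assumes `countingOutStream` is re-exported through `std.io`):

    const std = @import("std");

    test "count formatted output without buffering it" {
        var counting = std.io.countingOutStream(std.io.null_out_stream);
        try counting.outStream().print("{}: {}\n", .{ "value", @as(u32, 1234) });
        std.testing.expect(counting.bytes_written == "value: 1234\n".len);
    }
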
lib/std/io/fixed_buffer_stream.zig
@@ -1,9 +1,11 @@
const std = @import("../std.zig");
const io = std.io;
const testing = std.testing;
+const mem = std.mem;
+const assert = std.debug.assert;
-/// This turns a slice into an `io.OutStream`, `io.InStream`, or `io.SeekableStream`.
-/// If the supplied slice is const, then `io.OutStream` is not available.
+/// This turns a byte buffer into an `io.OutStream`, `io.InStream`, or `io.SeekableStream`.
+/// If the supplied byte buffer is const, then `io.OutStream` is not available.
pub fn FixedBufferStream(comptime Buffer: type) type {
return struct {
/// `Buffer` is either a `[]u8` or `[]const u8`.
@@ -46,7 +48,7 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
const size = std.math.min(dest.len, self.buffer.len - self.pos);
const end = self.pos + size;
- std.mem.copy(u8, dest[0..size], self.buffer[self.pos..end]);
+ mem.copy(u8, dest[0..size], self.buffer[self.pos..end]);
self.pos = end;
if (size == 0) return error.EndOfStream;
@@ -65,7 +67,7 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
else
self.buffer.len - self.pos;
- std.mem.copy(u8, self.buffer[self.pos .. self.pos + n], bytes[0..n]);
+ mem.copy(u8, self.buffer[self.pos .. self.pos + n], bytes[0..n]);
self.pos += n;
if (n == 0) return error.OutOfMemory;
@@ -100,7 +102,7 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
}
pub fn getWritten(self: Self) []const u8 {
- return self.slice[0..self.pos];
+ return self.buffer[0..self.pos];
}
pub fn reset(self: *Self) void {
@@ -110,16 +112,16 @@ pub fn FixedBufferStream(comptime Buffer: type) type {
}
pub fn fixedBufferStream(buffer: var) FixedBufferStream(NonSentinelSpan(@TypeOf(buffer))) {
- return .{ .buffer = std.mem.span(buffer), .pos = 0 };
+ return .{ .buffer = mem.span(buffer), .pos = 0 };
}
fn NonSentinelSpan(comptime T: type) type {
- var ptr_info = @typeInfo(std.mem.Span(T)).Pointer;
+ var ptr_info = @typeInfo(mem.Span(T)).Pointer;
ptr_info.sentinel = null;
return @Type(std.builtin.TypeInfo{ .Pointer = ptr_info });
}
-test "FixedBufferStream" {
+test "FixedBufferStream output" {
var buf: [255]u8 = undefined;
var fbs = fixedBufferStream(&buf);
const stream = fbs.outStream();
@@ -127,3 +129,41 @@ test "FixedBufferStream" {
try stream.print("{}{}!", .{ "Hello", "World" });
testing.expectEqualSlices(u8, "HelloWorld!", fbs.getWritten());
}
+
+test "FixedBufferStream output 2" {
+ var buffer: [10]u8 = undefined;
+ var fbs = fixedBufferStream(&buffer);
+
+ try fbs.outStream().writeAll("Hello");
+ testing.expect(mem.eql(u8, fbs.getWritten(), "Hello"));
+
+ try fbs.outStream().writeAll("world");
+ testing.expect(mem.eql(u8, fbs.getWritten(), "Helloworld"));
+
+ testing.expectError(error.OutOfMemory, fbs.outStream().writeAll("!"));
+ testing.expect(mem.eql(u8, fbs.getWritten(), "Helloworld"));
+
+ fbs.reset();
+ testing.expect(fbs.getWritten().len == 0);
+
+ testing.expectError(error.OutOfMemory, fbs.outStream().writeAll("Hello world!"));
+ testing.expect(mem.eql(u8, fbs.getWritten(), "Hello worl"));
+}
+
+test "FixedBufferStream input" {
+ const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7 };
+ var fbs = fixedBufferStream(&bytes);
+
+ var dest: [4]u8 = undefined;
+
+ var read = try fbs.inStream().read(dest[0..4]);
+ testing.expect(read == 4);
+ testing.expect(mem.eql(u8, dest[0..4], bytes[0..4]));
+
+ read = try fbs.inStream().read(dest[0..4]);
+ testing.expect(read == 3);
+ testing.expect(mem.eql(u8, dest[0..3], bytes[4..7]));
+
+ read = try fbs.inStream().read(dest[0..4]);
+ testing.expect(read == 0);
+}
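
One more sketch combining both directions through a single buffer: write with the out stream, rewind with `reset`, then read the same bytes back with the in stream:

    const std = @import("std");

    test "FixedBufferStream round trip" {
        var buf: [32]u8 = undefined;
        var fbs = std.io.fixedBufferStream(&buf);

        try fbs.outStream().writeAll("ping");

        fbs.reset(); // rewinds `pos` to 0; the written bytes stay in `buf`
        var tmp: [4]u8 = undefined;
        std.testing.expect((try fbs.inStream().read(&tmp)) == 4);
        std.testing.expectEqualSlices(u8, "ping", &tmp);
    }
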
lib/std/io/in_stream.zig
@@ -273,8 +273,7 @@ pub fn InStream(
test "InStream" {
var buf = "a\x02".*;
- var slice_stream = std.io.SliceInStream.init(&buf);
- const in_stream = &slice_stream.stream;
+ const in_stream = std.io.fixedBufferStream(&buf).inStream();
testing.expect((try in_stream.readByte()) == 'a');
testing.expect((try in_stream.readEnum(enum(u8) {
a = 0,
lib/std/io/peek_stream.zig
@@ -0,0 +1,112 @@
+const std = @import("../std.zig");
+const io = std.io;
+const mem = std.mem;
+const testing = std.testing;
+
+/// Creates a stream which supports 'un-reading' data, so that it can be read again.
+/// This makes look-ahead style parsing much easier.
+/// TODO merge this with `std.io.BufferedInStream`: https://github.com/ziglang/zig/issues/4501
+pub fn PeekStream(
+ comptime buffer_type: std.fifo.LinearFifoBufferType,
+ comptime InStreamType: type,
+) type {
+ return struct {
+ unbuffered_in_stream: InStreamType,
+ fifo: FifoType,
+
+ pub const Error = InStreamType.Error;
+ pub const InStream = io.InStream(*Self, Error, read);
+
+ const Self = @This();
+ const FifoType = std.fifo.LinearFifo(u8, buffer_type);
+
+ pub usingnamespace switch (buffer_type) {
+ .Static => struct {
+ pub fn init(base: InStreamType) Self {
+ return .{
+ .unbuffered_in_stream = base,
+ .fifo = FifoType.init(),
+ };
+ }
+ },
+ .Slice => struct {
+ pub fn init(base: InStreamType, buf: []u8) Self {
+ return .{
+ .unbuffered_in_stream = base,
+ .fifo = FifoType.init(buf),
+ };
+ }
+ },
+ .Dynamic => struct {
+ pub fn init(base: InStreamType, allocator: *mem.Allocator) Self {
+ return .{
+ .unbuffered_in_stream = base,
+ .fifo = FifoType.init(allocator),
+ };
+ }
+ },
+ };
+
+ pub fn putBackByte(self: *Self, byte: u8) !void {
+ try self.putBack(&[_]u8{byte});
+ }
+
+ pub fn putBack(self: *Self, bytes: []const u8) !void {
+ try self.fifo.unget(bytes);
+ }
+
+ pub fn read(self: *Self, dest: []u8) Error!usize {
+ // copy over anything putBack()'d
+ var dest_index = self.fifo.read(dest);
+ if (dest_index == dest.len) return dest_index;
+
+ // ask the backing stream for more
+ dest_index += try self.unbuffered_in_stream.read(dest[dest_index..]);
+ return dest_index;
+ }
+
+ pub fn inStream(self: *Self) InStream {
+ return .{ .context = self };
+ }
+ };
+}
+
+pub fn peekStream(
+ comptime lookahead: comptime_int,
+ underlying_stream: var,
+) PeekStream(.{ .Static = lookahead }, @TypeOf(underlying_stream)) {
+ return PeekStream(.{ .Static = lookahead }, @TypeOf(underlying_stream)).init(underlying_stream);
+}
+
+test "PeekStream" {
+ const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
+ var fbs = io.fixedBufferStream(&bytes);
+ var ps = peekStream(2, fbs.inStream());
+
+ var dest: [4]u8 = undefined;
+
+ try ps.putBackByte(9);
+ try ps.putBackByte(10);
+
+ var read = try ps.inStream().read(dest[0..4]);
+ testing.expect(read == 4);
+ testing.expect(dest[0] == 10);
+ testing.expect(dest[1] == 9);
+ testing.expect(mem.eql(u8, dest[2..4], bytes[0..2]));
+
+ read = try ps.inStream().read(dest[0..4]);
+ testing.expect(read == 4);
+ testing.expect(mem.eql(u8, dest[0..4], bytes[2..6]));
+
+ read = try ps.inStream().read(dest[0..4]);
+ testing.expect(read == 2);
+ testing.expect(mem.eql(u8, dest[0..2], bytes[6..8]));
+
+ try ps.putBackByte(11);
+ try ps.putBackByte(12);
+
+ read = try ps.inStream().read(dest[0..4]);
+ testing.expect(read == 2);
+ testing.expect(dest[0] == 12);
+ testing.expect(dest[1] == 11);
+}
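
A sketch of the look-ahead pattern the doc comment describes: read one byte to decide what it is, and push it back if the parser is not ready to consume it. `peekStream(1, ...)` reserves one byte of put-back space; the example assumes the `std.io.peekStream` re-export:

    const std = @import("std");

    test "peek at a leading sign character" {
        var fbs = std.io.fixedBufferStream("+42");
        var ps = std.io.peekStream(1, fbs.inStream());
        const in = ps.inStream();

        const first = try in.readByte();
        const negative = first == '-';
        if (first != '-' and first != '+') try ps.putBackByte(first);

        std.testing.expect(!negative);
        std.testing.expect((try in.readByte()) == '4');
        std.testing.expect((try in.readByte()) == '2');
    }
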
lib/std/io/serialization.zig
@@ -0,0 +1,606 @@
+const std = @import("../std.zig");
+const builtin = std.builtin;
+const io = std.io;
+const math = std.math;
+const meta = std.meta;
+const trait = meta.trait;
+const assert = std.debug.assert;
+const testing = std.testing;
+const expect = testing.expect;
+const expectError = testing.expectError;
+
+pub const Packing = enum {
+ /// Pack data to byte alignment
+ Byte,
+
+ /// Pack data to bit alignment
+ Bit,
+};
+
+/// Creates a deserializer that deserializes types from any stream.
+/// If `packing` is `.Bit`, the data stream is treated as bit-packed,
+/// otherwise data is expected to be packed to the smallest byte.
+/// Types may implement a custom deserialization routine with a
+/// function named `deserialize` in the form of:
+/// pub fn deserialize(self: *Self, deserializer: var) !void
+/// which will be called when the deserializer is used to deserialize
+/// that type. It will pass a pointer to the type instance to deserialize
+/// into and a pointer to the deserializer struct.
+pub fn Deserializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime InStreamType: type) type {
+ return struct {
+ in_stream: if (packing == .Bit) io.BitInStream(endian, InStreamType) else InStreamType,
+
+ const Self = @This();
+
+ pub fn init(in_stream: InStreamType) Self {
+ return Self{
+ .in_stream = switch (packing) {
+ .Bit => io.bitInStream(endian, in_stream),
+ .Byte => in_stream,
+ },
+ };
+ }
+
+ pub fn alignToByte(self: *Self) void {
+ if (packing == .Byte) return;
+ self.in_stream.alignToByte();
+ }
+
+ //@BUG: inferred error issue. See: #1386
+ fn deserializeInt(self: *Self, comptime T: type) (InStreamType.Error || error{EndOfStream})!T {
+ comptime assert(trait.is(.Int)(T) or trait.is(.Float)(T));
+
+ const u8_bit_count = 8;
+ const t_bit_count = comptime meta.bitCount(T);
+
+ const U = std.meta.IntType(false, t_bit_count);
+ const Log2U = math.Log2Int(U);
+ const int_size = (U.bit_count + 7) / 8;
+
+ if (packing == .Bit) {
+ const result = try self.in_stream.readBitsNoEof(U, t_bit_count);
+ return @bitCast(T, result);
+ }
+
+ var buffer: [int_size]u8 = undefined;
+ const read_size = try self.in_stream.read(buffer[0..]);
+ if (read_size < int_size) return error.EndOfStream;
+
+ if (int_size == 1) {
+ if (t_bit_count == 8) return @bitCast(T, buffer[0]);
+ const PossiblySignedByte = std.meta.IntType(T.is_signed, 8);
+ return @truncate(T, @bitCast(PossiblySignedByte, buffer[0]));
+ }
+
+ var result = @as(U, 0);
+ for (buffer) |byte, i| {
+ switch (endian) {
+ .Big => {
+ result = (result << u8_bit_count) | byte;
+ },
+ .Little => {
+ result |= @as(U, byte) << @intCast(Log2U, u8_bit_count * i);
+ },
+ }
+ }
+
+ return @bitCast(T, result);
+ }
+
+ /// Deserializes and returns data of the specified type from the stream
+ pub fn deserialize(self: *Self, comptime T: type) !T {
+ var value: T = undefined;
+ try self.deserializeInto(&value);
+ return value;
+ }
+
+ /// Deserializes data into the type pointed to by `ptr`
+ pub fn deserializeInto(self: *Self, ptr: var) !void {
+ const T = @TypeOf(ptr);
+ comptime assert(trait.is(.Pointer)(T));
+
+ if (comptime trait.isSlice(T) or comptime trait.isPtrTo(.Array)(T)) {
+ for (ptr) |*v|
+ try self.deserializeInto(v);
+ return;
+ }
+
+ comptime assert(trait.isSingleItemPtr(T));
+
+ const C = comptime meta.Child(T);
+ const child_type_id = @typeInfo(C);
+
+ //custom deserializer: fn(self: *Self, deserializer: var) !void
+ if (comptime trait.hasFn("deserialize")(C)) return C.deserialize(ptr, self);
+
+ if (comptime trait.isPacked(C) and packing != .Bit) {
+ var packed_deserializer = deserializer(endian, .Bit, self.in_stream);
+ return packed_deserializer.deserializeInto(ptr);
+ }
+
+ switch (child_type_id) {
+ .Void => return,
+ .Bool => ptr.* = (try self.deserializeInt(u1)) > 0,
+ .Float, .Int => ptr.* = try self.deserializeInt(C),
+ .Struct => {
+ const info = @typeInfo(C).Struct;
+
+ inline for (info.fields) |*field_info| {
+ const name = field_info.name;
+ const FieldType = field_info.field_type;
+
+ if (FieldType == void or FieldType == u0) continue;
+
+ //it doesn't make any sense to read pointers
+ if (comptime trait.is(.Pointer)(FieldType)) {
+ @compileError("Will not " ++ "read field " ++ name ++ " of struct " ++
+ @typeName(C) ++ " because it " ++ "is of pointer-type " ++
+ @typeName(FieldType) ++ ".");
+ }
+
+ try self.deserializeInto(&@field(ptr, name));
+ }
+ },
+ .Union => {
+ const info = @typeInfo(C).Union;
+ if (info.tag_type) |TagType| {
+ //we avoid duplicate iteration over the enum tags
+ // by getting the int directly and casting it without
+ // safety. If it is bad, it will be caught anyway.
+ const TagInt = @TagType(TagType);
+ const tag = try self.deserializeInt(TagInt);
+
+ inline for (info.fields) |field_info| {
+ if (field_info.enum_field.?.value == tag) {
+ const name = field_info.name;
+ const FieldType = field_info.field_type;
+ ptr.* = @unionInit(C, name, undefined);
+ try self.deserializeInto(&@field(ptr, name));
+ return;
+ }
+ }
+ //This is reachable if the enum data is bad
+ return error.InvalidEnumTag;
+ }
+ @compileError("Cannot meaningfully deserialize " ++ @typeName(C) ++
+ " because it is an untagged union. Use a custom deserialize().");
+ },
+ .Optional => {
+ const OC = comptime meta.Child(C);
+ const exists = (try self.deserializeInt(u1)) > 0;
+ if (!exists) {
+ ptr.* = null;
+ return;
+ }
+
+ ptr.* = @as(OC, undefined); //make it non-null so the following .? is guaranteed safe
+ const val_ptr = &ptr.*.?;
+ try self.deserializeInto(val_ptr);
+ },
+ .Enum => {
+ var value = try self.deserializeInt(@TagType(C));
+ ptr.* = try meta.intToEnum(C, value);
+ },
+ else => {
+ @compileError("Cannot deserialize " ++ @tagName(child_type_id) ++ " types (unimplemented).");
+ },
+ }
+ }
+ };
+}
+
+pub fn deserializer(
+ comptime endian: builtin.Endian,
+ comptime packing: Packing,
+ in_stream: var,
+) Deserializer(endian, packing, @TypeOf(in_stream)) {
+ return Deserializer(endian, packing, @TypeOf(in_stream)).init(in_stream);
+}
+
+/// Creates a serializer that serializes types to any stream.
+/// If `packing` is `.Bit`, the data will be bit-packed into the stream.
+/// Note that you must call `serializer.flush()` when you are done
+/// writing bit-packed data in order to ensure any unwritten bits are committed.
+/// If `packing` is `.Byte`, data is packed to the smallest byte. In the case
+/// of packed structs, the struct will be written bit-packed and with the specified
+/// endianness, after which data will resume being written at the next byte boundary.
+/// Types may implement a custom serialization routine with a
+/// function named `serialize` in the form of:
+/// pub fn serialize(self: Self, serializer: var) !void
+/// which will be called when the serializer is used to serialize that type. It will
+/// pass a const pointer to the type instance to be serialized and a pointer
+/// to the serializer struct.
+pub fn Serializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime OutStreamType: type) type {
+ return struct {
+ out_stream: if (packing == .Bit) BitOutStream(endian, OutStreamType) else OutStreamType,
+
+ const Self = @This();
+ pub const Error = OutStreamType.Error;
+
+ pub fn init(out_stream: OutStreamType) Self {
+ return Self{
+ .out_stream = switch (packing) {
+ .Bit => io.bitOutStream(endian, out_stream),
+ .Byte => out_stream,
+ },
+ };
+ }
+
+ /// Flushes any unwritten bits to the stream
+ pub fn flush(self: *Self) Error!void {
+ if (packing == .Bit) return self.out_stream.flushBits();
+ }
+
+ fn serializeInt(self: *Self, value: var) Error!void {
+ const T = @TypeOf(value);
+ comptime assert(trait.is(.Int)(T) or trait.is(.Float)(T));
+
+ const t_bit_count = comptime meta.bitCount(T);
+ const u8_bit_count = comptime meta.bitCount(u8);
+
+ const U = std.meta.IntType(false, t_bit_count);
+ const Log2U = math.Log2Int(U);
+ const int_size = (U.bit_count + 7) / 8;
+
+ const u_value = @bitCast(U, value);
+
+ if (packing == .Bit) return self.out_stream.writeBits(u_value, t_bit_count);
+
+ var buffer: [int_size]u8 = undefined;
+ if (int_size == 1) buffer[0] = u_value;
+
+ for (buffer) |*byte, i| {
+ const idx = switch (endian) {
+ .Big => int_size - i - 1,
+ .Little => i,
+ };
+ const shift = @intCast(Log2U, idx * u8_bit_count);
+ const v = u_value >> shift;
+ byte.* = if (t_bit_count < u8_bit_count) v else @truncate(u8, v);
+ }
+
+ try self.out_stream.writeAll(&buffer);
+ }
+
+ /// Serializes the passed value into the stream
+ pub fn serialize(self: *Self, value: var) Error!void {
+ const T = comptime @TypeOf(value);
+
+ if (comptime trait.isIndexable(T)) {
+ for (value) |v|
+ try self.serialize(v);
+ return;
+ }
+
+ //custom serializer: fn(self: Self, serializer: var) !void
+ if (comptime trait.hasFn("serialize")(T)) return T.serialize(value, self);
+
+ if (comptime trait.isPacked(T) and packing != .Bit) {
+ var packed_serializer = serializer(endian, .Bit, self.out_stream);
+ try packed_serializer.serialize(value);
+ try packed_serializer.flush();
+ return;
+ }
+
+ switch (@typeInfo(T)) {
+ .Void => return,
+ .Bool => try self.serializeInt(@as(u1, @boolToInt(value))),
+ .Float, .Int => try self.serializeInt(value),
+ .Struct => {
+ const info = @typeInfo(T);
+
+ inline for (info.Struct.fields) |*field_info| {
+ const name = field_info.name;
+ const FieldType = field_info.field_type;
+
+ if (FieldType == void or FieldType == u0) continue;
+
+ //It doesn't make sense to write pointers
+ if (comptime trait.is(.Pointer)(FieldType)) {
+ @compileError("Will not " ++ "serialize field " ++ name ++
+ " of struct " ++ @typeName(T) ++ " because it " ++
+ "is of pointer-type " ++ @typeName(FieldType) ++ ".");
+ }
+ try self.serialize(@field(value, name));
+ }
+ },
+ .Union => {
+ const info = @typeInfo(T).Union;
+ if (info.tag_type) |TagType| {
+ const active_tag = meta.activeTag(value);
+ try self.serialize(active_tag);
+ //This inline loop is necessary because active_tag is a runtime
+ // value, but @field requires a comptime value. Our alternative
+ // is to check each field for a match
+ inline for (info.fields) |field_info| {
+ if (field_info.enum_field.?.value == @enumToInt(active_tag)) {
+ const name = field_info.name;
+ const FieldType = field_info.field_type;
+ try self.serialize(@field(value, name));
+ return;
+ }
+ }
+ unreachable;
+ }
+ @compileError("Cannot meaningfully serialize " ++ @typeName(T) ++
+ " because it is an untagged union. Use a custom serialize().");
+ },
+ .Optional => {
+ if (value == null) {
+ try self.serializeInt(@as(u1, @boolToInt(false)));
+ return;
+ }
+ try self.serializeInt(@as(u1, @boolToInt(true)));
+
+ const OC = comptime meta.Child(T);
+ const val_ptr = &value.?;
+ try self.serialize(val_ptr.*);
+ },
+ .Enum => {
+ try self.serializeInt(@enumToInt(value));
+ },
+ else => @compileError("Cannot serialize " ++ @tagName(@typeInfo(T)) ++ " types (unimplemented)."),
+ }
+ }
+ };
+}
+
+pub fn serializer(
+ comptime endian: builtin.Endian,
+ comptime packing: Packing,
+ out_stream: var,
+) Serializer(endian, packing, @TypeOf(out_stream)) {
+ return Serializer(endian, packing, @TypeOf(out_stream)).init(out_stream);
+}
+
+fn testIntSerializerDeserializer(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
+ @setEvalBranchQuota(1500);
+ //@NOTE: if this test is taking too long, reduce the maximum tested bitsize
+ const max_test_bitsize = 128;
+
+ const total_bytes = comptime blk: {
+ var bytes = 0;
+ comptime var i = 0;
+ while (i <= max_test_bitsize) : (i += 1) bytes += (i / 8) + @boolToInt(i % 8 > 0);
+ break :blk bytes * 2;
+ };
+
+ var data_mem: [total_bytes]u8 = undefined;
+ var out = io.fixedBufferStream(&data_mem);
+ var serializer = serializer(endian, packing, out.outStream());
+
+ var in = io.fixedBufferStream(&data_mem);
+ var deserializer = deserializer(endian, packing, in.inStream());
+
+ comptime var i = 0;
+ inline while (i <= max_test_bitsize) : (i += 1) {
+ const U = std.meta.IntType(false, i);
+ const S = std.meta.IntType(true, i);
+ try serializer.serializeInt(@as(U, i));
+ if (i != 0) try serializer.serializeInt(@as(S, -1)) else try serializer.serialize(@as(S, 0));
+ }
+ try serializer.flush();
+
+ i = 0;
+ inline while (i <= max_test_bitsize) : (i += 1) {
+ const U = std.meta.IntType(false, i);
+ const S = std.meta.IntType(true, i);
+ const x = try deserializer.deserializeInt(U);
+ const y = try deserializer.deserializeInt(S);
+ expect(x == @as(U, i));
+ if (i != 0) expect(y == @as(S, -1)) else expect(y == 0);
+ }
+
+ const u8_bit_count = comptime meta.bitCount(u8);
+ //0 + 1 + 2 + ... n = (n * (n + 1)) / 2
+ //and we have each for unsigned and signed, so * 2
+ const total_bits = (max_test_bitsize * (max_test_bitsize + 1));
+ const extra_packed_byte = @boolToInt(total_bits % u8_bit_count > 0);
+ const total_packed_bytes = (total_bits / u8_bit_count) + extra_packed_byte;
+
+ expect(in.pos == if (packing == .Bit) total_packed_bytes else total_bytes);
+
+ //Verify that empty error set works with serializer.
+ //deserializer is covered by FixedBufferStream
+ var null_serializer = io.serializer(endian, packing, std.io.null_out_stream);
+ try null_serializer.serialize(data_mem[0..]);
+ try null_serializer.flush();
+}
+
+test "Serializer/Deserializer Int" {
+ try testIntSerializerDeserializer(.Big, .Byte);
+ try testIntSerializerDeserializer(.Little, .Byte);
+ // TODO these tests are disabled due to tripping an LLVM assertion
+ // https://github.com/ziglang/zig/issues/2019
+ //try testIntSerializerDeserializer(builtin.Endian.Big, true);
+ //try testIntSerializerDeserializer(builtin.Endian.Little, true);
+}
+
+fn testIntSerializerDeserializerInfNaN(
+ comptime endian: builtin.Endian,
+ comptime packing: io.Packing,
+) !void {
+ const mem_size = (16 * 2 + 32 * 2 + 64 * 2 + 128 * 2) / comptime meta.bitCount(u8);
+ var data_mem: [mem_size]u8 = undefined;
+
+ var out = io.fixedBufferStream(&data_mem);
+ var serializer = serializer(endian, packing, out.outStream());
+
+ var in = io.fixedBufferStream(&data_mem);
+ var deserializer = deserializer(endian, packing, in.inStream());
+
+ //@TODO: isInf/isNan not currently implemented for f128.
+ try serializer.serialize(std.math.nan(f16));
+ try serializer.serialize(std.math.inf(f16));
+ try serializer.serialize(std.math.nan(f32));
+ try serializer.serialize(std.math.inf(f32));
+ try serializer.serialize(std.math.nan(f64));
+ try serializer.serialize(std.math.inf(f64));
+ //try serializer.serialize(std.math.nan(f128));
+ //try serializer.serialize(std.math.inf(f128));
+ const nan_check_f16 = try deserializer.deserialize(f16);
+ const inf_check_f16 = try deserializer.deserialize(f16);
+ const nan_check_f32 = try deserializer.deserialize(f32);
+ deserializer.alignToByte();
+ const inf_check_f32 = try deserializer.deserialize(f32);
+ const nan_check_f64 = try deserializer.deserialize(f64);
+ const inf_check_f64 = try deserializer.deserialize(f64);
+ //const nan_check_f128 = try deserializer.deserialize(f128);
+ //const inf_check_f128 = try deserializer.deserialize(f128);
+ expect(std.math.isNan(nan_check_f16));
+ expect(std.math.isInf(inf_check_f16));
+ expect(std.math.isNan(nan_check_f32));
+ expect(std.math.isInf(inf_check_f32));
+ expect(std.math.isNan(nan_check_f64));
+ expect(std.math.isInf(inf_check_f64));
+ //expect(std.math.isNan(nan_check_f128));
+ //expect(std.math.isInf(inf_check_f128));
+}
+
+test "Serializer/Deserializer Int: Inf/NaN" {
+ try testIntSerializerDeserializerInfNaN(.Big, .Byte);
+ try testIntSerializerDeserializerInfNaN(.Little, .Byte);
+ try testIntSerializerDeserializerInfNaN(.Big, .Bit);
+ try testIntSerializerDeserializerInfNaN(.Little, .Bit);
+}
+
+fn testAlternateSerializer(self: var, serializer: var) !void {
+ try serializer.serialize(self.f_f16);
+}
+
+fn testSerializerDeserializer(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
+ const ColorType = enum(u4) {
+ RGB8 = 1,
+ RA16 = 2,
+ R32 = 3,
+ };
+
+ const TagAlign = union(enum(u32)) {
+ A: u8,
+ B: u8,
+ C: u8,
+ };
+
+ const Color = union(ColorType) {
+ RGB8: struct {
+ r: u8,
+ g: u8,
+ b: u8,
+ a: u8,
+ },
+ RA16: struct {
+ r: u16,
+ a: u16,
+ },
+ R32: u32,
+ };
+
+ const PackedStruct = packed struct {
+ f_i3: i3,
+ f_u2: u2,
+ };
+
+ //to test custom serialization
+ const Custom = struct {
+ f_f16: f16,
+ f_unused_u32: u32,
+
+ pub fn deserialize(self: *@This(), deserializer: var) !void {
+ try deserializer.deserializeInto(&self.f_f16);
+ self.f_unused_u32 = 47;
+ }
+
+ pub const serialize = testAlternateSerializer;
+ };
+
+ const MyStruct = struct {
+ f_i3: i3,
+ f_u8: u8,
+ f_tag_align: TagAlign,
+ f_u24: u24,
+ f_i19: i19,
+ f_void: void,
+ f_f32: f32,
+ f_f128: f128,
+ f_packed_0: PackedStruct,
+ f_i7arr: [10]i7,
+ f_of64n: ?f64,
+ f_of64v: ?f64,
+ f_color_type: ColorType,
+ f_packed_1: PackedStruct,
+ f_custom: Custom,
+ f_color: Color,
+ };
+
+ const my_inst = MyStruct{
+ .f_i3 = -1,
+ .f_u8 = 8,
+ .f_tag_align = TagAlign{ .B = 148 },
+ .f_u24 = 24,
+ .f_i19 = 19,
+ .f_void = {},
+ .f_f32 = 32.32,
+ .f_f128 = 128.128,
+ .f_packed_0 = PackedStruct{ .f_i3 = -1, .f_u2 = 2 },
+ .f_i7arr = [10]i7{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 },
+ .f_of64n = null,
+ .f_of64v = 64.64,
+ .f_color_type = ColorType.R32,
+ .f_packed_1 = PackedStruct{ .f_i3 = 1, .f_u2 = 1 },
+ .f_custom = Custom{ .f_f16 = 38.63, .f_unused_u32 = 47 },
+ .f_color = Color{ .R32 = 123822 },
+ };
+
+ var data_mem: [@sizeOf(MyStruct)]u8 = undefined;
+ var out = io.fixedBufferStream(&data_mem);
+ var serializer = serializer(endian, packing, out.outStream());
+
+ var in = io.fixedBufferStream(&data_mem);
+ var deserializer = deserializer(endian, packing, in.inStream());
+
+ try serializer.serialize(my_inst);
+
+ const my_copy = try deserializer.deserialize(MyStruct);
+ expect(meta.eql(my_copy, my_inst));
+}
+
+test "Serializer/Deserializer generic" {
+ if (std.Target.current.os.tag == .windows) {
+ // TODO https://github.com/ziglang/zig/issues/508
+ return error.SkipZigTest;
+ }
+ try testSerializerDeserializer(builtin.Endian.Big, .Byte);
+ try testSerializerDeserializer(builtin.Endian.Little, .Byte);
+ try testSerializerDeserializer(builtin.Endian.Big, .Bit);
+ try testSerializerDeserializer(builtin.Endian.Little, .Bit);
+}
+
+fn testBadData(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
+ const E = enum(u14) {
+ One = 1,
+ Two = 2,
+ };
+
+ const A = struct {
+ e: E,
+ };
+
+ const C = union(E) {
+ One: u14,
+ Two: f16,
+ };
+
+ var data_mem: [4]u8 = undefined;
+ var out = io.fixedBufferStream(&data_mem);
+ var serializer = serializer(endian, packing, out.outStream());
+
+ var in = io.fixedBufferStream(&data_mem);
+ var deserializer = deserializer(endian, packing, in.inStream());
+
+ try serializer.serialize(@as(u14, 3));
+ expectError(error.InvalidEnumTag, deserializer.deserialize(A));
+ out.pos = 0;
+ try serializer.serialize(@as(u14, 3));
+ try serializer.serialize(@as(u14, 88));
+ expectError(error.InvalidEnumTag, deserializer.deserialize(C));
+}
+
+test "Deserializer bad data" {
+ try testBadData(.Big, .Byte);
+ try testBadData(.Little, .Byte);
+ try testBadData(.Big, .Bit);
+ try testBadData(.Little, .Bit);
+}
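
A minimal round trip through the new serializer/deserializer pair in byte-packed mode over a fixed buffer; the `Point` struct is invented for illustration, and the example relies on the `io.serializer`/`io.deserializer` re-exports already used by the tests above:

    const std = @import("std");

    test "serialize then deserialize a struct" {
        const Point = struct {
            x: i32,
            y: i32,
        };

        var buf: [@sizeOf(Point)]u8 = undefined;

        var out = std.io.fixedBufferStream(&buf);
        var ser = std.io.serializer(.Little, .Byte, out.outStream());
        try ser.serialize(Point{ .x = -1, .y = 2 });

        var in = std.io.fixedBufferStream(&buf);
        var de = std.io.deserializer(.Little, .Byte, in.inStream());
        const copy = try de.deserialize(Point);

        std.testing.expect(copy.x == -1 and copy.y == 2);
    }
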
lib/std/io/test.zig
@@ -22,11 +22,10 @@ test "write a file, read it, then delete it" {
var file = try cwd.createFile(tmp_file_name, .{});
defer file.close();
- var file_out_stream = file.outStream();
- var buf_stream = io.BufferedOutStream(File.WriteError).init(&file_out_stream.stream);
- const st = &buf_stream.stream;
+ var buf_stream = io.bufferedOutStream(file.outStream());
+ const st = buf_stream.outStream();
try st.print("begin", .{});
- try st.write(data[0..]);
+ try st.writeAll(data[0..]);
try st.print("end", .{});
try buf_stream.flush();
}
@@ -48,9 +47,8 @@ test "write a file, read it, then delete it" {
const expected_file_size: u64 = "begin".len + data.len + "end".len;
expectEqual(expected_file_size, file_size);
- var file_in_stream = file.inStream();
- var buf_stream = io.BufferedInStream(File.ReadError).init(&file_in_stream.stream);
- const st = &buf_stream.stream;
+ var buf_stream = io.bufferedInStream(file.inStream());
+ const st = buf_stream.inStream();
const contents = try st.readAllAlloc(std.testing.allocator, 2 * 1024);
defer std.testing.allocator.free(contents);
@@ -61,224 +59,13 @@ test "write a file, read it, then delete it" {
try cwd.deleteFile(tmp_file_name);
}
-test "BufferOutStream" {
- var buffer = try std.Buffer.initSize(std.testing.allocator, 0);
- defer buffer.deinit();
- var buf_stream = &std.io.BufferOutStream.init(&buffer).stream;
-
- const x: i32 = 42;
- const y: i32 = 1234;
- try buf_stream.print("x: {}\ny: {}\n", .{ x, y });
-
- expect(mem.eql(u8, buffer.toSlice(), "x: 42\ny: 1234\n"));
-}
-
-test "SliceInStream" {
- const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7 };
- var ss = io.SliceInStream.init(&bytes);
-
- var dest: [4]u8 = undefined;
-
- var read = try ss.stream.read(dest[0..4]);
- expect(read == 4);
- expect(mem.eql(u8, dest[0..4], bytes[0..4]));
-
- read = try ss.stream.read(dest[0..4]);
- expect(read == 3);
- expect(mem.eql(u8, dest[0..3], bytes[4..7]));
-
- read = try ss.stream.read(dest[0..4]);
- expect(read == 0);
-}
-
-test "PeekStream" {
- const bytes = [_]u8{ 1, 2, 3, 4, 5, 6, 7, 8 };
- var ss = io.SliceInStream.init(&bytes);
- var ps = io.PeekStream(.{ .Static = 2 }, io.SliceInStream.Error).init(&ss.stream);
-
- var dest: [4]u8 = undefined;
-
- try ps.putBackByte(9);
- try ps.putBackByte(10);
-
- var read = try ps.stream.read(dest[0..4]);
- expect(read == 4);
- expect(dest[0] == 10);
- expect(dest[1] == 9);
- expect(mem.eql(u8, dest[2..4], bytes[0..2]));
-
- read = try ps.stream.read(dest[0..4]);
- expect(read == 4);
- expect(mem.eql(u8, dest[0..4], bytes[2..6]));
-
- read = try ps.stream.read(dest[0..4]);
- expect(read == 2);
- expect(mem.eql(u8, dest[0..2], bytes[6..8]));
-
- try ps.putBackByte(11);
- try ps.putBackByte(12);
-
- read = try ps.stream.read(dest[0..4]);
- expect(read == 2);
- expect(dest[0] == 12);
- expect(dest[1] == 11);
-}
-
-test "SliceOutStream" {
- var buffer: [10]u8 = undefined;
- var ss = io.SliceOutStream.init(buffer[0..]);
-
- try ss.stream.write("Hello");
- expect(mem.eql(u8, ss.getWritten(), "Hello"));
-
- try ss.stream.write("world");
- expect(mem.eql(u8, ss.getWritten(), "Helloworld"));
-
- expectError(error.OutOfMemory, ss.stream.write("!"));
- expect(mem.eql(u8, ss.getWritten(), "Helloworld"));
-
- ss.reset();
- expect(ss.getWritten().len == 0);
-
- expectError(error.OutOfMemory, ss.stream.write("Hello world!"));
- expect(mem.eql(u8, ss.getWritten(), "Hello worl"));
-}
-
-test "BitInStream" {
- const mem_be = [_]u8{ 0b11001101, 0b00001011 };
- const mem_le = [_]u8{ 0b00011101, 0b10010101 };
-
- var mem_in_be = io.SliceInStream.init(mem_be[0..]);
- const InError = io.SliceInStream.Error;
- var bit_stream_be = io.BitInStream(builtin.Endian.Big, InError).init(&mem_in_be.stream);
-
- var out_bits: usize = undefined;
-
- expect(1 == try bit_stream_be.readBits(u2, 1, &out_bits));
- expect(out_bits == 1);
- expect(2 == try bit_stream_be.readBits(u5, 2, &out_bits));
- expect(out_bits == 2);
- expect(3 == try bit_stream_be.readBits(u128, 3, &out_bits));
- expect(out_bits == 3);
- expect(4 == try bit_stream_be.readBits(u8, 4, &out_bits));
- expect(out_bits == 4);
- expect(5 == try bit_stream_be.readBits(u9, 5, &out_bits));
- expect(out_bits == 5);
- expect(1 == try bit_stream_be.readBits(u1, 1, &out_bits));
- expect(out_bits == 1);
-
- mem_in_be.pos = 0;
- bit_stream_be.bit_count = 0;
- expect(0b110011010000101 == try bit_stream_be.readBits(u15, 15, &out_bits));
- expect(out_bits == 15);
-
- mem_in_be.pos = 0;
- bit_stream_be.bit_count = 0;
- expect(0b1100110100001011 == try bit_stream_be.readBits(u16, 16, &out_bits));
- expect(out_bits == 16);
-
- _ = try bit_stream_be.readBits(u0, 0, &out_bits);
-
- expect(0 == try bit_stream_be.readBits(u1, 1, &out_bits));
- expect(out_bits == 0);
- expectError(error.EndOfStream, bit_stream_be.readBitsNoEof(u1, 1));
-
- var mem_in_le = io.SliceInStream.init(mem_le[0..]);
- var bit_stream_le = io.BitInStream(builtin.Endian.Little, InError).init(&mem_in_le.stream);
-
- expect(1 == try bit_stream_le.readBits(u2, 1, &out_bits));
- expect(out_bits == 1);
- expect(2 == try bit_stream_le.readBits(u5, 2, &out_bits));
- expect(out_bits == 2);
- expect(3 == try bit_stream_le.readBits(u128, 3, &out_bits));
- expect(out_bits == 3);
- expect(4 == try bit_stream_le.readBits(u8, 4, &out_bits));
- expect(out_bits == 4);
- expect(5 == try bit_stream_le.readBits(u9, 5, &out_bits));
- expect(out_bits == 5);
- expect(1 == try bit_stream_le.readBits(u1, 1, &out_bits));
- expect(out_bits == 1);
-
- mem_in_le.pos = 0;
- bit_stream_le.bit_count = 0;
- expect(0b001010100011101 == try bit_stream_le.readBits(u15, 15, &out_bits));
- expect(out_bits == 15);
-
- mem_in_le.pos = 0;
- bit_stream_le.bit_count = 0;
- expect(0b1001010100011101 == try bit_stream_le.readBits(u16, 16, &out_bits));
- expect(out_bits == 16);
-
- _ = try bit_stream_le.readBits(u0, 0, &out_bits);
-
- expect(0 == try bit_stream_le.readBits(u1, 1, &out_bits));
- expect(out_bits == 0);
- expectError(error.EndOfStream, bit_stream_le.readBitsNoEof(u1, 1));
-}
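The bit-reader test above becomes a composition of `io.fixedBufferStream` and the `io.bitInStream` helper used in the file-stream test below; `readBits` and `readBitsNoEof` are assumed to keep the signatures of the code removed from io.zig further down. A sketch, not part of the commit:

test "bitInStream over a fixed buffer (sketch)" {
    const mem_be = [_]u8{ 0b11001101, 0b00001011 };
    var fbs = io.fixedBufferStream(&mem_be);
    var bit_stream_be = io.bitInStream(builtin.Endian.Big, fbs.inStream());

    var out_bits: usize = undefined;
    expect(1 == try bit_stream_be.readBits(u2, 1, &out_bits));
    expect(out_bits == 1);
    expect(2 == try bit_stream_be.readBits(u5, 2, &out_bits));
    expect(out_bits == 2);

    // Only 13 of the requested 16 bits remain, so the NoEof variant errors.
    expectError(error.EndOfStream, bit_stream_be.readBitsNoEof(u16, 16));
}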
-
-test "BitOutStream" {
- var mem_be = [_]u8{0} ** 2;
- var mem_le = [_]u8{0} ** 2;
-
- var mem_out_be = io.SliceOutStream.init(mem_be[0..]);
- const OutError = io.SliceOutStream.Error;
- var bit_stream_be = io.BitOutStream(builtin.Endian.Big, OutError).init(&mem_out_be.stream);
-
- try bit_stream_be.writeBits(@as(u2, 1), 1);
- try bit_stream_be.writeBits(@as(u5, 2), 2);
- try bit_stream_be.writeBits(@as(u128, 3), 3);
- try bit_stream_be.writeBits(@as(u8, 4), 4);
- try bit_stream_be.writeBits(@as(u9, 5), 5);
- try bit_stream_be.writeBits(@as(u1, 1), 1);
-
- expect(mem_be[0] == 0b11001101 and mem_be[1] == 0b00001011);
-
- mem_out_be.pos = 0;
-
- try bit_stream_be.writeBits(@as(u15, 0b110011010000101), 15);
- try bit_stream_be.flushBits();
- expect(mem_be[0] == 0b11001101 and mem_be[1] == 0b00001010);
-
- mem_out_be.pos = 0;
- try bit_stream_be.writeBits(@as(u32, 0b110011010000101), 16);
- expect(mem_be[0] == 0b01100110 and mem_be[1] == 0b10000101);
-
- try bit_stream_be.writeBits(@as(u0, 0), 0);
-
- var mem_out_le = io.SliceOutStream.init(mem_le[0..]);
- var bit_stream_le = io.BitOutStream(builtin.Endian.Little, OutError).init(&mem_out_le.stream);
-
- try bit_stream_le.writeBits(@as(u2, 1), 1);
- try bit_stream_le.writeBits(@as(u5, 2), 2);
- try bit_stream_le.writeBits(@as(u128, 3), 3);
- try bit_stream_le.writeBits(@as(u8, 4), 4);
- try bit_stream_le.writeBits(@as(u9, 5), 5);
- try bit_stream_le.writeBits(@as(u1, 1), 1);
-
- expect(mem_le[0] == 0b00011101 and mem_le[1] == 0b10010101);
-
- mem_out_le.pos = 0;
- try bit_stream_le.writeBits(@as(u15, 0b110011010000101), 15);
- try bit_stream_le.flushBits();
- expect(mem_le[0] == 0b10000101 and mem_le[1] == 0b01100110);
-
- mem_out_le.pos = 0;
- try bit_stream_le.writeBits(@as(u32, 0b1100110100001011), 16);
- expect(mem_le[0] == 0b00001011 and mem_le[1] == 0b11001101);
-
- try bit_stream_le.writeBits(@as(u0, 0), 0);
-}
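Its writer counterpart, using the `io.bitOutStream` call shape shown in the file-stream test below; the bit values and expected bytes are taken from the removed test, and `flushBits` is assumed to carry over unchanged. A sketch, not part of the commit:

test "bitOutStream over a fixed buffer (sketch)" {
    var mem_be = [_]u8{0} ** 2;
    var fbs = io.fixedBufferStream(&mem_be);
    var bit_stream_be = io.bitOutStream(builtin.Endian.Big, fbs.outStream());

    try bit_stream_be.writeBits(@as(u2, 1), 1);
    try bit_stream_be.writeBits(@as(u5, 2), 2);
    try bit_stream_be.writeBits(@as(u128, 3), 3);
    try bit_stream_be.writeBits(@as(u8, 4), 4);
    try bit_stream_be.writeBits(@as(u9, 5), 5);
    try bit_stream_be.writeBits(@as(u1, 1), 1);
    // 16 bits were written, so both bytes are already flushed; flushBits is a no-op here.
    try bit_stream_be.flushBits();

    expect(mem_be[0] == 0b11001101 and mem_be[1] == 0b00001011);
}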
-
test "BitStreams with File Stream" {
const tmp_file_name = "temp_test_file.txt";
{
var file = try fs.cwd().createFile(tmp_file_name, .{});
defer file.close();
- var file_out = file.outStream();
- var file_out_stream = &file_out.stream;
- const OutError = File.WriteError;
- var bit_stream = io.BitOutStream(builtin.endian, OutError).init(file_out_stream);
+ var bit_stream = io.bitOutStream(builtin.endian, file.outStream());
try bit_stream.writeBits(@as(u2, 1), 1);
try bit_stream.writeBits(@as(u5, 2), 2);
@@ -292,10 +79,7 @@ test "BitStreams with File Stream" {
var file = try fs.cwd().openFile(tmp_file_name, .{});
defer file.close();
- var file_in = file.inStream();
- var file_in_stream = &file_in.stream;
- const InError = File.ReadError;
- var bit_stream = io.BitInStream(builtin.endian, InError).init(file_in_stream);
+ var bit_stream = io.bitInStream(builtin.endian, file.inStream());
var out_bits: usize = undefined;
@@ -317,298 +101,6 @@ test "BitStreams with File Stream" {
try fs.cwd().deleteFile(tmp_file_name);
}
-fn testIntSerializerDeserializer(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
- @setEvalBranchQuota(1500);
- //@NOTE: if this test is taking too long, reduce the maximum tested bitsize
- const max_test_bitsize = 128;
-
- const total_bytes = comptime blk: {
- var bytes = 0;
- comptime var i = 0;
- while (i <= max_test_bitsize) : (i += 1) bytes += (i / 8) + @boolToInt(i % 8 > 0);
- break :blk bytes * 2;
- };
-
- var data_mem: [total_bytes]u8 = undefined;
- var out = io.SliceOutStream.init(data_mem[0..]);
- const OutError = io.SliceOutStream.Error;
- var out_stream = &out.stream;
- var serializer = io.Serializer(endian, packing, OutError).init(out_stream);
-
- var in = io.SliceInStream.init(data_mem[0..]);
- const InError = io.SliceInStream.Error;
- var in_stream = &in.stream;
- var deserializer = io.Deserializer(endian, packing, InError).init(in_stream);
-
- comptime var i = 0;
- inline while (i <= max_test_bitsize) : (i += 1) {
- const U = std.meta.IntType(false, i);
- const S = std.meta.IntType(true, i);
- try serializer.serializeInt(@as(U, i));
- if (i != 0) try serializer.serializeInt(@as(S, -1)) else try serializer.serialize(@as(S, 0));
- }
- try serializer.flush();
-
- i = 0;
- inline while (i <= max_test_bitsize) : (i += 1) {
- const U = std.meta.IntType(false, i);
- const S = std.meta.IntType(true, i);
- const x = try deserializer.deserializeInt(U);
- const y = try deserializer.deserializeInt(S);
- expect(x == @as(U, i));
- if (i != 0) expect(y == @as(S, -1)) else expect(y == 0);
- }
-
- const u8_bit_count = comptime meta.bitCount(u8);
- //0 + 1 + 2 + ... n = (n * (n + 1)) / 2
- //and we have each for unsigned and signed, so * 2
- const total_bits = (max_test_bitsize * (max_test_bitsize + 1));
- const extra_packed_byte = @boolToInt(total_bits % u8_bit_count > 0);
- const total_packed_bytes = (total_bits / u8_bit_count) + extra_packed_byte;
-
- expect(in.pos == if (packing == .Bit) total_packed_bytes else total_bytes);
-
- //Verify that empty error set works with serializer.
- //deserializer is covered by SliceInStream
- const NullError = io.NullOutStream.Error;
- var null_out = io.NullOutStream.init();
- var null_out_stream = &null_out.stream;
- var null_serializer = io.Serializer(endian, packing, NullError).init(null_out_stream);
- try null_serializer.serialize(data_mem[0..]);
- try null_serializer.flush();
-}
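The serializer round trip above shrinks considerably with the `io.serializer`/`io.deserializer` convenience functions this commit re-exports from io/serialization.zig. Their exact signatures do not appear in this diff, so the sketch below assumes they take `(endian, packing, stream)`, mirroring the type parameters of the removed Serializer/Deserializer:

test "serializer round trip (sketch)" {
    var data_mem: [16]u8 = undefined;

    var out = io.fixedBufferStream(&data_mem);
    // Assumed helper shape: serializer(endian, packing, out_stream).
    var ser = io.serializer(.Big, .Byte, out.outStream());
    try ser.serialize(@as(u32, 12345));
    try ser.flush();

    var in = io.fixedBufferStream(&data_mem);
    var de = io.deserializer(.Big, .Byte, in.inStream());
    expect((try de.deserialize(u32)) == 12345);
}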
-
-test "Serializer/Deserializer Int" {
- try testIntSerializerDeserializer(.Big, .Byte);
- try testIntSerializerDeserializer(.Little, .Byte);
- // TODO these tests are disabled due to tripping an LLVM assertion
- // https://github.com/ziglang/zig/issues/2019
- //try testIntSerializerDeserializer(builtin.Endian.Big, true);
- //try testIntSerializerDeserializer(builtin.Endian.Little, true);
-}
-
-fn testIntSerializerDeserializerInfNaN(
- comptime endian: builtin.Endian,
- comptime packing: io.Packing,
-) !void {
- const mem_size = (16 * 2 + 32 * 2 + 64 * 2 + 128 * 2) / comptime meta.bitCount(u8);
- var data_mem: [mem_size]u8 = undefined;
-
- var out = io.SliceOutStream.init(data_mem[0..]);
- const OutError = io.SliceOutStream.Error;
- var out_stream = &out.stream;
- var serializer = io.Serializer(endian, packing, OutError).init(out_stream);
-
- var in = io.SliceInStream.init(data_mem[0..]);
- const InError = io.SliceInStream.Error;
- var in_stream = &in.stream;
- var deserializer = io.Deserializer(endian, packing, InError).init(in_stream);
-
- //@TODO: isInf/isNan not currently implemented for f128.
- try serializer.serialize(std.math.nan(f16));
- try serializer.serialize(std.math.inf(f16));
- try serializer.serialize(std.math.nan(f32));
- try serializer.serialize(std.math.inf(f32));
- try serializer.serialize(std.math.nan(f64));
- try serializer.serialize(std.math.inf(f64));
- //try serializer.serialize(std.math.nan(f128));
- //try serializer.serialize(std.math.inf(f128));
- const nan_check_f16 = try deserializer.deserialize(f16);
- const inf_check_f16 = try deserializer.deserialize(f16);
- const nan_check_f32 = try deserializer.deserialize(f32);
- deserializer.alignToByte();
- const inf_check_f32 = try deserializer.deserialize(f32);
- const nan_check_f64 = try deserializer.deserialize(f64);
- const inf_check_f64 = try deserializer.deserialize(f64);
- //const nan_check_f128 = try deserializer.deserialize(f128);
- //const inf_check_f128 = try deserializer.deserialize(f128);
- expect(std.math.isNan(nan_check_f16));
- expect(std.math.isInf(inf_check_f16));
- expect(std.math.isNan(nan_check_f32));
- expect(std.math.isInf(inf_check_f32));
- expect(std.math.isNan(nan_check_f64));
- expect(std.math.isInf(inf_check_f64));
- //expect(std.math.isNan(nan_check_f128));
- //expect(std.math.isInf(inf_check_f128));
-}
-
-test "Serializer/Deserializer Int: Inf/NaN" {
- try testIntSerializerDeserializerInfNaN(.Big, .Byte);
- try testIntSerializerDeserializerInfNaN(.Little, .Byte);
- try testIntSerializerDeserializerInfNaN(.Big, .Bit);
- try testIntSerializerDeserializerInfNaN(.Little, .Bit);
-}
-
-fn testAlternateSerializer(self: var, serializer: var) !void {
- try serializer.serialize(self.f_f16);
-}
-
-fn testSerializerDeserializer(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
- const ColorType = enum(u4) {
- RGB8 = 1,
- RA16 = 2,
- R32 = 3,
- };
-
- const TagAlign = union(enum(u32)) {
- A: u8,
- B: u8,
- C: u8,
- };
-
- const Color = union(ColorType) {
- RGB8: struct {
- r: u8,
- g: u8,
- b: u8,
- a: u8,
- },
- RA16: struct {
- r: u16,
- a: u16,
- },
- R32: u32,
- };
-
- const PackedStruct = packed struct {
- f_i3: i3,
- f_u2: u2,
- };
-
- //to test custom serialization
- const Custom = struct {
- f_f16: f16,
- f_unused_u32: u32,
-
- pub fn deserialize(self: *@This(), deserializer: var) !void {
- try deserializer.deserializeInto(&self.f_f16);
- self.f_unused_u32 = 47;
- }
-
- pub const serialize = testAlternateSerializer;
- };
-
- const MyStruct = struct {
- f_i3: i3,
- f_u8: u8,
- f_tag_align: TagAlign,
- f_u24: u24,
- f_i19: i19,
- f_void: void,
- f_f32: f32,
- f_f128: f128,
- f_packed_0: PackedStruct,
- f_i7arr: [10]i7,
- f_of64n: ?f64,
- f_of64v: ?f64,
- f_color_type: ColorType,
- f_packed_1: PackedStruct,
- f_custom: Custom,
- f_color: Color,
- };
-
- const my_inst = MyStruct{
- .f_i3 = -1,
- .f_u8 = 8,
- .f_tag_align = TagAlign{ .B = 148 },
- .f_u24 = 24,
- .f_i19 = 19,
- .f_void = {},
- .f_f32 = 32.32,
- .f_f128 = 128.128,
- .f_packed_0 = PackedStruct{ .f_i3 = -1, .f_u2 = 2 },
- .f_i7arr = [10]i7{ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 },
- .f_of64n = null,
- .f_of64v = 64.64,
- .f_color_type = ColorType.R32,
- .f_packed_1 = PackedStruct{ .f_i3 = 1, .f_u2 = 1 },
- .f_custom = Custom{ .f_f16 = 38.63, .f_unused_u32 = 47 },
- .f_color = Color{ .R32 = 123822 },
- };
-
- var data_mem: [@sizeOf(MyStruct)]u8 = undefined;
- var out = io.SliceOutStream.init(data_mem[0..]);
- const OutError = io.SliceOutStream.Error;
- var out_stream = &out.stream;
- var serializer = io.Serializer(endian, packing, OutError).init(out_stream);
-
- var in = io.SliceInStream.init(data_mem[0..]);
- const InError = io.SliceInStream.Error;
- var in_stream = &in.stream;
- var deserializer = io.Deserializer(endian, packing, InError).init(in_stream);
-
- try serializer.serialize(my_inst);
-
- const my_copy = try deserializer.deserialize(MyStruct);
- expect(meta.eql(my_copy, my_inst));
-}
-
-test "Serializer/Deserializer generic" {
- if (std.Target.current.os.tag == .windows) {
- // TODO https://github.com/ziglang/zig/issues/508
- return error.SkipZigTest;
- }
- try testSerializerDeserializer(builtin.Endian.Big, .Byte);
- try testSerializerDeserializer(builtin.Endian.Little, .Byte);
- try testSerializerDeserializer(builtin.Endian.Big, .Bit);
- try testSerializerDeserializer(builtin.Endian.Little, .Bit);
-}
-
-fn testBadData(comptime endian: builtin.Endian, comptime packing: io.Packing) !void {
- const E = enum(u14) {
- One = 1,
- Two = 2,
- };
-
- const A = struct {
- e: E,
- };
-
- const C = union(E) {
- One: u14,
- Two: f16,
- };
-
- var data_mem: [4]u8 = undefined;
- var out = io.SliceOutStream.init(data_mem[0..]);
- const OutError = io.SliceOutStream.Error;
- var out_stream = &out.stream;
- var serializer = io.Serializer(endian, packing, OutError).init(out_stream);
-
- var in = io.SliceInStream.init(data_mem[0..]);
- const InError = io.SliceInStream.Error;
- var in_stream = &in.stream;
- var deserializer = io.Deserializer(endian, packing, InError).init(in_stream);
-
- try serializer.serialize(@as(u14, 3));
- expectError(error.InvalidEnumTag, deserializer.deserialize(A));
- out.pos = 0;
- try serializer.serialize(@as(u14, 3));
- try serializer.serialize(@as(u14, 88));
- expectError(error.InvalidEnumTag, deserializer.deserialize(C));
-}
-
-test "Deserializer bad data" {
- try testBadData(.Big, .Byte);
- try testBadData(.Little, .Byte);
- try testBadData(.Big, .Bit);
- try testBadData(.Little, .Bit);
-}
-
-test "c out stream" {
- if (!builtin.link_libc) return error.SkipZigTest;
-
- const filename = "tmp_io_test_file.txt";
- const out_file = std.c.fopen(filename, "w") orelse return error.UnableToOpenTestFile;
- defer {
- _ = std.c.fclose(out_file);
- fs.cwd().deleteFileC(filename) catch {};
- }
-
- const out_stream = &io.COutStream.init(out_file).stream;
- try out_stream.print("hi: {}\n", .{@as(i32, 123)});
-}
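The same test expressed with the `io.cOutStream` helper that this commit moves to io/c_out_stream.zig; the removed definition further down confirms it takes a `*std.c.FILE`. A sketch, not part of the commit:

test "cOutStream (sketch)" {
    if (!builtin.link_libc) return error.SkipZigTest;

    const filename = "tmp_io_test_file.txt";
    const out_file = std.c.fopen(filename, "w") orelse return error.UnableToOpenTestFile;
    defer {
        _ = std.c.fclose(out_file);
        fs.cwd().deleteFileC(filename) catch {};
    }

    // The returned OutStream writes through fwrite on the given FILE*.
    const out_stream = io.cOutStream(out_file);
    try out_stream.print("hi: {}\n", .{@as(i32, 123)});
}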
-
test "File seek ops" {
const tmp_file_name = "temp_test_file.txt";
var file = try fs.cwd().createFile(tmp_file_name, .{});
@@ -621,16 +113,16 @@ test "File seek ops" {
// Seek to the end
try file.seekFromEnd(0);
- std.testing.expect((try file.getPos()) == try file.getEndPos());
+ expect((try file.getPos()) == try file.getEndPos());
// Negative delta
try file.seekBy(-4096);
- std.testing.expect((try file.getPos()) == 4096);
+ expect((try file.getPos()) == 4096);
// Positive delta
try file.seekBy(10);
- std.testing.expect((try file.getPos()) == 4106);
+ expect((try file.getPos()) == 4106);
// Absolute position
try file.seekTo(1234);
- std.testing.expect((try file.getPos()) == 1234);
+ expect((try file.getPos()) == 1234);
}
test "updateTimes" {
@@ -647,6 +139,6 @@ test "updateTimes" {
stat_old.mtime - 5 * std.time.ns_per_s,
);
var stat_new = try file.stat();
- std.testing.expect(stat_new.atime < stat_old.atime);
- std.testing.expect(stat_new.mtime < stat_old.mtime);
+ expect(stat_new.atime < stat_old.atime);
+ expect(stat_new.mtime < stat_old.mtime);
}
lib/std/json/write_stream.zig
@@ -249,15 +249,22 @@ pub fn WriteStream(comptime OutStream: type, comptime max_depth: usize) type {
};
}
+pub fn writeStream(
+ out_stream: var,
+ comptime max_depth: usize,
+) WriteStream(@TypeOf(out_stream), max_depth) {
+ return WriteStream(@TypeOf(out_stream), max_depth).init(out_stream);
+}
+
test "json write stream" {
var out_buf: [1024]u8 = undefined;
- var slice_stream = std.io.SliceOutStream.init(&out_buf);
- const out = &slice_stream.stream;
+ var slice_stream = std.io.fixedBufferStream(&out_buf);
+ const out = slice_stream.outStream();
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_allocator.deinit();
- var w = std.json.WriteStream(@TypeOf(out).Child, 10).init(out);
+ var w = std.json.writeStream(out, 10);
try w.emitJson(try getJson(&arena_allocator.allocator));
const result = slice_stream.getWritten();
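For reference, a hedged sketch of using the new `std.json.writeStream` helper outside this test; the `objectField`, `emitString`, and `endObject` names come from the pre-existing WriteStream API and are assumed unchanged by this commit:

test "json.writeStream usage (sketch)" {
    var out_buf: [256]u8 = undefined;
    var fbs = std.io.fixedBufferStream(&out_buf);
    var w = std.json.writeStream(fbs.outStream(), 4);

    try w.beginObject();
    try w.objectField("name");
    try w.emitString("zig");
    try w.endObject();

    // fbs.getWritten() now holds the serialized object
    // (WriteStream inserts its own newlines/indentation).
}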
lib/std/os/test.zig
@@ -354,8 +354,7 @@ test "mmap" {
const file = try fs.cwd().createFile(test_out_file, .{});
defer file.close();
- var out_stream = file.outStream();
- const stream = &out_stream.stream;
+ const stream = file.outStream();
var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
@@ -378,8 +377,8 @@ test "mmap" {
);
defer os.munmap(data);
- var mem_stream = io.SliceInStream.init(data);
- const stream = &mem_stream.stream;
+ var mem_stream = io.fixedBufferStream(data);
+ const stream = mem_stream.inStream();
var i: u32 = 0;
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
@@ -402,8 +401,8 @@ test "mmap" {
);
defer os.munmap(data);
- var mem_stream = io.SliceInStream.init(data);
- const stream = &mem_stream.stream;
+ var mem_stream = io.fixedBufferStream(data);
+ const stream = mem_stream.inStream();
var i: u32 = alloc_size / 2 / @sizeOf(u32);
while (i < alloc_size / @sizeOf(u32)) : (i += 1) {
lib/std/zig/parser_test.zig
@@ -2809,7 +2809,7 @@ const maxInt = std.math.maxInt;
var fixed_buffer_mem: [100 * 1024]u8 = undefined;
fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *bool) ![]u8 {
- const stderr = &io.getStdErr().outStream().stream;
+ const stderr = io.getStdErr().outStream();
const tree = try std.zig.parse(allocator, source);
defer tree.deinit();
@@ -2824,17 +2824,17 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
{
var i: usize = 0;
while (i < loc.column) : (i += 1) {
- try stderr.write(" ");
+ try stderr.writeAll(" ");
}
}
{
const caret_count = token.end - token.start;
var i: usize = 0;
while (i < caret_count) : (i += 1) {
- try stderr.write("~");
+ try stderr.writeAll("~");
}
}
- try stderr.write("\n");
+ try stderr.writeAll("\n");
}
if (tree.errors.len != 0) {
return error.ParseError;
@@ -2843,8 +2843,7 @@ fn testParse(source: []const u8, allocator: *mem.Allocator, anything_changed: *b
var buffer = try std.Buffer.initSize(allocator, 0);
errdefer buffer.deinit();
- var buffer_out_stream = io.BufferOutStream.init(&buffer);
- anything_changed.* = try std.zig.render(allocator, &buffer_out_stream.stream, tree);
+ anything_changed.* = try std.zig.render(allocator, buffer.outStream(), tree);
return buffer.toOwnedSlice();
}
lib/std/zig/render.zig
@@ -903,7 +903,7 @@ fn renderExpression(
var column_widths = widths[widths.len - row_size ..];
// Null stream for counting the printed length of each expression
- var counting_stream = std.io.CountingOutStream(@TypeOf(std.io.null_out_stream)).init(std.io.null_out_stream);
+ var counting_stream = std.io.countingOutStream(std.io.null_out_stream);
var it = exprs.iterator(0);
var i: usize = 0;
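The one-line change above relies on the new `io.countingOutStream` helper, which pairs a byte counter with any child stream. A hedged sketch of typical usage, assuming `std` is in scope as it is in render.zig and that the `bytes_written` field name of the existing CountingOutStream is unchanged:

test "countingOutStream (sketch)" {
    var counting_stream = std.io.countingOutStream(std.io.null_out_stream);
    const out = counting_stream.outStream();

    try out.print("{}, {}!\n", .{ "Hello", "world" });
    std.testing.expect(counting_stream.bytes_written == "Hello, world!\n".len);
}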
lib/std/buffer.zig
@@ -219,3 +219,15 @@ test "Buffer.print" {
try buf.print("Hello {} the {}", .{ 2, "world" });
testing.expect(buf.eql("Hello 2 the world"));
}
+
+test "Buffer.outStream" {
+ var buffer = try Buffer.initSize(testing.allocator, 0);
+ defer buffer.deinit();
+ const buf_stream = buffer.outStream();
+
+ const x: i32 = 42;
+ const y: i32 = 1234;
+ try buf_stream.print("x: {}\ny: {}\n", .{ x, y });
+
+ testing.expect(mem.eql(u8, buffer.toSlice(), "x: 42\ny: 1234\n"));
+}
lib/std/heap.zig
@@ -10,6 +10,7 @@ const c = std.c;
const maxInt = std.math.maxInt;
pub const LoggingAllocator = @import("heap/logging_allocator.zig").LoggingAllocator;
+pub const loggingAllocator = @import("heap/logging_allocator.zig").loggingAllocator;
const Allocator = mem.Allocator;
lib/std/io.zig
@@ -4,17 +4,13 @@ const root = @import("root");
const c = std.c;
const math = std.math;
-const debug = std.debug;
-const assert = debug.assert;
+const assert = std.debug.assert;
const os = std.os;
const fs = std.fs;
const mem = std.mem;
const meta = std.meta;
const trait = meta.trait;
-const Buffer = std.Buffer;
-const fmt = std.fmt;
const File = std.fs.File;
-const testing = std.testing;
pub const Mode = enum {
/// I/O operates normally, waiting for the operating system syscalls to complete.
@@ -92,10 +88,9 @@ pub fn getStdIn() File {
};
}
-pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream;
pub const InStream = @import("io/in_stream.zig").InStream;
pub const OutStream = @import("io/out_stream.zig").OutStream;
-pub const BufferedAtomicFile = @import("io/buffered_atomic_file.zig").BufferedAtomicFile;
+pub const SeekableStream = @import("io/seekable_stream.zig").SeekableStream;
pub const BufferedOutStream = @import("io/buffered_out_stream.zig").BufferedOutStream;
pub const bufferedOutStream = @import("io/buffered_out_stream.zig").bufferedOutStream;
@@ -103,36 +98,33 @@ pub const bufferedOutStream = @import("io/buffered_out_stream.zig").bufferedOutS
pub const BufferedInStream = @import("io/buffered_in_stream.zig").BufferedInStream;
pub const bufferedInStream = @import("io/buffered_in_stream.zig").bufferedInStream;
+pub const PeekStream = @import("io/peek_stream.zig").PeekStream;
+pub const peekStream = @import("io/peek_stream.zig").peekStream;
+
pub const FixedBufferStream = @import("io/fixed_buffer_stream.zig").FixedBufferStream;
pub const fixedBufferStream = @import("io/fixed_buffer_stream.zig").fixedBufferStream;
+pub const COutStream = @import("io/c_out_stream.zig").COutStream;
+pub const cOutStream = @import("io/c_out_stream.zig").cOutStream;
+
pub const CountingOutStream = @import("io/counting_out_stream.zig").CountingOutStream;
+pub const countingOutStream = @import("io/counting_out_stream.zig").countingOutStream;
-pub fn cOutStream(c_file: *std.c.FILE) COutStream {
- return .{ .context = c_file };
-}
+pub const BitInStream = @import("io/bit_in_stream.zig").BitInStream;
+pub const bitInStream = @import("io/bit_in_stream.zig").bitInStream;
-pub const COutStream = OutStream(*std.c.FILE, std.fs.File.WriteError, cOutStreamWrite);
-
-pub fn cOutStreamWrite(c_file: *std.c.FILE, bytes: []const u8) std.fs.File.WriteError!usize {
- const amt_written = std.c.fwrite(bytes.ptr, 1, bytes.len, c_file);
- if (amt_written >= 0) return amt_written;
- switch (std.c._errno().*) {
- 0 => unreachable,
- os.EINVAL => unreachable,
- os.EFAULT => unreachable,
- os.EAGAIN => unreachable, // this is a blocking API
- os.EBADF => unreachable, // always a race condition
- os.EDESTADDRREQ => unreachable, // connect was never called
- os.EDQUOT => return error.DiskQuota,
- os.EFBIG => return error.FileTooBig,
- os.EIO => return error.InputOutput,
- os.ENOSPC => return error.NoSpaceLeft,
- os.EPERM => return error.AccessDenied,
- os.EPIPE => return error.BrokenPipe,
- else => |err| return os.unexpectedErrno(@intCast(usize, err)),
- }
-}
+pub const BitOutStream = @import("io/bit_out_stream.zig").BitOutStream;
+pub const bitOutStream = @import("io/bit_out_stream.zig").bitOutStream;
+
+pub const Packing = @import("io/serialization.zig").Packing;
+
+pub const Serializer = @import("io/serialization.zig").Serializer;
+pub const serializer = @import("io/serialization.zig").serializer;
+
+pub const Deserializer = @import("io/serialization.zig").Deserializer;
+pub const deserializer = @import("io/serialization.zig").deserializer;
+
+pub const BufferedAtomicFile = @import("io/buffered_atomic_file.zig").BufferedAtomicFile;
/// Deprecated; use `std.fs.Dir.writeFile`.
pub fn writeFile(path: []const u8, data: []const u8) !void {
@@ -144,249 +136,6 @@ pub fn readFileAlloc(allocator: *mem.Allocator, path: []const u8) ![]u8 {
return fs.cwd().readFileAlloc(allocator, path, math.maxInt(usize));
}
-/// Creates a stream which supports 'un-reading' data, so that it can be read again.
-/// This makes look-ahead style parsing much easier.
-pub fn PeekStream(comptime buffer_type: std.fifo.LinearFifoBufferType, comptime InStreamError: type) type {
- return struct {
- const Self = @This();
- pub const Error = InStreamError;
- pub const Stream = InStream(Error);
-
- stream: Stream,
- base: *Stream,
-
- const FifoType = std.fifo.LinearFifo(u8, buffer_type);
- fifo: FifoType,
-
- pub usingnamespace switch (buffer_type) {
- .Static => struct {
- pub fn init(base: *Stream) Self {
- return .{
- .base = base,
- .fifo = FifoType.init(),
- .stream = Stream{ .readFn = readFn },
- };
- }
- },
- .Slice => struct {
- pub fn init(base: *Stream, buf: []u8) Self {
- return .{
- .base = base,
- .fifo = FifoType.init(buf),
- .stream = Stream{ .readFn = readFn },
- };
- }
- },
- .Dynamic => struct {
- pub fn init(base: *Stream, allocator: *mem.Allocator) Self {
- return .{
- .base = base,
- .fifo = FifoType.init(allocator),
- .stream = Stream{ .readFn = readFn },
- };
- }
- },
- };
-
- pub fn putBackByte(self: *Self, byte: u8) !void {
- try self.putBack(&[_]u8{byte});
- }
-
- pub fn putBack(self: *Self, bytes: []const u8) !void {
- try self.fifo.unget(bytes);
- }
-
- fn readFn(in_stream: *Stream, dest: []u8) Error!usize {
- const self = @fieldParentPtr(Self, "stream", in_stream);
-
- // copy over anything putBack()'d
- var dest_index = self.fifo.read(dest);
- if (dest_index == dest.len) return dest_index;
-
- // ask the backing stream for more
- dest_index += try self.base.read(dest[dest_index..]);
- return dest_index;
- }
- };
-}
-
-pub const SliceInStream = struct {
- const Self = @This();
- pub const Error = error{};
- pub const Stream = InStream(Error);
-
- stream: Stream,
-
- pos: usize,
- slice: []const u8,
-
- pub fn init(slice: []const u8) Self {
- return Self{
- .slice = slice,
- .pos = 0,
- .stream = Stream{ .readFn = readFn },
- };
- }
-
- fn readFn(in_stream: *Stream, dest: []u8) Error!usize {
- const self = @fieldParentPtr(Self, "stream", in_stream);
- const size = math.min(dest.len, self.slice.len - self.pos);
- const end = self.pos + size;
-
- mem.copy(u8, dest[0..size], self.slice[self.pos..end]);
- self.pos = end;
-
- return size;
- }
-};
-
-/// Creates a stream which allows for reading bit fields from another stream
-pub fn BitInStream(endian: builtin.Endian, comptime Error: type) type {
- return struct {
- const Self = @This();
-
- in_stream: *Stream,
- bit_buffer: u7,
- bit_count: u3,
- stream: Stream,
-
- pub const Stream = InStream(Error);
- const u8_bit_count = comptime meta.bitCount(u8);
- const u7_bit_count = comptime meta.bitCount(u7);
- const u4_bit_count = comptime meta.bitCount(u4);
-
- pub fn init(in_stream: *Stream) Self {
- return Self{
- .in_stream = in_stream,
- .bit_buffer = 0,
- .bit_count = 0,
- .stream = Stream{ .readFn = read },
- };
- }
-
- /// Reads `bits` bits from the stream and returns a specified unsigned int type
- /// containing them in the least significant end, returning an error if the
- /// specified number of bits could not be read.
- pub fn readBitsNoEof(self: *Self, comptime U: type, bits: usize) !U {
- var n: usize = undefined;
- const result = try self.readBits(U, bits, &n);
- if (n < bits) return error.EndOfStream;
- return result;
- }
-
- /// Reads `bits` bits from the stream and returns a specified unsigned int type
- /// containing them in the least significant end. The number of bits successfully
- /// read is placed in `out_bits`, as reaching the end of the stream is not an error.
- pub fn readBits(self: *Self, comptime U: type, bits: usize, out_bits: *usize) Error!U {
- comptime assert(trait.isUnsignedInt(U));
-
- //by extending the buffer to a minimum of u8 we can cover a number of edge cases
- // related to shifting and casting.
- const u_bit_count = comptime meta.bitCount(U);
- const buf_bit_count = bc: {
- assert(u_bit_count >= bits);
- break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
- };
- const Buf = std.meta.IntType(false, buf_bit_count);
- const BufShift = math.Log2Int(Buf);
-
- out_bits.* = @as(usize, 0);
- if (U == u0 or bits == 0) return 0;
- var out_buffer = @as(Buf, 0);
-
- if (self.bit_count > 0) {
- const n = if (self.bit_count >= bits) @intCast(u3, bits) else self.bit_count;
- const shift = u7_bit_count - n;
- switch (endian) {
- .Big => {
- out_buffer = @as(Buf, self.bit_buffer >> shift);
- self.bit_buffer <<= n;
- },
- .Little => {
- const value = (self.bit_buffer << shift) >> shift;
- out_buffer = @as(Buf, value);
- self.bit_buffer >>= n;
- },
- }
- self.bit_count -= n;
- out_bits.* = n;
- }
- //at this point we know bit_buffer is empty
-
- //copy bytes until we have enough bits, then leave the rest in bit_buffer
- while (out_bits.* < bits) {
- const n = bits - out_bits.*;
- const next_byte = self.in_stream.readByte() catch |err| {
- if (err == error.EndOfStream) {
- return @intCast(U, out_buffer);
- }
- //@BUG: See #1810. Not sure if the bug is that I have to do this for some
- // streams, or that I don't for streams with empty error sets.
- return @errSetCast(Error, err);
- };
-
- switch (endian) {
- .Big => {
- if (n >= u8_bit_count) {
- out_buffer <<= @intCast(u3, u8_bit_count - 1);
- out_buffer <<= 1;
- out_buffer |= @as(Buf, next_byte);
- out_bits.* += u8_bit_count;
- continue;
- }
-
- const shift = @intCast(u3, u8_bit_count - n);
- out_buffer <<= @intCast(BufShift, n);
- out_buffer |= @as(Buf, next_byte >> shift);
- out_bits.* += n;
- self.bit_buffer = @truncate(u7, next_byte << @intCast(u3, n - 1));
- self.bit_count = shift;
- },
- .Little => {
- if (n >= u8_bit_count) {
- out_buffer |= @as(Buf, next_byte) << @intCast(BufShift, out_bits.*);
- out_bits.* += u8_bit_count;
- continue;
- }
-
- const shift = @intCast(u3, u8_bit_count - n);
- const value = (next_byte << shift) >> shift;
- out_buffer |= @as(Buf, value) << @intCast(BufShift, out_bits.*);
- out_bits.* += n;
- self.bit_buffer = @truncate(u7, next_byte >> @intCast(u3, n));
- self.bit_count = shift;
- },
- }
- }
-
- return @intCast(U, out_buffer);
- }
-
- pub fn alignToByte(self: *Self) void {
- self.bit_buffer = 0;
- self.bit_count = 0;
- }
-
- pub fn read(self_stream: *Stream, buffer: []u8) Error!usize {
- var self = @fieldParentPtr(Self, "stream", self_stream);
-
- var out_bits: usize = undefined;
- var out_bits_total = @as(usize, 0);
- //@NOTE: I'm not sure this is a good idea, maybe alignToByte should be forced
- if (self.bit_count > 0) {
- for (buffer) |*b, i| {
- b.* = try self.readBits(u8, u8_bit_count, &out_bits);
- out_bits_total += out_bits;
- }
- const incomplete_byte = @boolToInt(out_bits_total % u8_bit_count > 0);
- return (out_bits_total / u8_bit_count) + incomplete_byte;
- }
-
- return self.in_stream.read(buffer);
- }
- };
-}
-
/// An OutStream that doesn't write to anything.
pub const null_out_stream = @as(NullOutStream, .{ .context = {} });
@@ -396,472 +145,9 @@ fn dummyWrite(context: void, data: []const u8) error{}!usize {
}
test "null_out_stream" {
- null_out_stream.writeAll("yay" ** 1000) catch |err| switch (err) {};
-}
-
-/// Creates a stream which allows for writing bit fields to another stream
-pub fn BitOutStream(endian: builtin.Endian, comptime Error: type) type {
- return struct {
- const Self = @This();
-
- out_stream: *Stream,
- bit_buffer: u8,
- bit_count: u4,
- stream: Stream,
-
- pub const Stream = OutStream(Error);
- const u8_bit_count = comptime meta.bitCount(u8);
- const u4_bit_count = comptime meta.bitCount(u4);
-
- pub fn init(out_stream: *Stream) Self {
- return Self{
- .out_stream = out_stream,
- .bit_buffer = 0,
- .bit_count = 0,
- .stream = Stream{ .writeFn = write },
- };
- }
-
- /// Write the specified number of bits to the stream from the least significant bits of
- /// the specified unsigned int value. Bits will only be written to the stream when there
- /// are enough to fill a byte.
- pub fn writeBits(self: *Self, value: var, bits: usize) Error!void {
- if (bits == 0) return;
-
- const U = @TypeOf(value);
- comptime assert(trait.isUnsignedInt(U));
-
- //by extending the buffer to a minimum of u8 we can cover a number of edge cases
- // related to shifting and casting.
- const u_bit_count = comptime meta.bitCount(U);
- const buf_bit_count = bc: {
- assert(u_bit_count >= bits);
- break :bc if (u_bit_count <= u8_bit_count) u8_bit_count else u_bit_count;
- };
- const Buf = std.meta.IntType(false, buf_bit_count);
- const BufShift = math.Log2Int(Buf);
-
- const buf_value = @intCast(Buf, value);
-
- const high_byte_shift = @intCast(BufShift, buf_bit_count - u8_bit_count);
- var in_buffer = switch (endian) {
- .Big => buf_value << @intCast(BufShift, buf_bit_count - bits),
- .Little => buf_value,
- };
- var in_bits = bits;
-
- if (self.bit_count > 0) {
- const bits_remaining = u8_bit_count - self.bit_count;
- const n = @intCast(u3, if (bits_remaining > bits) bits else bits_remaining);
- switch (endian) {
- .Big => {
- const shift = @intCast(BufShift, high_byte_shift + self.bit_count);
- const v = @intCast(u8, in_buffer >> shift);
- self.bit_buffer |= v;
- in_buffer <<= n;
- },
- .Little => {
- const v = @truncate(u8, in_buffer) << @intCast(u3, self.bit_count);
- self.bit_buffer |= v;
- in_buffer >>= n;
- },
- }
- self.bit_count += n;
- in_bits -= n;
-
- //if we didn't fill the buffer, it's because bits < bits_remaining;
- if (self.bit_count != u8_bit_count) return;
- try self.out_stream.writeByte(self.bit_buffer);
- self.bit_buffer = 0;
- self.bit_count = 0;
- }
- //at this point we know bit_buffer is empty
-
- //copy bytes until we can't fill one anymore, then leave the rest in bit_buffer
- while (in_bits >= u8_bit_count) {
- switch (endian) {
- .Big => {
- const v = @intCast(u8, in_buffer >> high_byte_shift);
- try self.out_stream.writeByte(v);
- in_buffer <<= @intCast(u3, u8_bit_count - 1);
- in_buffer <<= 1;
- },
- .Little => {
- const v = @truncate(u8, in_buffer);
- try self.out_stream.writeByte(v);
- in_buffer >>= @intCast(u3, u8_bit_count - 1);
- in_buffer >>= 1;
- },
- }
- in_bits -= u8_bit_count;
- }
-
- if (in_bits > 0) {
- self.bit_count = @intCast(u4, in_bits);
- self.bit_buffer = switch (endian) {
- .Big => @truncate(u8, in_buffer >> high_byte_shift),
- .Little => @truncate(u8, in_buffer),
- };
- }
- }
-
- /// Flush any remaining bits to the stream.
- pub fn flushBits(self: *Self) Error!void {
- if (self.bit_count == 0) return;
- try self.out_stream.writeByte(self.bit_buffer);
- self.bit_buffer = 0;
- self.bit_count = 0;
- }
-
- pub fn write(self_stream: *Stream, buffer: []const u8) Error!usize {
- var self = @fieldParentPtr(Self, "stream", self_stream);
-
- // TODO: I'm not sure this is a good idea, maybe flushBits should be forced
- if (self.bit_count > 0) {
- for (buffer) |b, i|
- try self.writeBits(b, u8_bit_count);
- return buffer.len;
- }
-
- return self.out_stream.write(buffer);
- }
- };
-}
-
-pub const Packing = enum {
- /// Pack data to byte alignment
- Byte,
-
- /// Pack data to bit alignment
- Bit,
-};
-
-/// Creates a deserializer that deserializes types from any stream.
-/// If `packing` is `.Bit`, the data stream is treated as bit-packed,
-/// otherwise data is expected to be packed to the smallest byte.
-/// Types may implement a custom deserialization routine with a
-/// function named `deserialize` in the form of:
-/// pub fn deserialize(self: *Self, deserializer: var) !void
-/// which will be called when the deserializer is used to deserialize
-/// that type. It will pass a pointer to the type instance to deserialize
-/// into and a pointer to the deserializer struct.
-pub fn Deserializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime Error: type) type {
- return struct {
- const Self = @This();
-
- in_stream: if (packing == .Bit) BitInStream(endian, Stream.Error) else *Stream,
-
- pub const Stream = InStream(Error);
-
- pub fn init(in_stream: *Stream) Self {
- return Self{
- .in_stream = switch (packing) {
- .Bit => BitInStream(endian, Stream.Error).init(in_stream),
- .Byte => in_stream,
- },
- };
- }
-
- pub fn alignToByte(self: *Self) void {
- if (packing == .Byte) return;
- self.in_stream.alignToByte();
- }
-
- //@BUG: inferred error issue. See: #1386
- fn deserializeInt(self: *Self, comptime T: type) (Error || error{EndOfStream})!T {
- comptime assert(trait.is(.Int)(T) or trait.is(.Float)(T));
-
- const u8_bit_count = 8;
- const t_bit_count = comptime meta.bitCount(T);
-
- const U = std.meta.IntType(false, t_bit_count);
- const Log2U = math.Log2Int(U);
- const int_size = (U.bit_count + 7) / 8;
-
- if (packing == .Bit) {
- const result = try self.in_stream.readBitsNoEof(U, t_bit_count);
- return @bitCast(T, result);
- }
-
- var buffer: [int_size]u8 = undefined;
- const read_size = try self.in_stream.read(buffer[0..]);
- if (read_size < int_size) return error.EndOfStream;
-
- if (int_size == 1) {
- if (t_bit_count == 8) return @bitCast(T, buffer[0]);
- const PossiblySignedByte = std.meta.IntType(T.is_signed, 8);
- return @truncate(T, @bitCast(PossiblySignedByte, buffer[0]));
- }
-
- var result = @as(U, 0);
- for (buffer) |byte, i| {
- switch (endian) {
- .Big => {
- result = (result << u8_bit_count) | byte;
- },
- .Little => {
- result |= @as(U, byte) << @intCast(Log2U, u8_bit_count * i);
- },
- }
- }
-
- return @bitCast(T, result);
- }
-
- /// Deserializes and returns data of the specified type from the stream
- pub fn deserialize(self: *Self, comptime T: type) !T {
- var value: T = undefined;
- try self.deserializeInto(&value);
- return value;
- }
-
- /// Deserializes data into the type pointed to by `ptr`
- pub fn deserializeInto(self: *Self, ptr: var) !void {
- const T = @TypeOf(ptr);
- comptime assert(trait.is(.Pointer)(T));
-
- if (comptime trait.isSlice(T) or comptime trait.isPtrTo(.Array)(T)) {
- for (ptr) |*v|
- try self.deserializeInto(v);
- return;
- }
-
- comptime assert(trait.isSingleItemPtr(T));
-
- const C = comptime meta.Child(T);
- const child_type_id = @typeInfo(C);
-
- //custom deserializer: fn(self: *Self, deserializer: var) !void
- if (comptime trait.hasFn("deserialize")(C)) return C.deserialize(ptr, self);
-
- if (comptime trait.isPacked(C) and packing != .Bit) {
- var packed_deserializer = Deserializer(endian, .Bit, Error).init(self.in_stream);
- return packed_deserializer.deserializeInto(ptr);
- }
-
- switch (child_type_id) {
- .Void => return,
- .Bool => ptr.* = (try self.deserializeInt(u1)) > 0,
- .Float, .Int => ptr.* = try self.deserializeInt(C),
- .Struct => {
- const info = @typeInfo(C).Struct;
-
- inline for (info.fields) |*field_info| {
- const name = field_info.name;
- const FieldType = field_info.field_type;
-
- if (FieldType == void or FieldType == u0) continue;
-
- //it doesn't make any sense to read pointers
- if (comptime trait.is(.Pointer)(FieldType)) {
- @compileError("Will not " ++ "read field " ++ name ++ " of struct " ++
- @typeName(C) ++ " because it " ++ "is of pointer-type " ++
- @typeName(FieldType) ++ ".");
- }
-
- try self.deserializeInto(&@field(ptr, name));
- }
- },
- .Union => {
- const info = @typeInfo(C).Union;
- if (info.tag_type) |TagType| {
- //we avoid duplicate iteration over the enum tags
- // by getting the int directly and casting it without
- // safety. If it is bad, it will be caught anyway.
- const TagInt = @TagType(TagType);
- const tag = try self.deserializeInt(TagInt);
-
- inline for (info.fields) |field_info| {
- if (field_info.enum_field.?.value == tag) {
- const name = field_info.name;
- const FieldType = field_info.field_type;
- ptr.* = @unionInit(C, name, undefined);
- try self.deserializeInto(&@field(ptr, name));
- return;
- }
- }
- //This is reachable if the enum data is bad
- return error.InvalidEnumTag;
- }
- @compileError("Cannot meaningfully deserialize " ++ @typeName(C) ++
- " because it is an untagged union. Use a custom deserialize().");
- },
- .Optional => {
- const OC = comptime meta.Child(C);
- const exists = (try self.deserializeInt(u1)) > 0;
- if (!exists) {
- ptr.* = null;
- return;
- }
-
- ptr.* = @as(OC, undefined); //make it non-null so the following .? is guaranteed safe
- const val_ptr = &ptr.*.?;
- try self.deserializeInto(val_ptr);
- },
- .Enum => {
- var value = try self.deserializeInt(@TagType(C));
- ptr.* = try meta.intToEnum(C, value);
- },
- else => {
- @compileError("Cannot deserialize " ++ @tagName(child_type_id) ++ " types (unimplemented).");
- },
- }
- }
- };
-}
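The doc comment on the removed Deserializer above describes the custom `deserialize` hook. A hedged sketch of a type opting into that hook together with the `io.deserializer` helper (helper signature assumed as `(endian, packing, stream)`; the hook shape is taken from the doc comment):

const Custom = struct {
    value: u16,

    // Called by the deserializer in place of field-by-field deserialization.
    pub fn deserialize(self: *@This(), deserializer: var) !void {
        self.value = try deserializer.deserialize(u16);
    }
};

test "custom deserialize hook (sketch)" {
    const bytes = [_]u8{ 0x12, 0x34 };
    var fbs = std.io.fixedBufferStream(&bytes);
    var de = std.io.deserializer(.Big, .Byte, fbs.inStream());
    const c = try de.deserialize(Custom);
    std.testing.expect(c.value == 0x1234);
}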
-
-/// Creates a serializer that serializes types to any stream.
-/// If `packing` is `.Bit`, the data will be bit-packed into the stream.
-/// Note that you must call `serializer.flush()` when you are done
-/// writing bit-packed data in order to ensure any unwritten bits are committed.
-/// If `packing` is `.Byte`, data is packed to the smallest byte. In the case
-/// of packed structs, the struct will be written bit-packed and with the specified
-/// endianness, after which data will resume being written at the next byte boundary.
-/// Types may implement a custom serialization routine with a
-/// function named `serialize` in the form of:
-/// pub fn serialize(self: Self, serializer: var) !void
-/// which will be called when the serializer is used to serialize that type. It will
-/// pass a const pointer to the type instance to be serialized and a pointer
-/// to the serializer struct.
-pub fn Serializer(comptime endian: builtin.Endian, comptime packing: Packing, comptime Error: type) type {
- return struct {
- const Self = @This();
-
- out_stream: if (packing == .Bit) BitOutStream(endian, Stream.Error) else *Stream,
-
- pub const Stream = OutStream(Error);
-
- pub fn init(out_stream: *Stream) Self {
- return Self{
- .out_stream = switch (packing) {
- .Bit => BitOutStream(endian, Stream.Error).init(out_stream),
- .Byte => out_stream,
- },
- };
- }
-
- /// Flushes any unwritten bits to the stream
- pub fn flush(self: *Self) Error!void {
- if (packing == .Bit) return self.out_stream.flushBits();
- }
-
- fn serializeInt(self: *Self, value: var) Error!void {
- const T = @TypeOf(value);
- comptime assert(trait.is(.Int)(T) or trait.is(.Float)(T));
-
- const t_bit_count = comptime meta.bitCount(T);
- const u8_bit_count = comptime meta.bitCount(u8);
-
- const U = std.meta.IntType(false, t_bit_count);
- const Log2U = math.Log2Int(U);
- const int_size = (U.bit_count + 7) / 8;
-
- const u_value = @bitCast(U, value);
-
- if (packing == .Bit) return self.out_stream.writeBits(u_value, t_bit_count);
-
- var buffer: [int_size]u8 = undefined;
- if (int_size == 1) buffer[0] = u_value;
-
- for (buffer) |*byte, i| {
- const idx = switch (endian) {
- .Big => int_size - i - 1,
- .Little => i,
- };
- const shift = @intCast(Log2U, idx * u8_bit_count);
- const v = u_value >> shift;
- byte.* = if (t_bit_count < u8_bit_count) v else @truncate(u8, v);
- }
-
- try self.out_stream.write(&buffer);
- }
-
- /// Serializes the passed value into the stream
- pub fn serialize(self: *Self, value: var) Error!void {
- const T = comptime @TypeOf(value);
-
- if (comptime trait.isIndexable(T)) {
- for (value) |v|
- try self.serialize(v);
- return;
- }
-
- //custom serializer: fn(self: Self, serializer: var) !void
- if (comptime trait.hasFn("serialize")(T)) return T.serialize(value, self);
-
- if (comptime trait.isPacked(T) and packing != .Bit) {
- var packed_serializer = Serializer(endian, .Bit, Error).init(self.out_stream);
- try packed_serializer.serialize(value);
- try packed_serializer.flush();
- return;
- }
-
- switch (@typeInfo(T)) {
- .Void => return,
- .Bool => try self.serializeInt(@as(u1, @boolToInt(value))),
- .Float, .Int => try self.serializeInt(value),
- .Struct => {
- const info = @typeInfo(T);
-
- inline for (info.Struct.fields) |*field_info| {
- const name = field_info.name;
- const FieldType = field_info.field_type;
-
- if (FieldType == void or FieldType == u0) continue;
-
- //It doesn't make sense to write pointers
- if (comptime trait.is(.Pointer)(FieldType)) {
- @compileError("Will not " ++ "serialize field " ++ name ++
- " of struct " ++ @typeName(T) ++ " because it " ++
- "is of pointer-type " ++ @typeName(FieldType) ++ ".");
- }
- try self.serialize(@field(value, name));
- }
- },
- .Union => {
- const info = @typeInfo(T).Union;
- if (info.tag_type) |TagType| {
- const active_tag = meta.activeTag(value);
- try self.serialize(active_tag);
- //This inline loop is necessary because active_tag is a runtime
- // value, but @field requires a comptime value. Our alternative
- // is to check each field for a match
- inline for (info.fields) |field_info| {
- if (field_info.enum_field.?.value == @enumToInt(active_tag)) {
- const name = field_info.name;
- const FieldType = field_info.field_type;
- try self.serialize(@field(value, name));
- return;
- }
- }
- unreachable;
- }
- @compileError("Cannot meaningfully serialize " ++ @typeName(T) ++
- " because it is an untagged union. Use a custom serialize().");
- },
- .Optional => {
- if (value == null) {
- try self.serializeInt(@as(u1, @boolToInt(false)));
- return;
- }
- try self.serializeInt(@as(u1, @boolToInt(true)));
-
- const OC = comptime meta.Child(T);
- const val_ptr = &value.?;
- try self.serialize(val_ptr.*);
- },
- .Enum => {
- try self.serializeInt(@enumToInt(value));
- },
- else => @compileError("Cannot serialize " ++ @tagName(@typeInfo(T)) ++ " types (unimplemented)."),
- }
- }
- };
+ null_out_stream.writeAll("yay" ** 10) catch |err| switch (err) {};
}
test "" {
- comptime {
- _ = @import("io/test.zig");
- }
- std.meta.refAllDecls(@This());
+ _ = @import("io/test.zig");
}
lib/std/json.zig
@@ -10,6 +10,7 @@ const mem = std.mem;
const maxInt = std.math.maxInt;
pub const WriteStream = @import("json/write_stream.zig").WriteStream;
+pub const writeStream = @import("json/write_stream.zig").writeStream;
const StringEscapes = union(enum) {
None,
@@ -2109,7 +2110,7 @@ test "write json then parse it" {
var fixed_buffer_stream = std.io.fixedBufferStream(&out_buffer);
const out_stream = fixed_buffer_stream.outStream();
- var jw = WriteStream(@TypeOf(out_stream).Child, 4).init(out_stream);
+ var jw = writeStream(out_stream, 4);
try jw.beginObject();
@@ -2140,7 +2141,7 @@ test "write json then parse it" {
var parser = Parser.init(testing.allocator, false);
defer parser.deinit();
- var tree = try parser.parse(slice_out_stream.getWritten());
+ var tree = try parser.parse(fixed_buffer_stream.getWritten());
defer tree.deinit();
testing.expect(tree.root.Object.get("f").?.value.Bool == false);
lib/std/net.zig
@@ -816,7 +816,7 @@ fn linuxLookupNameFromHosts(
};
defer file.close();
- const stream = &std.io.BufferedInStream(fs.File.ReadError).init(&file.inStream().stream).stream;
+ const stream = std.io.bufferedInStream(file.inStream()).inStream();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
error.StreamTooLong => blk: {
@@ -1010,7 +1010,7 @@ fn getResolvConf(allocator: *mem.Allocator, rc: *ResolvConf) !void {
};
defer file.close();
- const stream = &std.io.BufferedInStream(fs.File.ReadError).init(&file.inStream().stream).stream;
+ const stream = std.io.bufferedInStream(file.inStream()).inStream();
var line_buf: [512]u8 = undefined;
while (stream.readUntilDelimiterOrEof(&line_buf, '\n') catch |err| switch (err) {
error.StreamTooLong => blk: {