const std = @import("../std.zig");
const math = std.math;
const mem = std.mem;
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const ArrayList = std.ArrayList;
const Writer = std.Io.Writer;
const Reader = std.Io.Reader;

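/// Binary range decoder (arithmetic coder) used by LZMA. `range` tracks the
/// width of the current coding interval and `code` the position of the
/// encoded stream inside it; `range` is renormalized to stay at or above
/// 2^24 by shifting in one input byte at a time.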
pub const RangeDecoder = struct {
    range: u32,
    code: u32,

    pub fn init(reader: *Reader) !RangeDecoder {
        var counter: u64 = 0;
        return initCounting(reader, &counter);
    }

    pub fn initCounting(reader: *Reader, n_read: *u64) !RangeDecoder {
        const reserved = try reader.takeByte();
        n_read.* += 1;
        if (reserved != 0) return error.InvalidRangeCode;
        const code = try reader.takeInt(u32, .big);
        n_read.* += 4;
        return .{
            .range = 0xFFFF_FFFF,
            .code = code,
        };
    }

    pub fn isFinished(self: RangeDecoder) bool {
        return self.code == 0;
    }

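    /// Mirrors the encoder's renormalization: whenever the range drops below
    /// 2^24, both `range` and `code` are shifted left by one byte and the
    /// next input byte is mixed into the low bits of `code`.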
    fn normalize(self: *RangeDecoder, reader: *Reader, n_read: *u64) !void {
        if (self.range < 0x0100_0000) {
            self.range <<= 8;
            self.code = (self.code << 8) ^ @as(u32, try reader.takeByte());
            n_read.* += 1;
        }
    }

    fn getBit(self: *RangeDecoder, reader: *Reader, n_read: *u64) !bool {
        self.range >>= 1;

        const bit = self.code >= self.range;
        if (bit) self.code -= self.range;

        try self.normalize(reader, n_read);
        return bit;
    }

    pub fn get(self: *RangeDecoder, reader: *Reader, count: usize, n_read: *u64) !u32 {
        var result: u32 = 0;
        for (0..count) |_| {
            result = (result << 1) ^ @intFromBool(try self.getBit(reader, n_read));
        }
        return result;
    }

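    /// Decodes a single bit with an adaptive probability. `prob` is an 11-bit
    /// estimate (out of 0x800) of the chance that the bit is 0, so 0x400
    /// means 50/50; it is nudged toward the observed value by 1/32 of the
    /// remaining distance on every call.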
    pub fn decodeBit(self: *RangeDecoder, reader: *Reader, prob: *u16, n_read: *u64) !bool {
        const bound = (self.range >> 11) * prob.*;

        if (self.code < bound) {
            prob.* += (0x800 - prob.*) >> 5;
            self.range = bound;

            try self.normalize(reader, n_read);
            return false;
        } else {
            prob.* -= prob.* >> 5;
            self.code -= bound;
            self.range -= bound;

            try self.normalize(reader, n_read);
            return true;
        }
    }

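    /// Walks a binary tree of adaptive probabilities. The running index `tmp`
    /// starts at 1 and collects one decoded bit per level, so after
    /// `num_bits` steps it equals the decoded value plus `1 << num_bits`.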
    fn parseBitTree(
        self: *RangeDecoder,
        reader: *Reader,
        num_bits: u5,
        probs: []u16,
        n_read: *u64,
    ) !u32 {
        var tmp: u32 = 1;
        var i: @TypeOf(num_bits) = 0;
        while (i < num_bits) : (i += 1) {
            const bit = try self.decodeBit(reader, &probs[tmp], n_read);
            tmp = (tmp << 1) ^ @intFromBool(bit);
        }
        return tmp - (@as(u32, 1) << num_bits);
    }

    pub fn parseReverseBitTree(
        self: *RangeDecoder,
        reader: *Reader,
        num_bits: u5,
        probs: []u16,
        offset: usize,
        n_read: *u64,
    ) !u32 {
        var result: u32 = 0;
        var tmp: usize = 1;
        var i: @TypeOf(num_bits) = 0;
        while (i < num_bits) : (i += 1) {
            const bit = @intFromBool(try self.decodeBit(reader, &probs[offset + tmp], n_read));
            tmp = (tmp << 1) ^ bit;
            result ^= @as(u32, bit) << i;
        }
        return result;
    }
};

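/// LZMA decoder state: the adaptive probability models for literals, match
/// flags, distances, and lengths, plus the last four match distances and the
/// current index of the LZMA state machine.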
pub const Decode = struct {
    properties: Properties,
    literal_probs: Vec2d,
    pos_slot_decoder: [4]BitTree(6),
    align_decoder: BitTree(4),
    pos_decoders: [115]u16,
    is_match: [192]u16,
    is_rep: [12]u16,
    is_rep_g0: [12]u16,
    is_rep_g1: [12]u16,
    is_rep_g2: [12]u16,
    is_rep_0long: [192]u16,
    state: usize,
    rep: [4]usize,
    len_decoder: LenDecoder,
    rep_len_decoder: LenDecoder,

    pub fn init(gpa: Allocator, properties: Properties) !Decode {
        return .{
            .properties = properties,
            .literal_probs = try Vec2d.init(gpa, 0x400, @as(usize, 1) << (properties.lc + properties.lp), 0x300),
            .pos_slot_decoder = @splat(.{}),
            .align_decoder = .{},
            .pos_decoders = @splat(0x400),
            .is_match = @splat(0x400),
            .is_rep = @splat(0x400),
            .is_rep_g0 = @splat(0x400),
            .is_rep_g1 = @splat(0x400),
            .is_rep_g2 = @splat(0x400),
            .is_rep_0long = @splat(0x400),
            .state = 0,
            .rep = @splat(0),
            .len_decoder = .{},
            .rep_len_decoder = .{},
        };
    }

    pub fn deinit(self: *Decode, gpa: Allocator) void {
        self.literal_probs.deinit(gpa);
        self.* = undefined;
    }

    pub fn resetState(self: *Decode, gpa: Allocator, new_props: Properties) !void {
        new_props.validate();
        if (self.properties.lc + self.properties.lp == new_props.lc + new_props.lp) {
            self.literal_probs.fill(0x400);
        } else {
            self.literal_probs.deinit(gpa);
            self.literal_probs = try Vec2d.init(gpa, 0x400, @as(usize, 1) << (new_props.lc + new_props.lp), 0x300);
        }

        self.properties = new_props;
        for (&self.pos_slot_decoder) |*t| t.reset();
        self.align_decoder.reset();
        self.pos_decoders = @splat(0x400);
        self.is_match = @splat(0x400);
        self.is_rep = @splat(0x400);
        self.is_rep_g0 = @splat(0x400);
        self.is_rep_g1 = @splat(0x400);
        self.is_rep_g2 = @splat(0x400);
        self.is_rep_0long = @splat(0x400);
        self.state = 0;
        self.rep = @splat(0);
        self.len_decoder.reset();
        self.rep_len_decoder.reset();
    }

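    /// Decodes one LZMA symbol: either a single literal byte or an LZ match
    /// (length, distance), where the distance may be a repeat of one of the
    /// last four distances. Returns `.finished` only when the end-of-stream
    /// marker (distance 0xFFFF_FFFF) is decoded.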
    pub fn process(
        self: *Decode,
        reader: *Reader,
        allocating: *Writer.Allocating,
        /// `CircularBuffer` or `std.compress.lzma2.AccumBuffer`.
        buffer: anytype,
        decoder: *RangeDecoder,
        n_read: *u64,
    ) !ProcessingStatus {
        const gpa = allocating.allocator;
        const writer = &allocating.writer;
        const pos_state = buffer.len & ((@as(usize, 1) << self.properties.pb) - 1);

        if (!try decoder.decodeBit(reader, &self.is_match[(self.state << 4) + pos_state], n_read)) {
            const byte: u8 = try self.decodeLiteral(reader, buffer, decoder, n_read);

            try buffer.appendLiteral(gpa, byte, writer);

            self.state = if (self.state < 4)
                0
            else if (self.state < 10)
                self.state - 3
            else
                self.state - 6;
            return .more;
        }

        var len: usize = undefined;
        if (try decoder.decodeBit(reader, &self.is_rep[self.state], n_read)) {
            if (!try decoder.decodeBit(reader, &self.is_rep_g0[self.state], n_read)) {
                if (!try decoder.decodeBit(reader, &self.is_rep_0long[(self.state << 4) + pos_state], n_read)) {
                    self.state = if (self.state < 7) 9 else 11;
                    const dist = self.rep[0] + 1;
                    try buffer.appendLz(gpa, 1, dist, writer);
                    return .more;
                }
            } else {
                const idx: usize = if (!try decoder.decodeBit(reader, &self.is_rep_g1[self.state], n_read))
                    1
                else if (!try decoder.decodeBit(reader, &self.is_rep_g2[self.state], n_read))
                    2
                else
                    3;
                const dist = self.rep[idx];
                var i = idx;
                while (i > 0) : (i -= 1) {
                    self.rep[i] = self.rep[i - 1];
                }
                self.rep[0] = dist;
            }

            len = try self.rep_len_decoder.decode(reader, decoder, pos_state, n_read);

            self.state = if (self.state < 7) 8 else 11;
        } else {
            self.rep[3] = self.rep[2];
            self.rep[2] = self.rep[1];
            self.rep[1] = self.rep[0];

            len = try self.len_decoder.decode(reader, decoder, pos_state, n_read);

            self.state = if (self.state < 7) 7 else 10;

            const rep_0 = try self.decodeDistance(reader, decoder, len, n_read);

            self.rep[0] = rep_0;
            if (self.rep[0] == 0xFFFF_FFFF) {
                if (decoder.isFinished()) {
                    return .finished;
                }
                return error.CorruptInput;
            }
        }

        len += 2;

        const dist = self.rep[0] + 1;
        try buffer.appendLz(gpa, len, dist, writer);

        return .more;
    }

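    /// Decodes one literal byte. The probability table is selected by the low
    /// `lp` bits of the output position and the high `lc` bits of the
    /// previous byte; right after a match (state >= 7), decoding is
    /// additionally guided by the byte the current distance points at
    /// ("matched literal" mode).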
    fn decodeLiteral(
        self: *Decode,
        reader: *Reader,
        /// `CircularBuffer` or `std.compress.lzma2.AccumBuffer`.
        buffer: anytype,
        decoder: *RangeDecoder,
        n_read: *u64,
    ) !u8 {
        const def_prev_byte = 0;
        const prev_byte = @as(usize, buffer.lastOr(def_prev_byte));

        var result: usize = 1;
        const lit_state = ((buffer.len & ((@as(usize, 1) << self.properties.lp) - 1)) << self.properties.lc) +
            (prev_byte >> (8 - self.properties.lc));
        const probs = try self.literal_probs.get(lit_state);

        if (self.state >= 7) {
            var match_byte = @as(usize, try buffer.lastN(self.rep[0] + 1));

            while (result < 0x100) {
                const match_bit = (match_byte >> 7) & 1;
                match_byte <<= 1;
                const bit = @intFromBool(try decoder.decodeBit(
                    reader,
                    &probs[((@as(usize, 1) + match_bit) << 8) + result],
                    n_read,
                ));
                result = (result << 1) ^ bit;
                if (match_bit != bit) {
                    break;
                }
            }
        }

        while (result < 0x100) {
            result = (result << 1) ^ @intFromBool(try decoder.decodeBit(reader, &probs[result], n_read));
        }

        return @truncate(result - 0x100);
    }

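    /// Decodes a match distance. A 6-bit position slot selects the distance
    /// range: slots 0-3 are the distance itself, slots 4-13 add footer bits
    /// from a reverse bit tree over `pos_decoders`, and higher slots add
    /// direct bits from the range decoder followed by the 4-bit
    /// `align_decoder`.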
    fn decodeDistance(
        self: *Decode,
        reader: *Reader,
        decoder: *RangeDecoder,
        length: usize,
        n_read: *u64,
    ) !usize {
        const len_state = if (length > 3) 3 else length;

        const pos_slot: usize = try self.pos_slot_decoder[len_state].parse(reader, decoder, n_read);
        if (pos_slot < 4) return pos_slot;

        const num_direct_bits = @as(u5, @intCast((pos_slot >> 1) - 1));
        var result = (2 ^ (pos_slot & 1)) << num_direct_bits;

        if (pos_slot < 14) {
            result += try decoder.parseReverseBitTree(
                reader,
                num_direct_bits,
                &self.pos_decoders,
                result - pos_slot,
                n_read,
            );
        } else {
            result += @as(usize, try decoder.get(reader, num_direct_bits - 4, n_read)) << 4;
            result += try self.align_decoder.parseReverse(reader, decoder, n_read);
        }

        return result;
    }

    /// A circular buffer for LZ sequences
    pub const CircularBuffer = struct {
        /// Circular buffer
        buf: ArrayList(u8),
        /// Length of the buffer
        dict_size: usize,
        /// Buffer memory limit
        mem_limit: usize,
        /// Current position
        cursor: usize,
        /// Total number of bytes sent through the buffer
        len: usize,

        pub fn init(dict_size: usize, mem_limit: usize) CircularBuffer {
            return .{
                .buf = .{},
                .dict_size = dict_size,
                .mem_limit = mem_limit,
                .cursor = 0,
                .len = 0,
            };
        }

        pub fn get(self: CircularBuffer, index: usize) u8 {
            return if (0 <= index and index < self.buf.items.len) self.buf.items[index] else 0;
        }

        pub fn set(self: *CircularBuffer, gpa: Allocator, index: usize, value: u8) !void {
            if (index >= self.mem_limit) {
                return error.CorruptInput;
            }
            try self.buf.ensureTotalCapacity(gpa, index + 1);
            while (self.buf.items.len < index) {
                self.buf.appendAssumeCapacity(0);
            }
            // Overwrite in place once the buffer already covers `index`;
            // otherwise the dictionary would keep growing after wrapping.
            if (index < self.buf.items.len) {
                self.buf.items[index] = value;
            } else {
                self.buf.appendAssumeCapacity(value);
            }
        }

        /// Retrieve the last byte or return a default
        pub fn lastOr(self: CircularBuffer, lit: u8) u8 {
            return if (self.len == 0)
                lit
            else
                self.get((self.dict_size + self.cursor - 1) % self.dict_size);
        }

        /// Retrieve the n-th last byte
        pub fn lastN(self: CircularBuffer, dist: usize) !u8 {
            if (dist > self.dict_size or dist > self.len) {
                return error.CorruptInput;
            }

            const offset = (self.dict_size + self.cursor - dist) % self.dict_size;
            return self.get(offset);
        }

        /// Append a literal
        pub fn appendLiteral(
            self: *CircularBuffer,
            gpa: Allocator,
            lit: u8,
            writer: *Writer,
        ) !void {
            try self.set(gpa, self.cursor, lit);
            self.cursor += 1;
            self.len += 1;

            // Flush the circular buffer to the output
            if (self.cursor == self.dict_size) {
                try writer.writeAll(self.buf.items);
                self.cursor = 0;
            }
        }

        /// Fetch an LZ sequence (length, distance) from inside the buffer
        pub fn appendLz(
            self: *CircularBuffer,
            gpa: Allocator,
            len: usize,
            dist: usize,
            writer: *Writer,
        ) !void {
            if (dist > self.dict_size or dist > self.len) {
                return error.CorruptInput;
            }

            var offset = (self.dict_size + self.cursor - dist) % self.dict_size;
            var i: usize = 0;
            while (i < len) : (i += 1) {
                const x = self.get(offset);
                try self.appendLiteral(gpa, x, writer);
                offset += 1;
                if (offset == self.dict_size) {
                    offset = 0;
                }
            }
        }

        pub fn finish(self: *CircularBuffer, writer: *Writer) !void {
            if (self.cursor > 0) {
                try writer.writeAll(self.buf.items[0..self.cursor]);
                self.cursor = 0;
            }
        }

        pub fn deinit(self: *CircularBuffer, gpa: Allocator) void {
            self.buf.deinit(gpa);
            self.* = undefined;
        }
    };

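    /// A complete binary tree of `1 << num_bits` adaptive probabilities used
    /// to decode `num_bits`-wide values, either most-significant bit first
    /// (`parse`) or least-significant bit first (`parseReverse`).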
    pub fn BitTree(comptime num_bits: usize) type {
        return struct {
            probs: [1 << num_bits]u16 = @splat(0x400),

            pub fn parse(self: *@This(), reader: *Reader, decoder: *RangeDecoder, n_read: *u64) !u32 {
                return decoder.parseBitTree(reader, num_bits, &self.probs, n_read);
            }

            pub fn parseReverse(
                self: *@This(),
                reader: *Reader,
                decoder: *RangeDecoder,
                n_read: *u64,
            ) !u32 {
                return decoder.parseReverseBitTree(reader, num_bits, &self.probs, 0, n_read);
            }

            pub fn reset(self: *@This()) void {
                @memset(&self.probs, 0x400);
            }
        };
    }

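    /// Decodes match lengths. Lengths 0-7 come from a per-position-state
    /// 3-bit "low" tree, 8-15 from a "mid" tree, and everything above from a
    /// shared 8-bit "high" tree; `process` then adds the minimum match
    /// length of 2.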
    pub const LenDecoder = struct {
        choice: u16 = 0x400,
        choice2: u16 = 0x400,
        low_coder: [16]BitTree(3) = @splat(.{}),
        mid_coder: [16]BitTree(3) = @splat(.{}),
        high_coder: BitTree(8) = .{},

        pub fn decode(
            self: *LenDecoder,
            reader: *Reader,
            decoder: *RangeDecoder,
            pos_state: usize,
            n_read: *u64,
        ) !usize {
            if (!try decoder.decodeBit(reader, &self.choice, n_read)) {
                return @as(usize, try self.low_coder[pos_state].parse(reader, decoder, n_read));
            } else if (!try decoder.decodeBit(reader, &self.choice2, n_read)) {
                return @as(usize, try self.mid_coder[pos_state].parse(reader, decoder, n_read)) + 8;
            } else {
                return @as(usize, try self.high_coder.parse(reader, decoder, n_read)) + 16;
            }
        }

        pub fn reset(self: *LenDecoder) void {
            self.choice = 0x400;
            self.choice2 = 0x400;
            for (&self.low_coder) |*t| t.reset();
            for (&self.mid_coder) |*t| t.reset();
            self.high_coder.reset();
        }
    };

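    /// A dense 2D array of probabilities stored row-major in a single
    /// allocation; `get` returns one row. Used for the per-context literal
    /// probability tables (one row of 0x300 entries per literal context).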
    pub const Vec2d = struct {
        data: []u16,
        cols: usize,

        pub fn init(gpa: Allocator, value: u16, w: usize, h: usize) !Vec2d {
            const len = try math.mul(usize, w, h);
            const data = try gpa.alloc(u16, len);
            @memset(data, value);
            return .{
                .data = data,
                .cols = h,
            };
        }

        pub fn deinit(v: *Vec2d, gpa: Allocator) void {
            gpa.free(v.data);
            v.* = undefined;
        }

        pub fn fill(v: *Vec2d, value: u16) void {
            @memset(v.data, value);
        }

        fn get(v: Vec2d, row: usize) ![]u16 {
            const start_row = try math.mul(usize, row, v.cols);
            const end_row = try math.add(usize, start_row, v.cols);
            return v.data[start_row..end_row];
        }
    };

    pub const Options = struct {
        unpacked_size: UnpackedSize = .read_from_header,
        mem_limit: ?usize = null,
        allow_incomplete: bool = false,
    };

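    /// How the decompressed size is determined: taken from the 8-byte field
    /// in the LZMA header, read from the header but overridden by the caller,
    /// or supplied by the caller with no header field present. `null` means
    /// the size is unknown and the stream must end with an end marker.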
    pub const UnpackedSize = union(enum) {
        read_from_header,
        read_header_but_use_provided: ?u64,
        use_provided: ?u64,
    };

    const ProcessingStatus = enum {
        more,
        finished,
    };

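    /// The three LZMA model parameters: `lc` literal-context bits, `lp`
    /// literal-position bits, and `pb` position bits.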
    pub const Properties = struct {
        lc: u4,
        lp: u3,
        pb: u3,

        fn validate(self: Properties) void {
            assert(self.lc <= 8);
            assert(self.lp <= 4);
            assert(self.pb <= 4);
        }
    };

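    /// Parameters from the LZMA header: a properties byte encoding
    /// `(pb * 5 + lp) * 9 + lc`, a little-endian u32 dictionary size, and
    /// (depending on `Options.unpacked_size`) a little-endian u64 unpacked
    /// size, where 0xFFFF_FFFF_FFFF_FFFF means "unknown, end marker required".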
    pub const Params = struct {
        properties: Properties,
        dict_size: u32,
        unpacked_size: ?u64,

        pub fn readHeader(reader: *Reader, options: Options) !Params {
            var props = try reader.takeByte();
            if (props >= 225) return error.CorruptInput;

            const lc: u4 = @intCast(props % 9);
            props /= 9;
            const lp: u3 = @intCast(props % 5);
            props /= 5;
            const pb: u3 = @intCast(props);

            const dict_size_provided = try reader.takeInt(u32, .little);
            const dict_size = @max(0x1000, dict_size_provided);

            const unpacked_size = switch (options.unpacked_size) {
                .read_from_header => blk: {
                    const unpacked_size_provided = try reader.takeInt(u64, .little);
                    const marker_mandatory = unpacked_size_provided == 0xFFFF_FFFF_FFFF_FFFF;
                    break :blk if (marker_mandatory) null else unpacked_size_provided;
                },
                .read_header_but_use_provided => |x| blk: {
                    _ = try reader.takeInt(u64, .little);
                    break :blk x;
                },
                .use_provided => |x| x,
            };

            return .{
                .properties = .{ .lc = lc, .lp = lp, .pb = pb },
                .dict_size = dict_size,
                .unpacked_size = unpacked_size,
            };
        }
    };
};

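/// Streaming LZMA decompressor that exposes its output through the
/// `std.Io.Reader` interface at `reader`.
///
/// Usage sketch (hypothetical names: `input` is a `*Reader` positioned at the
/// LZMA header, `gpa` an allocator, `out_buf` a `gpa`-allocated `[]u8` whose
/// ownership is transferred, and `mem_limit` a cap on dictionary memory):
///
///     var decompress = try Decompress.initOptions(input, gpa, out_buf, .{}, mem_limit);
///     defer decompress.deinit();
///     // Pull decompressed bytes through `decompress.reader`.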
pub const Decompress = struct {
    gpa: Allocator,
    input: *Reader,
    reader: Reader,
    buffer: Decode.CircularBuffer,
    range_decoder: RangeDecoder,
    decode: Decode,
    err: ?Error,
    unpacked_size: ?u64,

    pub const Error = error{
        OutOfMemory,
        ReadFailed,
        CorruptInput,
        DecompressedSizeMismatch,
        EndOfStream,
        Overflow,
    };

    /// Takes ownership of `buffer` which may be resized with `gpa`.
    ///
    /// LZMA was explicitly designed to take advantage of large heap memory
    /// being available, with a dictionary size anywhere from 4K to 4G. Thus,
    /// this API dynamically allocates the dictionary as-needed.
    pub fn initParams(
        input: *Reader,
        gpa: Allocator,
        buffer: []u8,
        params: Decode.Params,
        mem_limit: usize,
    ) !Decompress {
        return .{
            .gpa = gpa,
            .input = input,
            .buffer = Decode.CircularBuffer.init(params.dict_size, mem_limit),
            .range_decoder = try RangeDecoder.init(input),
            .decode = try Decode.init(gpa, params.properties),
            .reader = .{
                .buffer = buffer,
                .vtable = &.{
                    .readVec = readVec,
                    .stream = stream,
                    .discard = discard,
                },
                .seek = 0,
                .end = 0,
            },
            .err = null,
            .unpacked_size = params.unpacked_size,
        };
    }

    /// Takes ownership of `buffer` which may be resized with `gpa`.
    ///
    /// LZMA was explicitly designed to take advantage of large heap memory
    /// being available, with a dictionary size anywhere from 4K to 4G. Thus,
    /// this API dynamically allocates the dictionary as-needed.
    pub fn initOptions(
        input: *Reader,
        gpa: Allocator,
        buffer: []u8,
        options: Decode.Options,
        mem_limit: usize,
    ) !Decompress {
        const params = try Decode.Params.readHeader(input, options);
        return initParams(input, gpa, buffer, params, mem_limit);
    }

    /// Reclaim ownership of the buffer passed to `initParams` or `initOptions`.
    pub fn takeBuffer(d: *Decompress) []u8 {
        const buffer = d.reader.buffer;
        d.reader.buffer = &.{};
        return buffer;
    }

    pub fn deinit(d: *Decompress) void {
        const gpa = d.gpa;
        gpa.free(d.reader.buffer);
        d.buffer.deinit(gpa);
        d.decode.deinit(gpa);
        d.* = undefined;
    }

    fn readVec(r: *Reader, data: [][]u8) Reader.Error!usize {
        _ = data;
        return readIndirect(r);
    }

    fn stream(r: *Reader, w: *Writer, limit: std.Io.Limit) Reader.StreamError!usize {
        _ = w;
        _ = limit;
        return readIndirect(r);
    }

    fn discard(r: *Reader, limit: std.Io.Limit) Reader.Error!usize {
        const d: *Decompress = @alignCast(@fieldParentPtr("reader", r));
        _ = d;
        _ = limit;
        @panic("TODO");
    }

    fn readIndirect(r: *Reader) Reader.Error!usize {
        const d: *Decompress = @alignCast(@fieldParentPtr("reader", r));
        const gpa = d.gpa;
        var allocating = Writer.Allocating.initOwnedSlice(gpa, r.buffer);
        allocating.writer.end = r.end;
        defer {
            r.buffer = allocating.writer.buffer;
            r.end = allocating.writer.end;
        }
        if (d.decode.state == math.maxInt(usize)) return error.EndOfStream;

        process_next: {
            if (d.unpacked_size) |unpacked_size| {
                if (d.buffer.len >= unpacked_size) break :process_next;
            } else if (d.range_decoder.isFinished()) {
                break :process_next;
            }
            var n_read: u64 = 0;
            switch (d.decode.process(d.input, &allocating, &d.buffer, &d.range_decoder, &n_read) catch |err| switch (err) {
                error.WriteFailed => {
                    d.err = error.OutOfMemory;
                    return error.ReadFailed;
                },
                error.EndOfStream => {
                    d.err = error.EndOfStream;
                    return error.ReadFailed;
                },
                else => |e| {
                    d.err = e;
                    return error.ReadFailed;
                },
            }) {
                .more => return 0,
                .finished => break :process_next,
            }
        }

        if (d.unpacked_size) |unpacked_size| {
            if (d.buffer.len != unpacked_size) {
                d.err = error.DecompressedSizeMismatch;
                return error.ReadFailed;
            }
        }

        d.buffer.finish(&allocating.writer) catch |err| switch (err) {
            error.WriteFailed => {
                d.err = error.OutOfMemory;
                return error.ReadFailed;
            },
        };
        d.decode.state = math.maxInt(usize);
        return 0;
    }
};

test {
    _ = @import("lzma/test.zig");
}