//! Allocator that fails after N allocations, useful for making sure out of
//! memory conditions are handled correctly.
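//! The first induced allocation failure records a stack trace that can be
//! retrieved afterwards via `getStackTrace`.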
const std = @import("../std.zig");
const mem = std.mem;
const FailingAllocator = @This();

alloc_index: usize,
resize_index: usize,
internal_allocator: mem.Allocator,
allocated_bytes: usize,
freed_bytes: usize,
allocations: usize,
deallocations: usize,
stack_addresses: [num_stack_frames]usize,
has_induced_failure: bool,
fail_index: usize,
resize_fail_index: usize,

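// How many return addresses to record when a failure is induced; zero on
// targets that cannot capture stack traces.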
const num_stack_frames = if (std.debug.sys_can_stack_trace) 16 else 0;

pub const Config = struct {
    /// The number of successful allocations you can expect from this allocator.
    /// The next allocation will fail.
    fail_index: usize = std.math.maxInt(usize),

    /// Number of successful resizes to expect from this allocator. The next resize will fail.
    resize_fail_index: usize = std.math.maxInt(usize),
};

pub fn init(internal_allocator: mem.Allocator, config: Config) FailingAllocator {
    return FailingAllocator{
        .internal_allocator = internal_allocator,
        .alloc_index = 0,
        .resize_index = 0,
        .allocated_bytes = 0,
        .freed_bytes = 0,
        .allocations = 0,
        .deallocations = 0,
        .stack_addresses = undefined,
        .has_induced_failure = false,
        .fail_index = config.fail_index,
        .resize_fail_index = config.resize_fail_index,
    };
}

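/// Returns a `mem.Allocator` interface that forwards all operations to
/// `internal_allocator` while counting them and inducing failures as
/// configured by `Config`.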
pub fn allocator(self: *FailingAllocator) mem.Allocator {
    return .{
        .ptr = self,
        .vtable = &.{
            .alloc = alloc,
            .resize = resize,
            .remap = remap,
            .free = free,
        },
    };
}

fn alloc(
    ctx: *anyopaque,
    len: usize,
    alignment: mem.Alignment,
    return_address: usize,
) ?[*]u8 {
    const self: *FailingAllocator = @ptrCast(@alignCast(ctx));
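    // Induce an artificial out-of-memory failure once the configured number of
    // successful allocations has been reached, capturing the stack trace of the
    // first induced failure for later inspection via `getStackTrace`.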
    if (self.alloc_index == self.fail_index) {
        if (!self.has_induced_failure) {
            const st = std.debug.captureCurrentStackTrace(.{ .first_address = return_address }, &self.stack_addresses);
            @memset(self.stack_addresses[@min(st.index, self.stack_addresses.len)..], 0);
            self.has_induced_failure = true;
        }
        return null;
    }
    const result = self.internal_allocator.rawAlloc(len, alignment, return_address) orelse
        return null;
    self.allocated_bytes += len;
    self.allocations += 1;
    self.alloc_index += 1;
    return result;
}

fn resize(
    ctx: *anyopaque,
    memory: []u8,
    alignment: mem.Alignment,
    new_len: usize,
    ra: usize,
) bool {
    const self: *FailingAllocator = @ptrCast(@alignCast(ctx));
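    // Induce a failure once the configured number of successful resizes has occurred.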
    if (self.resize_index == self.resize_fail_index)
        return false;
    if (!self.internal_allocator.rawResize(memory, alignment, new_len, ra))
        return false;
    if (new_len < memory.len) {
        self.freed_bytes += memory.len - new_len;
    } else {
        self.allocated_bytes += new_len - memory.len;
    }
    self.resize_index += 1;
    return true;
}

fn remap(
    ctx: *anyopaque,
    memory: []u8,
    alignment: mem.Alignment,
    new_len: usize,
    ra: usize,
) ?[*]u8 {
    const self: *FailingAllocator = @ptrCast(@alignCast(ctx));
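    // Remap shares the resize counter, so it fails at the same configured index as resize.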
    if (self.resize_index == self.resize_fail_index) return null;
    const new_ptr = self.internal_allocator.rawRemap(memory, alignment, new_len, ra) orelse return null;
    if (new_len < memory.len) {
        self.freed_bytes += memory.len - new_len;
    } else {
        self.allocated_bytes += new_len - memory.len;
    }
    self.resize_index += 1;
    return new_ptr;
}

fn free(
    ctx: *anyopaque,
    old_mem: []u8,
    alignment: mem.Alignment,
    ra: usize,
) void {
    const self: *FailingAllocator = @ptrCast(@alignCast(ctx));
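    // Frees are never induced to fail; they are forwarded and counted.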
    self.internal_allocator.rawFree(old_mem, alignment, ra);
    self.deallocations += 1;
    self.freed_bytes += old_mem.len;
}

/// Only valid once `has_induced_failure == true`
pub fn getStackTrace(self: *FailingAllocator) std.builtin.StackTrace {
    std.debug.assert(self.has_induced_failure);
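    // Unused frames were zero-filled when the failure was induced,
    // so count addresses up to the first zero.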
    var len: usize = 0;
    while (len < self.stack_addresses.len and self.stack_addresses[len] != 0) {
        len += 1;
    }
    return .{
        .instruction_addresses = &self.stack_addresses,
        .index = len,
    };
}

test FailingAllocator {
    // Fail on allocation
    {
        var failing_allocator_state = FailingAllocator.init(std.testing.allocator, .{
            .fail_index = 2,
        });
        const failing_alloc = failing_allocator_state.allocator();

        const a = try failing_alloc.create(i32);
        defer failing_alloc.destroy(a);
        const b = try failing_alloc.create(i32);
        defer failing_alloc.destroy(b);
        try std.testing.expectError(error.OutOfMemory, failing_alloc.create(i32));
    }
    // Fail on resize
    {
        var failing_allocator_state = FailingAllocator.init(std.testing.allocator, .{
            .resize_fail_index = 1,
        });
        const failing_alloc = failing_allocator_state.allocator();

        const resized_slice = blk: {
            const slice = try failing_alloc.alloc(u8, 8);
            errdefer failing_alloc.free(slice);

            break :blk failing_alloc.remap(slice, 6) orelse return error.UnexpectedRemapFailure;
        };
        defer failing_alloc.free(resized_slice);

        // Remap and resize should fail from here on out
        try std.testing.expectEqual(null, failing_alloc.remap(resized_slice, 4));
        try std.testing.expectEqual(false, failing_alloc.resize(resized_slice, 4));

        // Note: realloc could still succeed, because it falls back to
        // allocating a new buffer, copying, and freeing the old one.
    }
}