const std = @import("../std.zig");
const Allocator = std.mem.Allocator;
const assert = std.debug.assert;
const mem = std.mem;

const FixedBufferAllocator = @This();

end_index: usize,
buffer: []u8,

pub fn init(buffer: []u8) FixedBufferAllocator {
    return .{
        .buffer = buffer,
        .end_index = 0,
    };
}

/// Using this at the same time as the interface returned by `threadSafeAllocator` is not thread safe.
pub fn allocator(self: *FixedBufferAllocator) Allocator {
    return .{
        .ptr = self,
        .vtable = &.{
            .alloc = alloc,
            .resize = resize,
            .remap = remap,
            .free = free,
        },
    };
}
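
// A minimal usage sketch (illustrative; `buffer`, `fba`, and `a` are local
// names, not part of this file): back the allocator with a fixed buffer and
// use the returned `Allocator` interface as usual. Allocation fails with
// `error.OutOfMemory` once the buffer is exhausted.
//
//     var buffer: [1024]u8 = undefined;
//     var fba = FixedBufferAllocator.init(&buffer);
//     const a = fba.allocator();
//     const bytes = try a.alloc(u8, 64);
//     defer a.free(bytes);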

/// Provides a lock-free, thread-safe `Allocator` interface to the underlying `FixedBufferAllocator`.
///
/// Using this at the same time as the interface returned by `allocator` is not thread safe.
pub fn threadSafeAllocator(self: *FixedBufferAllocator) Allocator {
    return .{
        .ptr = self,
        .vtable = &.{
            .alloc = threadSafeAlloc,
            .resize = Allocator.noResize,
            .remap = Allocator.noRemap,
            .free = Allocator.noFree,
        },
    };
}
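
// Note: the thread-safe vtable wires `resize`, `remap`, and `free` to the
// no-op implementations, so memory handed out through this interface is
// reclaimed only by `reset` (which must not race with concurrent
// allocations).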

pub fn ownsPtr(self: *FixedBufferAllocator, ptr: [*]u8) bool {
    return sliceContainsPtr(self.buffer, ptr);
}

pub fn ownsSlice(self: *FixedBufferAllocator, slice: []u8) bool {
    return sliceContainsSlice(self.buffer, slice);
}

/// This has false negatives when the most recent allocation had an
/// adjusted_index. In such a case we cannot determine what the last
/// allocation was, because the alignment adjustment done in `alloc` is
/// not reversible.
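///
/// For example (illustrative, assuming an 8-byte-aligned buffer): allocate
/// 1 byte (`end_index` becomes 1), then 8 bytes at 8-byte alignment (7 bytes
/// of padding, `end_index` becomes 16), then free the 8-byte allocation.
/// `free` subtracts only the allocation's length, leaving `end_index` at 8,
/// so the 1-byte allocation ending at index 1 is no longer recognized as
/// the last one.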
pub fn isLastAllocation(self: *FixedBufferAllocator, buf: []u8) bool {
    return buf.ptr + buf.len == self.buffer.ptr + self.end_index;
}
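
/// Returns a pointer to `n` bytes aligned per `alignment`, advancing
/// `end_index` past any padding the alignment requires, or null if the
/// remaining capacity of the buffer is too small.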
pub fn alloc(ctx: *anyopaque, n: usize, alignment: mem.Alignment, ra: usize) ?[*]u8 {
    const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
    _ = ra;
    const ptr_align = alignment.toByteUnits();
    const adjust_off = mem.alignPointerOffset(self.buffer.ptr + self.end_index, ptr_align) orelse return null;
    const adjusted_index = self.end_index + adjust_off;
    const new_end_index = adjusted_index + n;
    if (new_end_index > self.buffer.len) return null;
    self.end_index = new_end_index;
    return self.buffer.ptr + adjusted_index;
}
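
/// Shrink requests always succeed, but the freed bytes are reclaimed only
/// when `buf` is the most recent allocation. Grow requests succeed only
/// when `buf` is the most recent allocation and enough capacity remains.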
pub fn resize(
    ctx: *anyopaque,
    buf: []u8,
    alignment: mem.Alignment,
    new_size: usize,
    return_address: usize,
) bool {
    const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
    _ = alignment;
    _ = return_address;
    assert(@inComptime() or self.ownsSlice(buf));

    if (!self.isLastAllocation(buf)) {
        if (new_size > buf.len) return false;
        return true;
    }

    if (new_size <= buf.len) {
        const sub = buf.len - new_size;
        self.end_index -= sub;
        return true;
    }

    const add = new_size - buf.len;
    if (add + self.end_index > self.buffer.len) return false;

    self.end_index += add;
    return true;
}
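
/// In-place only, with the same rules as `resize`: returns the unchanged
/// pointer on success and null on failure.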
pub fn remap(
    context: *anyopaque,
    memory: []u8,
    alignment: mem.Alignment,
    new_len: usize,
    return_address: usize,
) ?[*]u8 {
    return if (resize(context, memory, alignment, new_len, return_address)) memory.ptr else null;
}
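
/// Reclaims the memory only when `buf` is the most recent allocation;
/// otherwise the bytes stay allocated until `reset`.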
pub fn free(
    ctx: *anyopaque,
    buf: []u8,
    alignment: mem.Alignment,
    return_address: usize,
) void {
    const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
    _ = alignment;
    _ = return_address;
    assert(@inComptime() or self.ownsSlice(buf));

    if (self.isLastAllocation(buf)) {
        self.end_index -= buf.len;
    }
}
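
/// Lock-free bump allocation: load `end_index` atomically, compute the
/// aligned allocation, then publish the new `end_index` with a compare
/// exchange, retrying from the freshly observed value on contention.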
fn threadSafeAlloc(ctx: *anyopaque, n: usize, alignment: mem.Alignment, ra: usize) ?[*]u8 {
    const self: *FixedBufferAllocator = @ptrCast(@alignCast(ctx));
    _ = ra;
    const ptr_align = alignment.toByteUnits();
    var end_index = @atomicLoad(usize, &self.end_index, .seq_cst);
    while (true) {
        const adjust_off = mem.alignPointerOffset(self.buffer.ptr + end_index, ptr_align) orelse return null;
        const adjusted_index = end_index + adjust_off;
        const new_end_index = adjusted_index + n;
        if (new_end_index > self.buffer.len) return null;
        // On success `cmpxchgWeak` returns null and the allocation is published;
        // on failure it returns the value another thread stored, so retry from there.
        end_index = @cmpxchgWeak(usize, &self.end_index, end_index, new_end_index, .seq_cst, .seq_cst) orelse
            return self.buffer[adjusted_index..new_end_index].ptr;
    }
}
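
/// Rewinds `end_index` to the start of the buffer, invalidating all
/// previous allocations: subsequent allocations reuse (and overwrite)
/// the same memory, as the `reset` test below demonstrates.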
pub fn reset(self: *FixedBufferAllocator) void {
    self.end_index = 0;
}

fn sliceContainsPtr(container: []u8, ptr: [*]u8) bool {
    return @intFromPtr(ptr) >= @intFromPtr(container.ptr) and
        @intFromPtr(ptr) < (@intFromPtr(container.ptr) + container.len);
}

fn sliceContainsSlice(container: []u8, slice: []u8) bool {
    return @intFromPtr(slice.ptr) >= @intFromPtr(container.ptr) and
        (@intFromPtr(slice.ptr) + slice.len) <= (@intFromPtr(container.ptr) + container.len);
}

var test_fixed_buffer_allocator_memory: [800000 * @sizeOf(u64)]u8 = undefined;

test FixedBufferAllocator {
    var fixed_buffer_allocator = mem.validationWrap(FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]));
    const a = fixed_buffer_allocator.allocator();

    try std.heap.testAllocator(a);
    try std.heap.testAllocatorAligned(a);
    try std.heap.testAllocatorLargeAlignment(a);
    try std.heap.testAllocatorAlignedShrink(a);
}

test reset {
    var buf: [8]u8 align(@alignOf(u64)) = undefined;
    var fba = FixedBufferAllocator.init(buf[0..]);
    const a = fba.allocator();

    const X = 0xeeeeeeeeeeeeeeee;
    const Y = 0xffffffffffffffff;

    const x = try a.create(u64);
    x.* = X;
    try std.testing.expectError(error.OutOfMemory, a.create(u64));

    fba.reset();
    const y = try a.create(u64);
    y.* = Y;

    // we expect Y to have overwritten X.
    try std.testing.expect(x.* == y.*);
    try std.testing.expect(y.* == Y);
}

test "reuse memory on realloc" {
    var small_fixed_buffer: [10]u8 = undefined;
    // check if we re-use the memory
    {
        var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);
        const a = fixed_buffer_allocator.allocator();

        const slice0 = try a.alloc(u8, 5);
        try std.testing.expect(slice0.len == 5);
        const slice1 = try a.realloc(slice0, 10);
        try std.testing.expect(slice1.ptr == slice0.ptr);
        try std.testing.expect(slice1.len == 10);
        try std.testing.expectError(error.OutOfMemory, a.realloc(slice1, 11));
    }
    // check that we don't re-use the memory if it's not the most recent block
    {
        var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);
        const a = fixed_buffer_allocator.allocator();

        var slice0 = try a.alloc(u8, 2);
        slice0[0] = 1;
        slice0[1] = 2;
        const slice1 = try a.alloc(u8, 2);
        const slice2 = try a.realloc(slice0, 4);
        try std.testing.expect(slice0.ptr != slice2.ptr);
        try std.testing.expect(slice1.ptr != slice2.ptr);
        try std.testing.expect(slice2[0] == 1);
        try std.testing.expect(slice2[1] == 2);
    }
}

test "thread safe version" {
    var fixed_buffer_allocator = FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);

    try std.heap.testAllocator(fixed_buffer_allocator.threadSafeAllocator());
    try std.heap.testAllocatorAligned(fixed_buffer_allocator.threadSafeAllocator());
    try std.heap.testAllocatorLargeAlignment(fixed_buffer_allocator.threadSafeAllocator());
    try std.heap.testAllocatorAlignedShrink(fixed_buffer_allocator.threadSafeAllocator());
}