Commit fd0fb26aba
Changed files (1)
lib/std/heap/arena_allocator.zig
@@ -41,6 +41,8 @@ pub const ArenaAllocator = struct {
}
pub fn deinit(self: ArenaAllocator) void {
+ // NOTE: When changing this, make sure `reset()` is adjusted accordingly!
+
var it = self.state.buffer_list.first;
while (it) |node| {
// this has to occur before the free because the free frees node
@@ -50,6 +52,113 @@ pub const ArenaAllocator = struct {
}
}
+ pub const ResetMode = union(enum) {
+ /// Releases all allocated memory in the arena.
+ free_all,
+ /// This will pre-heat the arena for future allocations by allocating a
+ /// buffer large enough to hold all previously done allocations.
+ /// Preheating will speed up the allocation process by invoking the backing allocator
+ /// less often than before. If `reset()` is used in a loop, this means that after the
+ /// biggest operation, no further calls to the backing allocator are performed.
+ retain_capacity,
+ /// This is the same as `retain_capacity`, but the retained buffer will be shrunk to
+ /// the given limit if the capacity exceeds it.
+ retain_with_limit: usize,
+ };
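A minimal usage sketch (illustrative only, not part of the commit; assumes a surrounding function that can return allocation errors) of how the three modes are typically picked when one arena is reused across iterations:

var arena = ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
var i: usize = 0;
while (i < 100) : (i += 1) {
    // per-iteration scratch memory; all of it is invalidated by the reset below
    const scratch = try arena.allocator().alloc(u8, 1024 * (1 + i % 8));
    _ = scratch;
    // keep the grown buffer around for the next iteration:
    _ = arena.reset(.retain_capacity);
    // ...or cap the retained memory at 4 KiB instead:
    // _ = arena.reset(.{ .retain_with_limit = 4 * 1024 });
}
// release everything once the loop is done:
_ = arena.reset(.free_all);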
+ /// Queries the current memory use of this arena.
+ /// This will **not** include the storage required for the arena's internal bookkeeping.
+ pub fn queryCapacity(self: ArenaAllocator) usize {
+ var size: usize = 0;
+ var it = self.state.buffer_list.first;
+ while (it) |node| : (it = node.next) {
+ // Compute the actually allocated size excluding the
+ // linked list node.
+ size += node.data.len - @sizeOf(BufNode);
+ }
+ return size;
+ }
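`queryCapacity` pairs naturally with `retain_with_limit`: one possible pattern (a sketch, not part of the commit; `doOneRepresentativePass` is hypothetical) is to measure the footprint after a representative pass and retain roughly that much:

try doOneRepresentativePass(arena.allocator());
const high_water = arena.queryCapacity();
// keep at most the measured high-water mark across future resets
_ = arena.reset(.{ .retain_with_limit = high_water });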
+ /// Resets the arena allocator and frees all allocated memory.
+ ///
+ /// `mode` defines how the currently allocated memory is handled.
+ /// See the variant documentation for `ResetMode` for the effects of each mode.
+ ///
+ /// The function returns whether the reset operation was successful or not.
+ /// If the reallocation failed, `false` is returned. The arena will still be fully
+ /// functional in that case; all memory is released. Future allocations just might
+ /// be slower.
+ ///
+ /// NOTE: If `mode` is `free_all`, the function will always return `true`.
+ pub fn reset(self: *ArenaAllocator, mode: ResetMode) bool {
+ // Some words on the implementation:
+ // The reset function can be implemented with two basic approaches:
+ // - Counting how many bytes were allocated since the last reset, and storing that
+ // information in State. This will make reset fast and alloc only a teeny tiny bit
+ // slower.
+ // - Counting how many bytes were allocated by iterating the chunk linked list. This
+ // will make reset slower, but alloc() keeps the same speed as if reset() did not
+ // exist.
+ //
+ // The second variant was chosen for implementation, as with more and more calls to reset(),
+ // the function will get faster and faster. At some point, the complexity of the function
+ // will drop to amortized O(1), as we only ever have a single chunk that will not be
+ // reallocated, and we don't even touch the backing allocator anymore.
+ //
+ // Thus, only the first handful of calls to reset() will actually need to iterate the linked
+ // list; all future calls just take the first node and only reset the `end_index`
+ // value.
+ const current_capacity = if (mode != .free_all)
+ @sizeOf(BufNode) + self.queryCapacity() // we need space for exactly one node plus the current capacity
+ else
+ 0;
+ if (mode == .free_all or current_capacity == 0) {
+ // just reset when we don't have anything to reallocate
+ self.deinit();
+ self.state = State{};
+ return true;
+ }
+ const total_size = switch (mode) {
+ .retain_capacity => current_capacity,
+ .retain_with_limit => |limit| std.math.min(limit, current_capacity),
+ .free_all => unreachable,
+ };
+ // Free all nodes except for the last one
+ var it = self.state.buffer_list.first;
+ const maybe_first_node = while (it) |node| {
+ // this has to occur before the free because the free frees node
+ const next_it = node.next;
+ if (next_it == null)
+ break node;
+ self.child_allocator.free(node.data);
+ it = next_it;
+ } else null;
+ std.debug.assert(maybe_first_node == null or maybe_first_node.?.next == null);
+ // reset the state before we try resizing the buffers, so we definitely have reset the arena to 0.
+ self.state.end_index = 0;
+ if (maybe_first_node) |first_node| {
+ // re-anchor the list at the single retained node; the old head may have been freed above
+ self.state.buffer_list.first = first_node;
+ // perfect, no need to invoke the child_allocator
+ if (first_node.data.len == total_size)
+ return true;
+ const align_bits = std.math.log2_int(usize, @alignOf(BufNode));
+ if (self.child_allocator.rawResize(first_node.data, align_bits, total_size, @returnAddress())) {
+ // successful resize
+ first_node.data.len = total_size;
+ } else {
+ // manual realloc
+ const new_ptr = self.child_allocator.rawAlloc(total_size, align_bits, @returnAddress()) orelse {
+ // we failed to preheat the arena properly, signal this to the user.
+ return false;
+ };
+ self.child_allocator.rawFree(first_node.data, align_bits, @returnAddress());
+ const node = @ptrCast(*BufNode, @alignCast(@alignOf(BufNode), new_ptr));
+ node.* = BufNode{
+ .data = new_ptr[0..total_size],
+ };
+ self.state.buffer_list.first = node;
+ }
+ }
+ return true;
+ }
+
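Illustrative only (not part of the commit): because a failed reallocation leaves the arena valid but without its preheated buffer, a caller can treat a `false` return as a soft failure; the limit and log message are just examples:

if (!arena.reset(.{ .retain_with_limit = 64 * 1024 })) {
    // the arena is still usable, it just lost its preheated buffer;
    // future allocations may hit the backing allocator again
    std.log.warn("arena reset could not retain capacity", .{});
}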
fn createNode(self: *ArenaAllocator, prev_len: usize, minimum_size: usize) ?*BufNode {
const actual_min_size = minimum_size + (@sizeOf(BufNode) + 16);
const big_enough_len = prev_len + actual_min_size;
@@ -137,3 +246,26 @@ pub const ArenaAllocator = struct {
}
}
};
+
+test "ArenaAllocator (reset with preheating)" {
+ var arena_allocator = ArenaAllocator.init(std.testing.allocator);
+ defer arena_allocator.deinit();
+ // provides some variance in the allocated data
+ var rng_src = std.rand.DefaultPrng.init(19930913);
+ const random = rng_src.random();
+ var rounds: usize = 25;
+ while (rounds > 0) {
+ rounds -= 1;
+ _ = arena_allocator.reset(.retain_capacity);
+ var alloced_bytes: usize = 0;
+ var total_size: usize = random.intRangeAtMost(usize, 256, 16384);
+ while (alloced_bytes < total_size) {
+ const size = random.intRangeAtMost(usize, 16, 256);
+ const alignment = 32;
+ const slice = try arena_allocator.allocator().alignedAlloc(u8, alignment, size);
+ try std.testing.expect(std.mem.isAligned(@ptrToInt(slice.ptr), alignment));
+ try std.testing.expectEqual(size, slice.len);
+ alloced_bytes += slice.len;
+ }
+ }
+}
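
A possible companion test (a sketch, not part of the commit) exercising the `retain_with_limit` path; the 8 KiB allocation and 1 KiB limit are arbitrary:

test "ArenaAllocator (reset with limit) - sketch" {
    var arena_allocator = ArenaAllocator.init(std.testing.allocator);
    defer arena_allocator.deinit();
    // grow the arena well beyond the limit we will ask it to keep
    _ = try arena_allocator.allocator().alloc(u8, 8 * 1024);
    try std.testing.expect(arena_allocator.queryCapacity() >= 8 * 1024);
    _ = arena_allocator.reset(.{ .retain_with_limit = 1024 });
    // at most the limit (minus the BufNode header) is retained afterwards
    try std.testing.expect(arena_allocator.queryCapacity() <= 1024);
}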