//! The standard memory allocation interface.

const std = @import("../std.zig");
const assert = std.debug.assert;
const math = std.math;
const mem = std.mem;
const Allocator = @This();
const builtin = @import("builtin");
const Alignment = std.mem.Alignment;

pub const Error = error{OutOfMemory};
pub const Log2Align = math.Log2Int(usize);

/// The type-erased pointer to the allocator implementation.
///
/// Any comparison of this field may result in illegal behavior, since it may
/// be set to `undefined` in cases where the allocator implementation does not
/// have any associated state.
ptr: *anyopaque,
vtable: *const VTable,

pub const VTable = struct {
    /// Return a pointer to `len` bytes with specified `alignment`, or return
    /// `null` indicating the allocation failed.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    alloc: *const fn (*anyopaque, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8,

    /// Attempt to expand or shrink memory in place.
    ///
    /// `memory.len` must equal the length requested from the most recent
    /// successful call to `alloc`, `resize`, or `remap`. `alignment` must
    /// equal the same value that was passed as the `alignment` parameter to
    /// the original `alloc` call.
    ///
    /// A result of `true` indicates the resize was successful and the
    /// allocation now has the same address but a size of `new_len`. `false`
    /// indicates the resize could not be completed without moving the
    /// allocation to a different address.
    ///
    /// `new_len` must be greater than zero.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    resize: *const fn (*anyopaque, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool,

    /// Attempt to expand or shrink memory, allowing relocation.
    ///
    /// `memory.len` must equal the length requested from the most recent
    /// successful call to `alloc`, `resize`, or `remap`. `alignment` must
    /// equal the same value that was passed as the `alignment` parameter to
    /// the original `alloc` call.
    ///
    /// A non-`null` return value indicates the resize was successful. The
    /// allocation may have the same address, or it may have been relocated. In
    /// either case, the allocation now has a size of `new_len`. A `null` return
    /// value indicates that the resize would be equivalent to allocating new
    /// memory, copying the bytes from the old memory, and then freeing the old
    /// memory. In such a case, it is more efficient for the caller to perform
    /// the copy.
    ///
    /// `new_len` must be greater than zero.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    remap: *const fn (*anyopaque, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8,

    /// Free and invalidate a region of memory.
    ///
    /// `memory.len` must equal the length requested from the most recent
    /// successful call to `alloc`, `resize`, or `remap`. `alignment` must
    /// equal the same value that was passed as the `alignment` parameter to
    /// the original `alloc` call.
    ///
    /// `ret_addr` is optionally provided as the first return address of the
    /// allocation call stack. If the value is `0` it means no return address
    /// has been provided.
    free: *const fn (*anyopaque, memory: []u8, alignment: Alignment, ret_addr: usize) void,
};

pub fn noAlloc(
    self: *anyopaque,
    len: usize,
    alignment: Alignment,
    ret_addr: usize,
) ?[*]u8 {
    _ = self;
    _ = len;
    _ = alignment;
    _ = ret_addr;
    return null;
}

pub fn noResize(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    new_len: usize,
    ret_addr: usize,
) bool {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = new_len;
    _ = ret_addr;
    return false;
}

pub fn noRemap(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    new_len: usize,
    ret_addr: usize,
) ?[*]u8 {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = new_len;
    _ = ret_addr;
    return null;
}

pub fn noFree(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    ret_addr: usize,
) void {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = ret_addr;
}
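
// Illustrative sketch of the `VTable` contract described above: a minimal
// bump allocator over a caller-provided buffer, reusing the no-op stubs for
// `resize`, `remap`, and `free`. `ExampleBumpAllocator` and its fields are
// hypothetical names used only for this example, not part of the interface.
test "VTable example: minimal bump allocator" {
    const ExampleBumpAllocator = struct {
        buffer: []u8,
        end: usize = 0,

        fn alloc(ctx: *anyopaque, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
            _ = ret_addr;
            const self: *@This() = @ptrCast(@alignCast(ctx));
            const base = @intFromPtr(self.buffer.ptr);
            // Advance past already-used bytes, respecting the requested alignment.
            const start = mem.alignForward(usize, base + self.end, alignment.toByteUnits()) - base;
            if (start + len > self.buffer.len) return null;
            self.end = start + len;
            return self.buffer.ptr + start;
        }

        fn allocator(self: *@This()) Allocator {
            return .{
                .ptr = self,
                .vtable = &.{
                    .alloc = @This().alloc,
                    .resize = noResize,
                    .remap = noRemap,
                    .free = noFree,
                },
            };
        }
    };

    var storage: [64]u8 = undefined;
    var state: ExampleBumpAllocator = .{ .buffer = &storage };
    const a = state.allocator();
    const slice = try a.alloc(u8, 16);
    try std.testing.expect(slice.len == 16);
    // A request that cannot fit is reported as `error.OutOfMemory` by the
    // higher-level helpers when the implementation returns `null`.
    try std.testing.expectError(error.OutOfMemory, a.alloc(u8, 128));
}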

/// This function is not intended to be called except from within the
/// implementation of an `Allocator`.
pub inline fn rawAlloc(a: Allocator, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
    return a.vtable.alloc(a.ptr, len, alignment, ret_addr);
}

/// This function is not intended to be called except from within the
/// implementation of an `Allocator`.
pub inline fn rawResize(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool {
    return a.vtable.resize(a.ptr, memory, alignment, new_len, ret_addr);
}

/// This function is not intended to be called except from within the
/// implementation of an `Allocator`.
pub inline fn rawRemap(a: Allocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
    return a.vtable.remap(a.ptr, memory, alignment, new_len, ret_addr);
}

/// This function is not intended to be called except from within the
/// implementation of an `Allocator`.
pub inline fn rawFree(a: Allocator, memory: []u8, alignment: Alignment, ret_addr: usize) void {
    return a.vtable.free(a.ptr, memory, alignment, ret_addr);
}
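
// Illustrative example: the raw* wrappers simply dispatch through the vtable
// and are meant for allocator implementations (for example, a wrapper that
// forwards to a child allocator); ordinary callers should use `alloc`,
// `resize`, `remap`, and `free` instead. The always-failing allocator defined
// later in this file makes the dispatch visible.
test "raw* wrappers dispatch through the vtable" {
    const a: Allocator = .failing;
    try std.testing.expect(a.rawAlloc(16, .of(u32), @returnAddress()) == null);
    try std.testing.expectError(error.OutOfMemory, a.alloc(u32, 4));
}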

/// Returns a pointer to undefined memory.
/// Call `destroy` with the result to free the memory.
pub fn create(a: Allocator, comptime T: type) Error!*T {
    if (@sizeOf(T) == 0) {
        const ptr = comptime std.mem.alignBackward(usize, math.maxInt(usize), @alignOf(T));
        return @ptrFromInt(ptr);
    }
    const ptr: *T = @ptrCast(try a.allocBytesWithAlignment(.of(T), @sizeOf(T), @returnAddress()));
    return ptr;
}

/// `ptr` should be the return value of `create`, or otherwise
/// have the same address and alignment property.
pub fn destroy(self: Allocator, ptr: anytype) void {
    const info = @typeInfo(@TypeOf(ptr)).pointer;
    if (info.size != .one) @compileError("ptr must be a single item pointer");
    const T = info.child;
    if (@sizeOf(T) == 0) return;
    const non_const_ptr = @as([*]u8, @ptrCast(@constCast(ptr)));
    self.rawFree(non_const_ptr[0..@sizeOf(T)], .fromByteUnits(info.alignment), @returnAddress());
}
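
// Illustrative example using std.testing.allocator: `create` and `destroy`
// manage a single item; the returned memory is undefined until written.
test "create and destroy a single item" {
    const a = std.testing.allocator;
    const value = try a.create(u32);
    defer a.destroy(value);
    value.* = 1234;
    try std.testing.expectEqual(@as(u32, 1234), value.*);
}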

/// Allocates an array of `n` items of type `T` and sets all the
/// items to `undefined`. Depending on the Allocator
/// implementation, it may be required to call `free` once the
/// memory is no longer needed, to avoid a resource leak. If the
/// `Allocator` implementation is unknown, then correct code will
/// call `free` when done.
///
/// For allocating a single item, see `create`.
pub fn alloc(self: Allocator, comptime T: type, n: usize) Error![]T {
    return self.allocAdvancedWithRetAddr(T, null, n, @returnAddress());
}
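
// Illustrative example using std.testing.allocator: `alloc` returns a slice
// of undefined items that is later released with `free`.
test "alloc and free a slice" {
    const a = std.testing.allocator;
    const items = try a.alloc(u32, 4);
    defer a.free(items);
    for (items, 0..) |*item, i| item.* = @intCast(i);
    try std.testing.expectEqual(@as(u32, 3), items[3]);
}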

pub fn allocWithOptions(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?Alignment,
    comptime optional_sentinel: ?Elem,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    return self.allocWithOptionsRetAddr(Elem, n, optional_alignment, optional_sentinel, @returnAddress());
}

pub fn allocWithOptionsRetAddr(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    /// null means naturally aligned
    comptime optional_alignment: ?Alignment,
    comptime optional_sentinel: ?Elem,
    return_address: usize,
) Error!AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
    if (optional_sentinel) |sentinel| {
        const ptr = try self.allocAdvancedWithRetAddr(Elem, optional_alignment, n + 1, return_address);
        ptr[n] = sentinel;
        return ptr[0..n :sentinel];
    } else {
        return self.allocAdvancedWithRetAddr(Elem, optional_alignment, n, return_address);
    }
}

fn AllocWithOptionsPayload(comptime Elem: type, comptime alignment: ?Alignment, comptime sentinel: ?Elem) type {
    if (sentinel) |s| {
        return [:s]align(if (alignment) |a| a.toByteUnits() else @alignOf(Elem)) Elem;
    } else {
        return []align(if (alignment) |a| a.toByteUnits() else @alignOf(Elem)) Elem;
    }
}
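
// Illustrative example using std.testing.allocator: `allocWithOptions`
// combines an alignment override and a sentinel; here a 16-byte-aligned,
// zero-terminated buffer of 8 items is requested.
test "allocWithOptions with alignment and sentinel" {
    const a = std.testing.allocator;
    const buf = try a.allocWithOptions(u8, 8, .@"16", 0);
    defer a.free(buf);
    // The sentinel lives one past the requested length.
    try std.testing.expectEqual(@as(u8, 0), buf[8]);
    try std.testing.expect(mem.isAligned(@intFromPtr(buf.ptr), 16));
}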

/// Allocates an array of `n + 1` items of type `T` and sets the first `n`
/// items to `undefined` and the last item to `sentinel`. Depending on the
/// Allocator implementation, it may be required to call `free` once the
/// memory is no longer needed, to avoid a resource leak. If the
/// `Allocator` implementation is unknown, then correct code will
/// call `free` when done.
///
/// For allocating a single item, see `create`.
pub fn allocSentinel(
    self: Allocator,
    comptime Elem: type,
    n: usize,
    comptime sentinel: Elem,
) Error![:sentinel]Elem {
    return self.allocWithOptionsRetAddr(Elem, n, null, sentinel, @returnAddress());
}

pub fn alignedAlloc(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?Alignment,
    n: usize,
) Error![]align(if (alignment) |a| a.toByteUnits() else @alignOf(T)) T {
    return self.allocAdvancedWithRetAddr(T, alignment, n, @returnAddress());
}
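
// Illustrative example using std.testing.allocator: `alignedAlloc` requests
// an over-aligned buffer, and the alignment is reflected in the slice type.
test "alignedAlloc returns an over-aligned slice" {
    const a = std.testing.allocator;
    const buf = try a.alignedAlloc(u8, .@"64", 32);
    defer a.free(buf);
    try std.testing.expect(mem.isAligned(@intFromPtr(buf.ptr), 64));
}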

pub inline fn allocAdvancedWithRetAddr(
    self: Allocator,
    comptime T: type,
    /// null means naturally aligned
    comptime alignment: ?Alignment,
    n: usize,
    return_address: usize,
) Error![]align(if (alignment) |a| a.toByteUnits() else @alignOf(T)) T {
    const a = comptime (alignment orelse Alignment.of(T));
    const ptr: [*]align(a.toByteUnits()) T = @ptrCast(try self.allocWithSizeAndAlignment(@sizeOf(T), a, n, return_address));
    return ptr[0..n];
}

fn allocWithSizeAndAlignment(
    self: Allocator,
    comptime size: usize,
    comptime alignment: Alignment,
    n: usize,
    return_address: usize,
) Error![*]align(alignment.toByteUnits()) u8 {
    const byte_count = math.mul(usize, size, n) catch return Error.OutOfMemory;
    return self.allocBytesWithAlignment(alignment, byte_count, return_address);
}

fn allocBytesWithAlignment(
    self: Allocator,
    comptime alignment: Alignment,
    byte_count: usize,
    return_address: usize,
) Error![*]align(alignment.toByteUnits()) u8 {
    if (byte_count == 0) {
        const ptr = comptime alignment.backward(math.maxInt(usize));
        return @as([*]align(alignment.toByteUnits()) u8, @ptrFromInt(ptr));
    }

    const byte_ptr = self.rawAlloc(byte_count, alignment, return_address) orelse return Error.OutOfMemory;
    @memset(byte_ptr[0..byte_count], undefined);
    return @alignCast(byte_ptr);
}

/// Request to modify the size of an allocation.
///
/// This operation is guaranteed not to move the pointer; however, the
/// allocator implementation may refuse the resize request by returning
/// `false`.
///
/// `allocation` may be an empty slice, in which case `false` is returned,
/// unless `new_len` is also 0, in which case `true` is returned.
///
/// `new_len` may be zero, in which case the allocation is freed.
pub fn resize(self: Allocator, allocation: anytype, new_len: usize) bool {
    const Slice = @typeInfo(@TypeOf(allocation)).pointer;
    const T = Slice.child;
    const alignment = Slice.alignment;
    if (new_len == 0) {
        self.free(allocation);
        return true;
    }
    if (allocation.len == 0) {
        return false;
    }
    const old_memory = mem.sliceAsBytes(allocation);
    // I would like to use saturating multiplication here, but LLVM cannot lower it
    // on WebAssembly: https://github.com/ziglang/zig/issues/9660
    //const new_len_bytes = new_len *| @sizeOf(T);
    const new_len_bytes = math.mul(usize, @sizeOf(T), new_len) catch return false;
    return self.rawResize(old_memory, .fromByteUnits(alignment), new_len_bytes, @returnAddress());
}
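
// Illustrative example using std.testing.allocator: `resize` never moves the
// allocation, and it does not update the caller's slice length, so the caller
// adjusts `len` only when the resize is reported successful. Whether an
// in-place grow succeeds depends entirely on the allocator implementation.
test "resize keeps the address or reports failure" {
    const a = std.testing.allocator;
    var buf = try a.alloc(u8, 32);
    defer a.free(buf);
    if (a.resize(buf, 48)) {
        buf.len = 48;
    }
    try std.testing.expect(buf.len == 32 or buf.len == 48);
}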

/// Request to modify the size of an allocation, allowing relocation.
///
/// A non-`null` return value indicates the resize was successful. The
/// allocation may have the same address, or it may have been relocated. In
/// either case, the allocation now has a size of `new_len`. A `null` return
/// value indicates that the resize would be equivalent to allocating new
/// memory, copying the bytes from the old memory, and then freeing the old
/// memory. In such a case, it is more efficient for the caller to perform
/// those operations.
///
/// `allocation` may be an empty slice, in which case `null` is returned,
/// unless `new_len` is also 0, in which case `allocation` is returned.
///
/// `new_len` may be zero, in which case the allocation is freed.
///
/// If the allocation's element type is zero-sized, `allocation.len` is set to `new_len`.
pub fn remap(self: Allocator, allocation: anytype, new_len: usize) t: {
    const Slice = @typeInfo(@TypeOf(allocation)).pointer;
    break :t ?[]align(Slice.alignment) Slice.child;
} {
    const Slice = @typeInfo(@TypeOf(allocation)).pointer;
    const T = Slice.child;

    const alignment = Slice.alignment;
    if (new_len == 0) {
        self.free(allocation);
        return allocation[0..0];
    }
    if (allocation.len == 0) {
        return null;
    }
    if (@sizeOf(T) == 0) {
        var new_memory = allocation;
        new_memory.len = new_len;
        return new_memory;
    }
    const old_memory = mem.sliceAsBytes(allocation);
    // I would like to use saturating multiplication here, but LLVM cannot lower it
    // on WebAssembly: https://github.com/ziglang/zig/issues/9660
    //const new_len_bytes = new_len *| @sizeOf(T);
    const new_len_bytes = math.mul(usize, @sizeOf(T), new_len) catch return null;
    const new_ptr = self.rawRemap(old_memory, .fromByteUnits(alignment), new_len_bytes, @returnAddress()) orelse return null;
    const new_memory: []align(alignment) u8 = @alignCast(new_ptr[0..new_len_bytes]);
    return mem.bytesAsSlice(T, new_memory);
}
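
// Illustrative example using std.testing.allocator: `remap` may move the
// allocation, and a `null` result means the caller should fall back to
// alloc + copy + free, which is exactly what `realloc` does.
test "remap may relocate or decline" {
    const a = std.testing.allocator;
    var buf = try a.alloc(u8, 32);
    defer a.free(buf);
    if (a.remap(buf, 64)) |new_buf| buf = new_buf;
    try std.testing.expect(buf.len == 32 or buf.len == 64);
}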

/// This function requests a new size for an existing allocation, which
/// can be larger, smaller, or the same size as the old memory allocation.
/// The result is an array of `new_n` items of the same type as the existing
/// allocation.
///
/// If `new_n` is 0, this is the same as `free` and it always succeeds.
///
/// `old_mem` may have length zero, which makes a new allocation.
///
/// This function only fails on out-of-memory conditions, unlike:
/// * `remap` which returns `null` when the `Allocator` implementation cannot
///   do the realloc more efficiently than the caller
/// * `resize` which returns `false` when the `Allocator` implementation cannot
///   change the size without relocating the allocation.
pub fn realloc(self: Allocator, old_mem: anytype, new_n: usize) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    return self.reallocAdvanced(old_mem, new_n, @returnAddress());
}
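
// Illustrative example using std.testing.allocator: unlike `resize` and
// `remap`, `realloc` always produces a buffer of the requested length (or
// fails with OutOfMemory), copying to a new location when necessary and
// preserving the existing contents.
test "realloc grows or relocates as needed" {
    const a = std.testing.allocator;
    var buf = try a.alloc(u8, 4);
    @memset(buf, 7);
    buf = try a.realloc(buf, 128);
    defer a.free(buf);
    try std.testing.expectEqual(@as(u8, 7), buf[3]);
    try std.testing.expectEqual(@as(usize, 128), buf.len);
}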

pub fn reallocAdvanced(
    self: Allocator,
    old_mem: anytype,
    new_n: usize,
    return_address: usize,
) t: {
    const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
    break :t Error![]align(Slice.alignment) Slice.child;
} {
    const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
    const T = Slice.child;
    if (old_mem.len == 0) {
        return self.allocAdvancedWithRetAddr(T, .fromByteUnits(Slice.alignment), new_n, return_address);
    }
    if (new_n == 0) {
        self.free(old_mem);
        const ptr = comptime std.mem.alignBackward(usize, math.maxInt(usize), Slice.alignment);
        return @as([*]align(Slice.alignment) T, @ptrFromInt(ptr))[0..0];
    }

    const old_byte_slice = mem.sliceAsBytes(old_mem);
    const byte_count = math.mul(usize, @sizeOf(T), new_n) catch return Error.OutOfMemory;
    // Note: can't set shrunk memory to undefined as memory shouldn't be modified on realloc failure
    if (self.rawRemap(old_byte_slice, .fromByteUnits(Slice.alignment), byte_count, return_address)) |p| {
        const new_bytes: []align(Slice.alignment) u8 = @alignCast(p[0..byte_count]);
        return mem.bytesAsSlice(T, new_bytes);
    }

    const new_mem = self.rawAlloc(byte_count, .fromByteUnits(Slice.alignment), return_address) orelse
        return error.OutOfMemory;
    const copy_len = @min(byte_count, old_byte_slice.len);
    @memcpy(new_mem[0..copy_len], old_byte_slice[0..copy_len]);
    @memset(old_byte_slice, undefined);
    self.rawFree(old_byte_slice, .fromByteUnits(Slice.alignment), return_address);

    const new_bytes: []align(Slice.alignment) u8 = @alignCast(new_mem[0..byte_count]);
    return mem.bytesAsSlice(T, new_bytes);
}

/// Free an array allocated with `alloc`.
/// If memory has length 0, free is a no-op.
/// To free a single item, see `destroy`.
pub fn free(self: Allocator, memory: anytype) void {
    const Slice = @typeInfo(@TypeOf(memory)).pointer;
    const bytes = mem.sliceAsBytes(memory);
    const bytes_len = bytes.len + if (Slice.sentinel() != null) @sizeOf(Slice.child) else 0;
    if (bytes_len == 0) return;
    const non_const_ptr = @constCast(bytes.ptr);
    @memset(non_const_ptr[0..bytes_len], undefined);
    self.rawFree(non_const_ptr[0..bytes_len], .fromByteUnits(Slice.alignment), @returnAddress());
}

/// Copies `m` to newly allocated memory. Caller owns the memory.
pub fn dupe(allocator: Allocator, comptime T: type, m: []const T) Error![]T {
    const new_buf = try allocator.alloc(T, m.len);
    @memcpy(new_buf, m);
    return new_buf;
}

/// Copies `m` to newly allocated memory and appends a zero sentinel element. Caller owns the memory.
pub fn dupeZ(allocator: Allocator, comptime T: type, m: []const T) Error![:0]T {
    const new_buf = try allocator.alloc(T, m.len + 1);
    @memcpy(new_buf[0..m.len], m);
    new_buf[m.len] = 0;
    return new_buf[0..m.len :0];
}
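
// Illustrative example using std.testing.allocator: `dupe` copies a slice,
// and `dupeZ` additionally appends a 0 sentinel for APIs that expect
// null-terminated data.
test "dupe and dupeZ copy a slice" {
    const a = std.testing.allocator;
    const copy = try a.dupe(u8, "hello");
    defer a.free(copy);
    const copy_z = try a.dupeZ(u8, "hello");
    defer a.free(copy_z);
    try std.testing.expectEqualStrings("hello", copy);
    try std.testing.expectEqual(@as(u8, 0), copy_z[5]);
}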

/// An allocator that always fails to allocate.
pub const failing: Allocator = .{
    .ptr = undefined,
    .vtable = &.{
        .alloc = noAlloc,
        .resize = unreachableResize,
        .remap = unreachableRemap,
        .free = unreachableFree,
    },
};

fn unreachableResize(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    new_len: usize,
    ret_addr: usize,
) bool {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = new_len;
    _ = ret_addr;
    unreachable;
}

fn unreachableRemap(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    new_len: usize,
    ret_addr: usize,
) ?[*]u8 {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = new_len;
    _ = ret_addr;
    unreachable;
}

fn unreachableFree(
    self: *anyopaque,
    memory: []u8,
    alignment: Alignment,
    ret_addr: usize,
) void {
    _ = self;
    _ = memory;
    _ = alignment;
    _ = ret_addr;
    unreachable;
}

test failing {
    const f: Allocator = .failing;
    try std.testing.expectError(error.OutOfMemory, f.alloc(u8, 123));
}