Commit 1093b09a98
Changed files (77)
ci
lib
std
build
crypto
event
fs
hash
heap
json
os
testing
src
test
ci/srht/update-download-page.zig
@@ -6,7 +6,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const out_dir = "out";
try std.fs.cwd().makePath(out_dir);
doc/docgen.zig
@@ -21,7 +21,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
var args_it = process.args();
doc/langref.html.in
@@ -10061,7 +10061,7 @@ const expect = std.testing.expect;
test "using an allocator" {
var buffer: [100]u8 = undefined;
- const allocator = std.heap.FixedBufferAllocator.init(&buffer).getAllocator();
+ const allocator = std.heap.FixedBufferAllocator.init(&buffer).allocator();
const result = try concat(allocator, "foo", "bar");
try expect(std.mem.eql(u8, "foobar", result));
}
@@ -10114,7 +10114,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const ptr = try allocator.create(i32);
std.debug.print("ptr={*}\n", .{ptr});
@@ -10820,7 +10820,7 @@ const std = @import("std");
pub fn main() !void {
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){};
- const gpa = general_purpose_allocator.getAllocator();
+ const gpa = general_purpose_allocator.allocator();
const args = try std.process.argsAlloc(gpa);
defer std.process.argsFree(gpa, args);
@@ -10842,7 +10842,7 @@ const PreopenList = std.fs.wasi.PreopenList;
pub fn main() !void {
var general_purpose_allocator = std.heap.GeneralPurposeAllocator(.{}){};
- const gpa = general_purpose_allocator.getAllocator();
+ const gpa = general_purpose_allocator.allocator();
var preopens = PreopenList.init(gpa);
defer preopens.deinit();
lib/std/atomic/queue.zig
@@ -177,7 +177,7 @@ test "std.atomic.Queue" {
defer std.heap.page_allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.FixedBufferAllocator.init(plenty_of_memory);
- var a = fixed_buffer_allocator.getThreadSafeAllocator();
+ var a = fixed_buffer_allocator.threadSafeAllocator();
var queue = Queue(i32).init();
var context = Context{
lib/std/atomic/stack.zig
@@ -89,7 +89,7 @@ test "std.atomic.stack" {
defer std.heap.page_allocator.free(plenty_of_memory);
var fixed_buffer_allocator = std.heap.FixedBufferAllocator.init(plenty_of_memory);
- var a = fixed_buffer_allocator.getThreadSafeAllocator();
+ var a = fixed_buffer_allocator.threadSafeAllocator();
var stack = Stack(i32).init();
var context = Context{
lib/std/build/OptionsStep.zig
@@ -274,7 +274,7 @@ test "OptionsStep" {
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena.deinit();
var builder = try Builder.create(
- arena.getAllocator(),
+ arena.allocator(),
"test",
"test",
"test",
lib/std/crypto/benchmark.zig
@@ -363,7 +363,7 @@ pub fn main() !void {
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);
- const args = try std.process.argsAlloc(fixed.getAllocator());
+ const args = try std.process.argsAlloc(fixed.allocator());
var filter: ?[]u8 = "";
lib/std/event/loop.zig
@@ -173,12 +173,12 @@ pub const Loop = struct {
// We need at least one of these in case the fs thread wants to use onNextTick
const extra_thread_count = thread_count - 1;
const resume_node_count = std.math.max(extra_thread_count, 1);
- self.eventfd_resume_nodes = try self.arena.getAllocator().alloc(
+ self.eventfd_resume_nodes = try self.arena.allocator().alloc(
std.atomic.Stack(ResumeNode.EventFd).Node,
resume_node_count,
);
- self.extra_threads = try self.arena.getAllocator().alloc(Thread, extra_thread_count);
+ self.extra_threads = try self.arena.allocator().alloc(Thread, extra_thread_count);
try self.initOsData(extra_thread_count);
errdefer self.deinitOsData();
lib/std/fs/test.zig
@@ -52,7 +52,7 @@ test "accessAbsolute" {
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp.sub_path[0..] });
@@ -71,7 +71,7 @@ test "openDirAbsolute" {
try tmp.dir.makeDir("subdir");
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp.sub_path[0..], "subdir" });
@@ -111,7 +111,7 @@ test "readLinkAbsolute" {
// Get base abs path
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp.sub_path[0..] });
@@ -162,7 +162,7 @@ test "Dir.Iterator" {
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
var entries = std.ArrayList(Dir.Entry).init(allocator);
@@ -207,7 +207,7 @@ test "Dir.realpath smoke test" {
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp_dir.sub_path[0..] });
@@ -482,7 +482,7 @@ test "renameAbsolute" {
// Get base abs path
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp_dir.sub_path[0..] });
@@ -993,7 +993,7 @@ test ". and .. in absolute functions" {
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp.sub_path[0..] });
lib/std/hash/benchmark.zig
@@ -165,7 +165,7 @@ pub fn main() !void {
var buffer: [1024]u8 = undefined;
var fixed = std.heap.FixedBufferAllocator.init(buffer[0..]);
- const args = try std.process.argsAlloc(fixed.getAllocator());
+ const args = try std.process.argsAlloc(fixed.allocator());
var filter: ?[]u8 = "";
var count: usize = mode(128 * MiB);
lib/std/heap/arena_allocator.zig
@@ -23,7 +23,7 @@ pub const ArenaAllocator = struct {
}
};
- pub fn getAllocator(self: *ArenaAllocator) Allocator {
+ pub fn allocator(self: *ArenaAllocator) Allocator {
return Allocator.init(self, alloc, resize);
}
lib/std/heap/general_purpose_allocator.zig
@@ -280,7 +280,7 @@ pub fn GeneralPurposeAllocator(comptime config: Config) type {
}
};
- pub fn getAllocator(self: *Self) Allocator {
+ pub fn allocator(self: *Self) Allocator {
return Allocator.init(self, alloc, resize);
}
@@ -830,7 +830,7 @@ const test_config = Config{};
test "small allocations - free in same order" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var list = std.ArrayList(*u64).init(std.testing.allocator);
defer list.deinit();
@@ -849,7 +849,7 @@ test "small allocations - free in same order" {
test "small allocations - free in reverse order" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var list = std.ArrayList(*u64).init(std.testing.allocator);
defer list.deinit();
@@ -868,7 +868,7 @@ test "small allocations - free in reverse order" {
test "large allocations" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
const ptr1 = try allocator.alloc(u64, 42768);
const ptr2 = try allocator.alloc(u64, 52768);
@@ -881,7 +881,7 @@ test "large allocations" {
test "realloc" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alignedAlloc(u8, @alignOf(u32), 1);
defer allocator.free(slice);
@@ -903,7 +903,7 @@ test "realloc" {
test "shrink" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alloc(u8, 20);
defer allocator.free(slice);
@@ -926,7 +926,7 @@ test "shrink" {
test "large object - grow" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice1 = try allocator.alloc(u8, page_size * 2 - 20);
defer allocator.free(slice1);
@@ -944,7 +944,7 @@ test "large object - grow" {
test "realloc small object to large object" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alloc(u8, 70);
defer allocator.free(slice);
@@ -961,7 +961,7 @@ test "realloc small object to large object" {
test "shrink large object to large object" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alloc(u8, page_size * 2 + 50);
defer allocator.free(slice);
@@ -984,10 +984,10 @@ test "shrink large object to large object" {
test "shrink large object to large object with larger alignment" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var debug_buffer: [1000]u8 = undefined;
- const debug_allocator = std.heap.FixedBufferAllocator.init(&debug_buffer).getAllocator();
+ const debug_allocator = std.heap.FixedBufferAllocator.init(&debug_buffer).allocator();
const alloc_size = page_size * 2 + 50;
var slice = try allocator.alignedAlloc(u8, 16, alloc_size);
@@ -1019,7 +1019,7 @@ test "shrink large object to large object with larger alignment" {
test "realloc large object to small object" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alloc(u8, page_size * 2 + 50);
defer allocator.free(slice);
@@ -1037,7 +1037,7 @@ test "overrideable mutexes" {
.mutex = std.Thread.Mutex{},
};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
const ptr = try allocator.create(i32);
defer allocator.destroy(ptr);
@@ -1046,7 +1046,7 @@ test "overrideable mutexes" {
test "non-page-allocator backing allocator" {
var gpa = GeneralPurposeAllocator(.{}){ .backing_allocator = std.testing.allocator };
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
const ptr = try allocator.create(i32);
defer allocator.destroy(ptr);
@@ -1055,10 +1055,10 @@ test "non-page-allocator backing allocator" {
test "realloc large object to larger alignment" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var debug_buffer: [1000]u8 = undefined;
- const debug_allocator = std.heap.FixedBufferAllocator.init(&debug_buffer).getAllocator();
+ const debug_allocator = std.heap.FixedBufferAllocator.init(&debug_buffer).allocator();
var slice = try allocator.alignedAlloc(u8, 16, page_size * 2 + 50);
defer allocator.free(slice);
@@ -1094,9 +1094,9 @@ test "realloc large object to larger alignment" {
test "large object shrinks to small but allocation fails during shrink" {
var failing_allocator = std.testing.FailingAllocator.init(std.heap.page_allocator, 3);
- var gpa = GeneralPurposeAllocator(.{}){ .backing_allocator = failing_allocator.getAllocator() };
+ var gpa = GeneralPurposeAllocator(.{}){ .backing_allocator = failing_allocator.allocator() };
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
var slice = try allocator.alloc(u8, page_size * 2 + 50);
defer allocator.free(slice);
@@ -1113,7 +1113,7 @@ test "large object shrinks to small but allocation fails during shrink" {
test "objects of size 1024 and 2048" {
var gpa = GeneralPurposeAllocator(test_config){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
const slice = try allocator.alloc(u8, 1025);
const slice2 = try allocator.alloc(u8, 3000);
@@ -1125,7 +1125,7 @@ test "objects of size 1024 and 2048" {
test "setting a memory cap" {
var gpa = GeneralPurposeAllocator(.{ .enable_memory_limit = true }){};
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
gpa.setRequestedMemoryLimit(1010);
@@ -1154,9 +1154,9 @@ test "double frees" {
defer std.testing.expect(!backing_gpa.deinit()) catch @panic("leak");
const GPA = GeneralPurposeAllocator(.{ .safety = true, .never_unmap = true, .retain_metadata = true });
- var gpa = GPA{ .backing_allocator = backing_gpa.getAllocator() };
+ var gpa = GPA{ .backing_allocator = backing_gpa.allocator() };
defer std.testing.expect(!gpa.deinit()) catch @panic("leak");
- const allocator = gpa.getAllocator();
+ const allocator = gpa.allocator();
// detect a small allocation double free, even though bucket is emptied
const index: usize = 6;
lib/std/heap/log_to_writer_allocator.zig
@@ -17,7 +17,7 @@ pub fn LogToWriterAllocator(comptime Writer: type) type {
};
}
- pub fn getAllocator(self: *Self) Allocator {
+ pub fn allocator(self: *Self) Allocator {
return Allocator.init(self, alloc, resize);
}
@@ -82,7 +82,7 @@ test "LogToWriterAllocator" {
var allocator_buf: [10]u8 = undefined;
var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
- const allocator = logToWriterAllocator(fixedBufferAllocator.getAllocator(), fbs.writer()).getAllocator();
+ const allocator = logToWriterAllocator(fixedBufferAllocator.allocator(), fbs.writer()).allocator();
var a = try allocator.alloc(u8, 10);
a = allocator.shrink(a, 5);
lib/std/heap/logging_allocator.zig
@@ -32,7 +32,7 @@ pub fn ScopedLoggingAllocator(
};
}
- pub fn getAllocator(self: *Self) Allocator {
+ pub fn allocator(self: *Self) Allocator {
return Allocator.init(self, alloc, resize);
}
lib/std/json/write_stream.zig
@@ -243,7 +243,7 @@ test "json write stream" {
try w.beginObject();
try w.objectField("object");
- try w.emitJson(try getJsonObject(arena_allocator.getAllocator()));
+ try w.emitJson(try getJsonObject(arena_allocator.allocator()));
try w.objectField("string");
try w.emitString("This is a string");
lib/std/os/test.zig
@@ -58,7 +58,7 @@ test "open smoke test" {
// Get base abs path
var arena = ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const base_path = blk: {
const relative_path = try fs.path.join(allocator, &[_][]const u8{ "zig-cache", "tmp", tmp.sub_path[0..] });
lib/std/special/build_runner.zig
@@ -16,7 +16,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
var args = try process.argsAlloc(allocator);
defer process.argsFree(allocator, args);
lib/std/special/test_runner.zig
@@ -10,7 +10,7 @@ var args_buffer: [std.fs.MAX_PATH_BYTES + std.mem.page_size]u8 = undefined;
var args_allocator = std.heap.FixedBufferAllocator.init(&args_buffer);
fn processArgs() void {
- const args = std.process.argsAlloc(args_allocator.getAllocator()) catch {
+ const args = std.process.argsAlloc(args_allocator.allocator()) catch {
@panic("Too many bytes passed over the CLI to the test runner");
};
if (args.len != 2) {
lib/std/testing/failing_allocator.zig
@@ -40,7 +40,7 @@ pub const FailingAllocator = struct {
};
}
- pub fn getAllocator(self: *FailingAllocator) mem.Allocator {
+ pub fn allocator(self: *FailingAllocator) mem.Allocator {
return mem.Allocator.init(self, alloc, resize);
}
lib/std/zig/parser_test.zig
@@ -5351,8 +5351,8 @@ fn testTransform(source: [:0]const u8, expected_source: []const u8) !void {
const needed_alloc_count = x: {
// Try it once with unlimited memory, make sure it works
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
- var failing_allocator = std.testing.FailingAllocator.init(fixed_allocator.getAllocator(), maxInt(usize));
- const allocator = failing_allocator.getAllocator();
+ var failing_allocator = std.testing.FailingAllocator.init(fixed_allocator.allocator(), maxInt(usize));
+ const allocator = failing_allocator.allocator();
var anything_changed: bool = undefined;
const result_source = try testParse(source, allocator, &anything_changed);
try std.testing.expectEqualStrings(expected_source, result_source);
@@ -5369,9 +5369,9 @@ fn testTransform(source: [:0]const u8, expected_source: []const u8) !void {
var fail_index: usize = 0;
while (fail_index < needed_alloc_count) : (fail_index += 1) {
var fixed_allocator = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
- var failing_allocator = std.testing.FailingAllocator.init(fixed_allocator.getAllocator(), fail_index);
+ var failing_allocator = std.testing.FailingAllocator.init(fixed_allocator.allocator(), fail_index);
var anything_changed: bool = undefined;
- if (testParse(source, failing_allocator.getAllocator(), &anything_changed)) |_| {
+ if (testParse(source, failing_allocator.allocator(), &anything_changed)) |_| {
return error.NondeterministicMemoryUsage;
} else |err| switch (err) {
error.OutOfMemory => {
lib/std/zig/perf_test.zig
@@ -33,7 +33,7 @@ pub fn main() !void {
fn testOnce() usize {
var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buffer_mem[0..]);
- var allocator = fixed_buf_alloc.getAllocator();
+ var allocator = fixed_buf_alloc.allocator();
_ = std.zig.parse(allocator, source) catch @panic("parse failure");
return fixed_buf_alloc.end_index;
}
lib/std/zig/string_literal.zig
@@ -147,7 +147,7 @@ test "parse" {
var fixed_buf_mem: [32]u8 = undefined;
var fixed_buf_alloc = std.heap.FixedBufferAllocator.init(fixed_buf_mem[0..]);
- var alloc = fixed_buf_alloc.getAllocator();
+ var alloc = fixed_buf_alloc.allocator();
try expect(eql(u8, "foo", try parseAlloc(alloc, "\"foo\"")));
try expect(eql(u8, "foo", try parseAlloc(alloc, "\"f\x6f\x6f\"")));
lib/std/array_list.zig
@@ -1119,7 +1119,7 @@ test "std.ArrayList/ArrayListUnmanaged.insertSlice" {
test "std.ArrayList/ArrayListUnmanaged.replaceRange" {
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const a = arena.getAllocator();
+ const a = arena.allocator();
const init = [_]i32{ 1, 2, 3, 4, 5 };
const new = [_]i32{ 0, 0, 0 };
@@ -1263,7 +1263,7 @@ test "std.ArrayList/ArrayListUnmanaged.shrink still sets length on error.OutOfMe
// use an arena allocator to make sure realloc returns error.OutOfMemory
var arena = std.heap.ArenaAllocator.init(testing.allocator);
defer arena.deinit();
- const a = arena.getAllocator();
+ const a = arena.allocator();
{
var list = ArrayList(i32).init(a);
@@ -1361,7 +1361,7 @@ test "ArrayListAligned/ArrayListAlignedUnmanaged accepts unaligned slices" {
test "std.ArrayList(u0)" {
// An ArrayList on zero-sized types should not need to allocate
- const a = testing.FailingAllocator.init(testing.allocator, 0).getAllocator();
+ const a = testing.FailingAllocator.init(testing.allocator, 0).allocator();
var list = ArrayList(u0).init(a);
defer list.deinit();
lib/std/build.zig
@@ -1285,7 +1285,7 @@ test "builder.findProgram compiles" {
defer arena.deinit();
const builder = try Builder.create(
- arena.getAllocator(),
+ arena.allocator(),
"zig",
"zig-cache",
"zig-cache",
@@ -3207,7 +3207,7 @@ test "Builder.dupePkg()" {
var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena.deinit();
var builder = try Builder.create(
- arena.getAllocator(),
+ arena.allocator(),
"test",
"test",
"test",
@@ -3252,7 +3252,7 @@ test "LibExeObjStep.addPackage" {
defer arena.deinit();
var builder = try Builder.create(
- arena.getAllocator(),
+ arena.allocator(),
"test",
"test",
"test",
lib/std/builtin.zig
@@ -75,7 +75,7 @@ pub const StackTrace = struct {
};
const tty_config = std.debug.detectTTYConfig();
try writer.writeAll("\n");
- std.debug.writeStackTrace(self, writer, arena.getAllocator(), debug_info, tty_config) catch |err| {
+ std.debug.writeStackTrace(self, writer, arena.allocator(), debug_info, tty_config) catch |err| {
try writer.print("Unable to print stack trace: {s}\n", .{@errorName(err)});
};
try writer.writeAll("\n");
lib/std/child_process.zig
@@ -541,7 +541,7 @@ pub const ChildProcess = struct {
var arena_allocator = std.heap.ArenaAllocator.init(self.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
// The POSIX standard does not allow malloc() between fork() and execve(),
// and `self.allocator` may be a libc allocator.
@@ -1149,7 +1149,7 @@ test "createNullDelimitedEnvMap" {
var arena = std.heap.ArenaAllocator.init(allocator);
defer arena.deinit();
- const environ = try createNullDelimitedEnvMap(arena.getAllocator(), &envmap);
+ const environ = try createNullDelimitedEnvMap(arena.allocator(), &envmap);
try testing.expectEqual(@as(usize, 5), environ.len);
lib/std/debug.zig
@@ -1566,7 +1566,7 @@ fn getDebugInfoAllocator() mem.Allocator {
if (debug_info_allocator) |a| return a;
debug_info_arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
- const allocator = debug_info_arena_allocator.getAllocator();
+ const allocator = debug_info_arena_allocator.allocator();
debug_info_allocator = allocator;
return allocator;
}
lib/std/heap.zig
@@ -573,7 +573,7 @@ pub const HeapAllocator = switch (builtin.os.tag) {
};
}
- pub fn getAllocator(self: *HeapAllocator) Allocator {
+ pub fn allocator(self: *HeapAllocator) Allocator {
return Allocator.init(self, alloc, resize);
}
@@ -680,14 +680,14 @@ pub const FixedBufferAllocator = struct {
};
}
- /// *WARNING* using this at the same time as the interface returned by `getThreadSafeAllocator` is not thread safe
- pub fn getAllocator(self: *FixedBufferAllocator) Allocator {
+ /// *WARNING* using this at the same time as the interface returned by `threadSafeAllocator` is not thread safe
+ pub fn allocator(self: *FixedBufferAllocator) Allocator {
return Allocator.init(self, alloc, resize);
}
/// Provides a lock free thread safe `Allocator` interface to the underlying `FixedBufferAllocator`
- /// *WARNING* using this at the same time as the interface returned by `getAllocator` is not thread safe
+ /// *WARNING* using this at the same time as the interface returned by `allocator` is not thread safe
- pub fn getThreadSafeAllocator(self: *FixedBufferAllocator) Allocator {
+ pub fn threadSafeAllocator(self: *FixedBufferAllocator) Allocator {
return Allocator.init(self, threadSafeAlloc, Allocator.NoResize(FixedBufferAllocator).noResize);
}
@@ -775,7 +775,7 @@ pub const FixedBufferAllocator = struct {
}
};
-pub const ThreadSafeFixedBufferAllocator = @compileError("ThreadSafeFixedBufferAllocator has been replaced with `getThreadSafeAllocator` on FixedBufferAllocator");
+pub const ThreadSafeFixedBufferAllocator = @compileError("ThreadSafeFixedBufferAllocator has been replaced with `threadSafeAllocator` on FixedBufferAllocator");
pub fn stackFallback(comptime size: usize, fallback_allocator: Allocator) StackFallbackAllocator(size) {
return StackFallbackAllocator(size){
@@ -909,7 +909,7 @@ test "HeapAllocator" {
if (builtin.os.tag == .windows) {
var heap_allocator = HeapAllocator.init();
defer heap_allocator.deinit();
- const allocator = heap_allocator.getAllocator();
+ const allocator = heap_allocator.allocator();
try testAllocator(allocator);
try testAllocatorAligned(allocator);
@@ -921,7 +921,7 @@ test "HeapAllocator" {
test "ArenaAllocator" {
var arena_allocator = ArenaAllocator.init(page_allocator);
defer arena_allocator.deinit();
- const allocator = arena_allocator.getAllocator();
+ const allocator = arena_allocator.allocator();
try testAllocator(allocator);
try testAllocatorAligned(allocator);
@@ -932,7 +932,7 @@ test "ArenaAllocator" {
var test_fixed_buffer_allocator_memory: [800000 * @sizeOf(u64)]u8 = undefined;
test "FixedBufferAllocator" {
var fixed_buffer_allocator = mem.validationWrap(FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]));
- const allocator = fixed_buffer_allocator.getAllocator();
+ const allocator = fixed_buffer_allocator.allocator();
try testAllocator(allocator);
try testAllocatorAligned(allocator);
@@ -943,7 +943,7 @@ test "FixedBufferAllocator" {
test "FixedBufferAllocator.reset" {
var buf: [8]u8 align(@alignOf(u64)) = undefined;
var fba = FixedBufferAllocator.init(buf[0..]);
- const allocator = fba.getAllocator();
+ const allocator = fba.allocator();
const X = 0xeeeeeeeeeeeeeeee;
const Y = 0xffffffffffffffff;
@@ -976,7 +976,7 @@ test "FixedBufferAllocator Reuse memory on realloc" {
// check if we re-use the memory
{
var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);
- const allocator = fixed_buffer_allocator.getAllocator();
+ const allocator = fixed_buffer_allocator.allocator();
var slice0 = try allocator.alloc(u8, 5);
try testing.expect(slice0.len == 5);
@@ -988,7 +988,7 @@ test "FixedBufferAllocator Reuse memory on realloc" {
// check that we don't re-use the memory if it's not the most recent block
{
var fixed_buffer_allocator = FixedBufferAllocator.init(small_fixed_buffer[0..]);
- const allocator = fixed_buffer_allocator.getAllocator();
+ const allocator = fixed_buffer_allocator.allocator();
var slice0 = try allocator.alloc(u8, 2);
slice0[0] = 1;
@@ -1005,16 +1005,16 @@ test "FixedBufferAllocator Reuse memory on realloc" {
test "Thread safe FixedBufferAllocator" {
var fixed_buffer_allocator = FixedBufferAllocator.init(test_fixed_buffer_allocator_memory[0..]);
- try testAllocator(fixed_buffer_allocator.getThreadSafeAllocator());
- try testAllocatorAligned(fixed_buffer_allocator.getThreadSafeAllocator());
- try testAllocatorLargeAlignment(fixed_buffer_allocator.getThreadSafeAllocator());
- try testAllocatorAlignedShrink(fixed_buffer_allocator.getThreadSafeAllocator());
+ try testAllocator(fixed_buffer_allocator.threadSafeAllocator());
+ try testAllocatorAligned(fixed_buffer_allocator.threadSafeAllocator());
+ try testAllocatorLargeAlignment(fixed_buffer_allocator.threadSafeAllocator());
+ try testAllocatorAlignedShrink(fixed_buffer_allocator.threadSafeAllocator());
}
/// This one should not try alignments that exceed what C malloc can handle.
pub fn testAllocator(base_allocator: mem.Allocator) !void {
var validationAllocator = mem.validationWrap(base_allocator);
- const allocator = validationAllocator.getAllocator();
+ const allocator = validationAllocator.allocator();
var slice = try allocator.alloc(*i32, 100);
try testing.expect(slice.len == 100);
@@ -1060,7 +1060,7 @@ pub fn testAllocator(base_allocator: mem.Allocator) !void {
pub fn testAllocatorAligned(base_allocator: mem.Allocator) !void {
var validationAllocator = mem.validationWrap(base_allocator);
- const allocator = validationAllocator.getAllocator();
+ const allocator = validationAllocator.allocator();
// Test a few alignment values, smaller and bigger than the type's one
inline for ([_]u29{ 1, 2, 4, 8, 16, 32, 64 }) |alignment| {
@@ -1090,7 +1090,7 @@ pub fn testAllocatorAligned(base_allocator: mem.Allocator) !void {
pub fn testAllocatorLargeAlignment(base_allocator: mem.Allocator) !void {
var validationAllocator = mem.validationWrap(base_allocator);
- const allocator = validationAllocator.getAllocator();
+ const allocator = validationAllocator.allocator();
//Maybe a platform's page_size is actually the same as or
// very near usize?
@@ -1122,10 +1122,10 @@ pub fn testAllocatorLargeAlignment(base_allocator: mem.Allocator) !void {
pub fn testAllocatorAlignedShrink(base_allocator: mem.Allocator) !void {
var validationAllocator = mem.validationWrap(base_allocator);
- const allocator = validationAllocator.getAllocator();
+ const allocator = validationAllocator.allocator();
var debug_buffer: [1000]u8 = undefined;
- const debug_allocator = FixedBufferAllocator.init(&debug_buffer).getAllocator();
+ const debug_allocator = FixedBufferAllocator.init(&debug_buffer).allocator();
const alloc_size = mem.page_size * 2 + 50;
var slice = try allocator.alignedAlloc(u8, 16, alloc_size);
lib/std/json.zig
@@ -2033,7 +2033,7 @@ test "parse into tagged union" {
{ // failing allocations should be bubbled up instantly without trying next member
var fail_alloc = testing.FailingAllocator.init(testing.allocator, 0);
- const options = ParseOptions{ .allocator = fail_alloc.getAllocator() };
+ const options = ParseOptions{ .allocator = fail_alloc.allocator() };
const T = union(enum) {
// both fields here match the input
string: []const u8,
@@ -2081,7 +2081,7 @@ test "parse union bubbles up AllocatorRequired" {
test "parseFree descends into tagged union" {
var fail_alloc = testing.FailingAllocator.init(testing.allocator, 1);
- const options = ParseOptions{ .allocator = fail_alloc.getAllocator() };
+ const options = ParseOptions{ .allocator = fail_alloc.allocator() };
const T = union(enum) {
int: i32,
float: f64,
@@ -2364,7 +2364,7 @@ pub const Parser = struct {
var arena = ArenaAllocator.init(p.allocator);
errdefer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
while (try s.next()) |token| {
try p.transition(allocator, input, s.i - 1, token);
@@ -2746,13 +2746,13 @@ fn testParse(arena_allocator: std.mem.Allocator, json_str: []const u8) !Value {
test "parsing empty string gives appropriate error" {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_allocator.deinit();
- try testing.expectError(error.UnexpectedEndOfJson, testParse(arena_allocator.getAllocator(), ""));
+ try testing.expectError(error.UnexpectedEndOfJson, testParse(arena_allocator.allocator(), ""));
}
test "integer after float has proper type" {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_allocator.deinit();
- const json = try testParse(arena_allocator.getAllocator(),
+ const json = try testParse(arena_allocator.allocator(),
\\{
\\ "float": 3.14,
\\ "ints": [1, 2, 3]
@@ -2787,7 +2787,7 @@ test "escaped characters" {
\\}
;
- const obj = (try testParse(arena_allocator.getAllocator(), input)).Object;
+ const obj = (try testParse(arena_allocator.allocator(), input)).Object;
try testing.expectEqualSlices(u8, obj.get("backslash").?.String, "\\");
try testing.expectEqualSlices(u8, obj.get("forwardslash").?.String, "/");
@@ -2813,7 +2813,7 @@ test "string copy option" {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
defer arena_allocator.deinit();
- const allocator = arena_allocator.getAllocator();
+ const allocator = arena_allocator.allocator();
const tree_nocopy = try Parser.init(allocator, false).parse(input);
const obj_nocopy = tree_nocopy.root.Object;
lib/std/mem.zig
@@ -46,13 +46,13 @@ pub fn ValidationAllocator(comptime T: type) type {
};
}
- pub fn getAllocator(self: *Self) Allocator {
+ pub fn allocator(self: *Self) Allocator {
return Allocator.init(self, alloc, resize);
}
fn getUnderlyingAllocatorPtr(self: *Self) Allocator {
if (T == Allocator) return self.underlying_allocator;
- return self.underlying_allocator.getAllocator();
+ return self.underlying_allocator.allocator();
}
pub fn alloc(
lib/std/net.zig
@@ -704,7 +704,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) !*A
var arena = std.heap.ArenaAllocator.init(allocator);
errdefer arena.deinit();
- const result = try arena.getAllocator().create(AddressList);
+ const result = try arena.allocator().create(AddressList);
result.* = AddressList{
.arena = arena,
.addrs = undefined,
@@ -712,7 +712,7 @@ pub fn getAddressList(allocator: mem.Allocator, name: []const u8, port: u16) !*A
};
break :blk result;
};
- const arena = result.arena.getAllocator();
+ const arena = result.arena.allocator();
errdefer result.arena.deinit();
if (builtin.target.os.tag == .windows or builtin.link_libc) {
lib/std/process.zig
@@ -854,7 +854,7 @@ pub fn execve(
var arena_allocator = std.heap.ArenaAllocator.init(allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const argv_buf = try arena.allocSentinel(?[*:0]u8, argv.len, null);
for (argv) |arg, i| argv_buf[i] = (try arena.dupeZ(u8, arg)).ptr;
lib/std/testing.zig
@@ -7,11 +7,11 @@ const print = std.debug.print;
pub const FailingAllocator = @import("testing/failing_allocator.zig").FailingAllocator;
/// This should only be used in temporary test programs.
-pub const allocator = allocator_instance.getAllocator();
+pub const allocator = allocator_instance.allocator();
pub var allocator_instance = std.heap.GeneralPurposeAllocator(.{}){};
-pub const failing_allocator = failing_allocator_instance.getAllocator();
-pub var failing_allocator_instance = FailingAllocator.init(base_allocator_instance.getAllocator(), 0);
+pub const failing_allocator = failing_allocator_instance.allocator();
+pub var failing_allocator_instance = FailingAllocator.init(base_allocator_instance.allocator(), 0);
pub var base_allocator_instance = std.heap.FixedBufferAllocator.init("");
lib/std/Thread.zig
@@ -460,7 +460,7 @@ const WindowsThreadImpl = struct {
errdefer assert(windows.kernel32.HeapFree(heap_handle, 0, alloc_ptr) != 0);
const instance_bytes = @ptrCast([*]u8, alloc_ptr)[0..alloc_bytes];
- const instance = std.heap.FixedBufferAllocator.init(instance_bytes).getAllocator().create(Instance) catch unreachable;
+ const instance = std.heap.FixedBufferAllocator.init(instance_bytes).allocator().create(Instance) catch unreachable;
instance.* = .{
.fn_args = args,
.thread = .{
src/codegen/c.zig
@@ -390,7 +390,7 @@ pub const DeclGen = struct {
// Fall back to generic implementation.
var arena = std.heap.ArenaAllocator.init(dg.module.gpa);
defer arena.deinit();
- const arena_allocator = arena.getAllocator();
+ const arena_allocator = arena.allocator();
try writer.writeAll("{");
var index: usize = 0;
src/codegen/llvm.zig
@@ -331,7 +331,7 @@ pub const Object = struct {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const mod = comp.bin_file.options.module.?;
const cache_dir = mod.zig_cache_artifact_directory;
@@ -779,7 +779,7 @@ pub const DeclGen = struct {
// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
- gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());
+ gop.key_ptr.* = try t.copy(dg.object.type_map_arena.allocator());
const opaque_obj = t.castTag(.@"opaque").?.data;
const name = try opaque_obj.getFullyQualifiedName(gpa);
@@ -837,7 +837,7 @@ pub const DeclGen = struct {
// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
- gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());
+ gop.key_ptr.* = try t.copy(dg.object.type_map_arena.allocator());
const struct_obj = t.castTag(.@"struct").?.data;
@@ -871,7 +871,7 @@ pub const DeclGen = struct {
// The Type memory is ephemeral; since we want to store a longer-lived
// reference, we need to copy it here.
- gop.key_ptr.* = try t.copy(dg.object.type_map_arena.getAllocator());
+ gop.key_ptr.* = try t.copy(dg.object.type_map_arena.allocator());
const union_obj = t.cast(Type.Payload.Union).?.data;
const target = dg.module.getTarget();
@@ -2485,7 +2485,7 @@ pub const FuncGen = struct {
var arena_allocator = std.heap.ArenaAllocator.init(self.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const llvm_params_len = args.len;
const llvm_param_types = try arena.alloc(*const llvm.Type, llvm_params_len);
src/link/tapi/yaml.zig
@@ -248,7 +248,7 @@ pub const Yaml = struct {
pub fn load(allocator: Allocator, source: []const u8) !Yaml {
var arena = ArenaAllocator.init(allocator);
- const arena_allocator = arena.getAllocator();
+ const arena_allocator = arena.allocator();
var tree = Tree.init(arena_allocator);
try tree.parse(source);
@@ -300,7 +300,7 @@ pub const Yaml = struct {
.Pointer => |info| {
switch (info.size) {
.Slice => {
- var parsed = try self.arena.getAllocator().alloc(info.child, self.docs.items.len);
+ var parsed = try self.arena.allocator().alloc(info.child, self.docs.items.len);
for (self.docs.items) |doc, i| {
parsed[i] = try self.parseValue(info.child, doc);
}
@@ -362,7 +362,7 @@ pub const Yaml = struct {
inline for (struct_info.fields) |field| {
const value: ?Value = map.get(field.name) orelse blk: {
- const field_name = try mem.replaceOwned(u8, self.arena.getAllocator(), field.name, "_", "-");
+ const field_name = try mem.replaceOwned(u8, self.arena.allocator(), field.name, "_", "-");
break :blk map.get(field_name);
};
@@ -383,7 +383,7 @@ pub const Yaml = struct {
fn parsePointer(self: *Yaml, comptime T: type, value: Value) Error!T {
const ptr_info = @typeInfo(T).Pointer;
- const arena = self.arena.getAllocator();
+ const arena = self.arena.allocator();
switch (ptr_info.size) {
.Slice => {
src/link/C.zig
@@ -128,7 +128,7 @@ pub fn updateFunc(self: *C, module: *Module, func: *Module.Fn, air: Air, livenes
.decl = decl,
.fwd_decl = fwd_decl.toManaged(module.gpa),
.typedefs = typedefs.promote(module.gpa),
- .typedefs_arena = self.arena.getAllocator(),
+ .typedefs_arena = self.arena.allocator(),
},
.code = code.toManaged(module.gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
@@ -193,7 +193,7 @@ pub fn updateDecl(self: *C, module: *Module, decl: *Module.Decl) !void {
.decl = decl,
.fwd_decl = fwd_decl.toManaged(module.gpa),
.typedefs = typedefs.promote(module.gpa),
- .typedefs_arena = self.arena.getAllocator(),
+ .typedefs_arena = self.arena.allocator(),
},
.code = code.toManaged(module.gpa),
.indent_writer = undefined, // set later so we can get a pointer to object.code
src/link/Coff.zig
@@ -877,7 +877,7 @@ fn linkWithLLD(self: *Coff, comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
src/link/Elf.zig
@@ -1243,7 +1243,7 @@ fn linkWithLLD(self: *Elf, comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
src/link/MachO.zig
@@ -412,7 +412,7 @@ pub fn flushModule(self: *MachO, comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
@@ -5379,7 +5379,7 @@ fn snapshotState(self: *MachO) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const out_file = try emit.directory.handle.createFile("snapshots.json", .{
.truncate = self.cold_start,
src/link/Plan9.zig
@@ -168,7 +168,7 @@ fn putFn(self: *Plan9, decl: *Module.Decl, out: FnDeclOutput) !void {
try fn_map_res.value_ptr.functions.put(gpa, decl, out);
} else {
const file = decl.getFileScope();
- const arena = self.path_arena.getAllocator();
+ const arena = self.path_arena.allocator();
// each file gets a symbol
fn_map_res.value_ptr.* = .{
.sym_index = blk: {
src/link/tapi.zig
@@ -120,7 +120,7 @@ pub const LibStub = struct {
err: {
log.debug("trying to parse as []TbdV4", .{});
const inner = lib_stub.yaml.parse([]TbdV4) catch break :err;
- var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, inner.len);
+ var out = try lib_stub.yaml.arena.allocator().alloc(Tbd, inner.len);
for (inner) |doc, i| {
out[i] = .{ .v4 = doc };
}
@@ -130,7 +130,7 @@ pub const LibStub = struct {
err: {
log.debug("trying to parse as TbdV4", .{});
const inner = lib_stub.yaml.parse(TbdV4) catch break :err;
- var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, 1);
+ var out = try lib_stub.yaml.arena.allocator().alloc(Tbd, 1);
out[0] = .{ .v4 = inner };
break :blk out;
}
@@ -148,7 +148,7 @@ pub const LibStub = struct {
err: {
log.debug("trying to parse as TbdV3", .{});
const inner = lib_stub.yaml.parse(TbdV3) catch break :err;
- var out = try lib_stub.yaml.arena.getAllocator().alloc(Tbd, 1);
+ var out = try lib_stub.yaml.arena.allocator().alloc(Tbd, 1);
out[0] = .{ .v3 = inner };
break :blk out;
}
src/link/Wasm.zig
@@ -950,7 +950,7 @@ fn linkWithLLD(self: *Wasm, comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(self.base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const directory = self.base.options.emit.?.directory; // Just an alias to make it shorter to type.
src/AstGen.zig
@@ -98,7 +98,7 @@ pub fn generate(gpa: Allocator, tree: Ast) Allocator.Error!Zir {
var astgen: AstGen = .{
.gpa = gpa,
- .arena = arena.getAllocator(),
+ .arena = arena.allocator(),
.tree = &tree,
};
defer astgen.deinit(gpa);
@@ -1939,7 +1939,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
var block_arena = std.heap.ArenaAllocator.init(gz.astgen.gpa);
defer block_arena.deinit();
- const block_arena_allocator = block_arena.getAllocator();
+ const block_arena_allocator = block_arena.allocator();
var noreturn_src_node: Ast.Node.Index = 0;
var scope = parent_scope;
src/Compilation.zig
@@ -412,7 +412,7 @@ pub const AllErrors = struct {
errors: *std.ArrayList(Message),
module_err_msg: Module.ErrorMsg,
) !void {
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const notes = try allocator.alloc(Message, module_err_msg.notes.len);
for (notes) |*note, i| {
const module_note = module_err_msg.notes[i];
@@ -549,7 +549,7 @@ pub const AllErrors = struct {
msg: []const u8,
optional_children: ?AllErrors,
) !void {
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const duped_msg = try allocator.dupe(u8, msg);
if (optional_children) |*children| {
try errors.append(.{ .plain = .{
@@ -788,7 +788,7 @@ fn addPackageTableToCacheHash(
seen_table: *std.AutoHashMap(*Package, void),
hash_type: union(enum) { path_bytes, files: *Cache.Manifest },
) (error{OutOfMemory} || std.os.GetCwdError)!void {
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const packages = try allocator.alloc(Package.Table.KV, pkg_table.count());
{
@@ -852,7 +852,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
// initialization and then is freed in deinit().
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
// We put the `Compilation` itself in the arena. Freeing the arena will free the module.
// It's initialized later after we prepare the initialization options.
@@ -1210,7 +1210,7 @@ pub fn create(gpa: Allocator, options: InitOptions) !*Compilation {
{
var local_arena = std.heap.ArenaAllocator.init(gpa);
defer local_arena.deinit();
- var seen_table = std.AutoHashMap(*Package, void).init(local_arena.getAllocator());
+ var seen_table = std.AutoHashMap(*Package, void).init(local_arena.allocator());
try addPackageTableToCacheHash(&hash, &local_arena, main_pkg.table, &seen_table, .path_bytes);
}
hash.add(valgrind);
@@ -2013,7 +2013,7 @@ pub fn totalErrorCount(self: *Compilation) usize {
pub fn getAllErrorsAlloc(self: *Compilation) !AllErrors {
var arena = std.heap.ArenaAllocator.init(self.gpa);
errdefer arena.deinit();
- const arena_allocator = arena.getAllocator();
+ const arena_allocator = arena.allocator();
var errors = std.ArrayList(AllErrors.Message).init(self.gpa);
defer errors.deinit();
@@ -2295,7 +2295,7 @@ fn processOneJob(comp: *Compilation, job: Job, main_progress_node: *std.Progress
var tmp_arena = std.heap.ArenaAllocator.init(gpa);
defer tmp_arena.deinit();
- const sema_arena = tmp_arena.getAllocator();
+ const sema_arena = tmp_arena.allocator();
const sema_frame = tracy.namedFrame("sema");
var sema_frame_ended = false;
@@ -2390,7 +2390,7 @@ fn processOneJob(comp: *Compilation, job: Job, main_progress_node: *std.Progress
.decl = decl,
.fwd_decl = fwd_decl.toManaged(gpa),
.typedefs = c_codegen.TypedefMap.init(gpa),
- .typedefs_arena = typedefs_arena.getAllocator(),
+ .typedefs_arena = typedefs_arena.allocator(),
};
defer dg.fwd_decl.deinit();
defer dg.typedefs.deinit();
@@ -2844,7 +2844,7 @@ pub fn cImport(comp: *Compilation, c_src: []const u8) !CImportResult {
const digest = if (!actual_hit) digest: {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const tmp_digest = man.hash.peek();
const tmp_dir_sub_path = try std.fs.path.join(arena, &[_][]const u8{ "o", &tmp_digest });
@@ -3099,7 +3099,7 @@ fn updateCObject(comp: *Compilation, c_object: *CObject, c_obj_prog_node: *std.P
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const c_source_basename = std.fs.path.basename(c_object.src.src_path);
@@ -4420,7 +4420,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
// Here we use the legacy stage1 C++ compiler to compile Zig code.
const mod = comp.bin_file.options.module.?;
@@ -4457,7 +4457,7 @@ fn updateStage1Module(comp: *Compilation, main_progress_node: *std.Progress.Node
_ = try man.addFile(main_zig_file, null);
{
- var seen_table = std.AutoHashMap(*Package, void).init(arena_allocator.getAllocator());
+ var seen_table = std.AutoHashMap(*Package, void).init(arena_allocator.allocator());
try addPackageTableToCacheHash(&man.hash, &arena_allocator, mod.main_pkg.table, &seen_table, .{ .files = &man });
}
man.hash.add(comp.bin_file.options.valgrind);
src/crash_report.zig
@@ -85,7 +85,7 @@ fn dumpStatusReport() !void {
const anal = zir_state orelse return;
// Note: We have the panic mutex here, so we can safely use the global crash heap.
var fba = std.heap.FixedBufferAllocator.init(&crash_heap);
- const allocator = fba.getAllocator();
+ const allocator = fba.allocator();
const stderr = io.getStdErr().writer();
const block: *Sema.Block = anal.block;
src/DepTokenizer.zig
@@ -878,7 +878,7 @@ test "error prereq - continuation expecting end-of-line" {
// - tokenize input, emit textual representation, and compare to expect
fn depTokenizer(input: []const u8, expect: []const u8) !void {
var arena_allocator = std.heap.ArenaAllocator.init(std.testing.allocator);
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
defer arena_allocator.deinit();
var it: Tokenizer = .{ .bytes = input };
src/glibc.zig
@@ -65,7 +65,7 @@ pub fn loadMetaData(gpa: Allocator, zig_lib_dir: std.fs.Dir) LoadMetaDataError!*
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
var all_versions = std.ArrayListUnmanaged(std.builtin.Version){};
var all_functions = std.ArrayListUnmanaged(Fn){};
@@ -256,7 +256,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
switch (crt_file) {
.crti_o => {
@@ -711,7 +711,7 @@ pub fn buildSharedObjects(comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const target = comp.getTarget();
const target_version = target.os.version_range.linux.glibc;
src/libcxx.zig
@@ -89,7 +89,7 @@ pub fn buildLibCXX(comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const root_name = "c++";
const output_mode = .Lib;
@@ -236,7 +236,7 @@ pub fn buildLibCXXABI(comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const root_name = "c++abi";
const output_mode = .Lib;
src/libtsan.zig
@@ -15,7 +15,7 @@ pub fn buildTsan(comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const root_name = "tsan";
const output_mode = .Lib;
src/libunwind.zig
@@ -17,7 +17,7 @@ pub fn buildStaticLib(comp: *Compilation) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const root_name = "unwind";
const output_mode = .Lib;
src/link.zig
@@ -628,7 +628,7 @@ pub const File = struct {
var arena_allocator = std.heap.ArenaAllocator.init(base.allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const directory = base.options.emit.?.directory; // Just an alias to make it shorter to type.
src/main.zig
@@ -139,7 +139,7 @@ pub fn main() anyerror!void {
const gpa = gpa: {
if (!builtin.link_libc) {
gpa_need_deinit = true;
- break :gpa general_purpose_allocator.getAllocator();
+ break :gpa general_purpose_allocator.allocator();
}
// We would prefer to use raw libc allocator here, but cannot
// use it if it won't support the alignment we need.
@@ -153,7 +153,7 @@ pub fn main() anyerror!void {
};
var arena_instance = std.heap.ArenaAllocator.init(gpa);
defer arena_instance.deinit();
- const arena = arena_instance.getAllocator();
+ const arena = arena_instance.allocator();
const args = try process.argsAlloc(arena);
@@ -3619,7 +3619,7 @@ pub fn cmdFmt(gpa: Allocator, arena: Allocator, args: []const []const u8) !void
var errors = std.ArrayList(Compilation.AllErrors.Message).init(gpa);
defer errors.deinit();
- try Compilation.AllErrors.addZir(arena_instance.getAllocator(), &errors, &file);
+ try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (color) {
.auto => std.debug.detectTTYConfig(),
.on => .escape_codes,
@@ -3818,7 +3818,7 @@ fn fmtPathFile(
var errors = std.ArrayList(Compilation.AllErrors.Message).init(fmt.gpa);
defer errors.deinit();
- try Compilation.AllErrors.addZir(arena_instance.getAllocator(), &errors, &file);
+ try Compilation.AllErrors.addZir(arena_instance.allocator(), &errors, &file);
const ttyconf: std.debug.TTY.Config = switch (fmt.color) {
.auto => std.debug.detectTTYConfig(),
.on => .escape_codes,
src/mingw.zig
@@ -25,7 +25,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
}
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
switch (crt_file) {
.crt2_o => {
@@ -281,7 +281,7 @@ fn add_cc_args(
pub fn buildImportLib(comp: *Compilation, lib_name: []const u8) !void {
var arena_allocator = std.heap.ArenaAllocator.init(comp.gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const def_file_path = findDef(comp, arena, lib_name) catch |err| switch (err) {
error.FileNotFound => {
src/Module.zig
@@ -517,7 +517,7 @@ pub const Decl = struct {
pub fn finalizeNewArena(decl: *Decl, arena: *std.heap.ArenaAllocator) !void {
assert(decl.value_arena == null);
- const arena_state = try arena.getAllocator().create(std.heap.ArenaAllocator.State);
+ const arena_state = try arena.allocator().create(std.heap.ArenaAllocator.State);
arena_state.* = arena.state;
decl.value_arena = arena_state;
}
@@ -3159,7 +3159,7 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
const gpa = mod.gpa;
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const struct_obj = try new_decl_arena_allocator.create(Module.Struct);
const struct_ty = try Type.Tag.@"struct".create(new_decl_arena_allocator, struct_obj);
@@ -3203,7 +3203,7 @@ pub fn semaFile(mod: *Module, file: *File) SemaError!void {
var sema_arena = std.heap.ArenaAllocator.init(gpa);
defer sema_arena.deinit();
- const sema_arena_allocator = sema_arena.getAllocator();
+ const sema_arena_allocator = sema_arena.allocator();
var sema: Sema = .{
.mod = mod,
@@ -3267,11 +3267,11 @@ fn semaDecl(mod: *Module, decl: *Decl) !bool {
// We need the memory for the Type to go into the arena for the Decl
var decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer decl_arena.deinit();
- const decl_arena_allocator = decl_arena.getAllocator();
+ const decl_arena_allocator = decl_arena.allocator();
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
- const analysis_arena_allocator = analysis_arena.getAllocator();
+ const analysis_arena_allocator = analysis_arena.allocator();
var sema: Sema = .{
.mod = mod,
@@ -4132,7 +4132,7 @@ pub fn analyzeFnBody(mod: *Module, decl: *Decl, func: *Fn, arena: Allocator) Sem
// Use the Decl's arena for captured values.
var decl_arena = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = decl_arena.state;
- const decl_arena_allocator = decl_arena.getAllocator();
+ const decl_arena_allocator = decl_arena.allocator();
var sema: Sema = .{
.mod = mod,
@@ -4756,7 +4756,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
// decl reference it as a slice.
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const arena = new_decl_arena.getAllocator();
+ const arena = new_decl_arena.allocator();
const test_fn_vals = try arena.alloc(Value, mod.test_functions.count());
const array_decl = try mod.createAnonymousDeclFromDecl(decl, decl.src_namespace, null, .{
@@ -4807,7 +4807,7 @@ pub fn populateTestFunctions(mod: *Module) !void {
{
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const arena = new_decl_arena.getAllocator();
+ const arena = new_decl_arena.allocator();
// This copy accesses the old Decl Type/Value so it must be done before `clearValues`.
const new_ty = try Type.Tag.const_slice.create(arena, try tmp_test_fn_ty.copy(arena));
src/musl.zig
@@ -25,7 +25,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
switch (crt_file) {
.crti_o => {
src/print_air.zig
@@ -47,7 +47,7 @@ pub fn dump(gpa: Allocator, air: Air, zir: Zir, liveness: Liveness) void {
var writer: Writer = .{
.gpa = gpa,
- .arena = arena.getAllocator(),
+ .arena = arena.allocator(),
.air = air,
.zir = zir,
.liveness = liveness,
src/print_zir.zig
@@ -19,7 +19,7 @@ pub fn renderAsTextToFile(
var writer: Writer = .{
.gpa = gpa,
- .arena = arena.getAllocator(),
+ .arena = arena.allocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = 0,
@@ -74,7 +74,7 @@ pub fn renderInstructionContext(
var writer: Writer = .{
.gpa = gpa,
- .arena = arena.getAllocator(),
+ .arena = arena.allocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = if (indent < 2) 2 else indent,
@@ -106,7 +106,7 @@ pub fn renderSingleInstruction(
var writer: Writer = .{
.gpa = gpa,
- .arena = arena.getAllocator(),
+ .arena = arena.allocator(),
.file = scope_file,
.code = scope_file.zir,
.indent = indent,
src/Sema.zig
@@ -418,7 +418,7 @@ pub const Block = struct {
finished: bool,
pub fn arena(wad: *WipAnonDecl) Allocator {
- return wad.new_decl_arena.getAllocator();
+ return wad.new_decl_arena.allocator();
}
pub fn deinit(wad: *WipAnonDecl) void {
@@ -1594,7 +1594,7 @@ fn zirStructDecl(
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const struct_obj = try new_decl_arena_allocator.create(Module.Struct);
const struct_ty = try Type.Tag.@"struct".create(new_decl_arena_allocator, struct_obj);
@@ -1699,7 +1699,7 @@ fn zirEnumDecl(
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const enum_obj = try new_decl_arena_allocator.create(Module.EnumFull);
const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumFull);
@@ -1889,7 +1889,7 @@ fn zirUnionDecl(
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const union_obj = try new_decl_arena_allocator.create(Module.Union);
const type_tag: Type.Tag = if (small.has_tag_type or small.auto_enum_tag) .union_tagged else .@"union";
@@ -1958,7 +1958,7 @@ fn zirOpaqueDecl(
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const opaque_obj = try new_decl_arena_allocator.create(Module.Opaque);
const opaque_ty_payload = try new_decl_arena_allocator.create(Type.Payload.Opaque);
@@ -2012,7 +2012,7 @@ fn zirErrorSetDecl(
var new_decl_arena = std.heap.ArenaAllocator.init(gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const error_set = try new_decl_arena_allocator.create(Module.ErrorSet);
const error_set_ty = try Type.Tag.error_set.create(new_decl_arena_allocator, error_set);
@@ -3940,7 +3940,7 @@ fn analyzeCall(
{
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
errdefer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
for (memoized_call_key.args) |*arg| {
arg.* = try arg.*.copy(arena);
@@ -4074,7 +4074,7 @@ fn analyzeCall(
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
// Re-run the block that creates the function, with the comptime parameters
// pre-populated inside `inst_map`. This causes `param_comptime` and
@@ -6053,8 +6053,8 @@ fn zirSwitchBlock(sema: *Sema, block: *Block, inst: Zir.Inst.Index) CompileError
defer arena.deinit();
const target = sema.mod.getTarget();
- const min_int = try operand_ty.minInt(arena.getAllocator(), target);
- const max_int = try operand_ty.maxInt(arena.getAllocator(), target);
+ const min_int = try operand_ty.minInt(arena.allocator(), target);
+ const max_int = try operand_ty.maxInt(arena.allocator(), target);
if (try range_set.spans(min_int, max_int, operand_ty)) {
if (special_prong == .@"else") {
return sema.fail(
@@ -12801,7 +12801,7 @@ const ComptimePtrMutationKit = struct {
fn beginArena(self: *ComptimePtrMutationKit, gpa: Allocator) Allocator {
self.decl_arena = self.decl_ref_mut.decl.value_arena.?.promote(gpa);
- return self.decl_arena.getAllocator();
+ return self.decl_arena.allocator();
}
fn finishArena(self: *ComptimePtrMutationKit) void {
@@ -14293,7 +14293,7 @@ fn semaStructFields(
var decl_arena = decl.value_arena.?.promote(gpa);
defer decl.value_arena.?.* = decl_arena.state;
- const decl_arena_allocator = decl_arena.getAllocator();
+ const decl_arena_allocator = decl_arena.allocator();
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
@@ -14301,7 +14301,7 @@ fn semaStructFields(
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
- .arena = analysis_arena.getAllocator(),
+ .arena = analysis_arena.allocator(),
.perm_arena = decl_arena_allocator,
.code = zir,
.owner_decl = decl,
@@ -14461,7 +14461,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
var decl_arena = union_obj.owner_decl.value_arena.?.promote(gpa);
defer union_obj.owner_decl.value_arena.?.* = decl_arena.state;
- const decl_arena_allocator = decl_arena.getAllocator();
+ const decl_arena_allocator = decl_arena.allocator();
var analysis_arena = std.heap.ArenaAllocator.init(gpa);
defer analysis_arena.deinit();
@@ -14469,7 +14469,7 @@ fn semaUnionFields(mod: *Module, union_obj: *Module.Union) CompileError!void {
var sema: Sema = .{
.mod = mod,
.gpa = gpa,
- .arena = analysis_arena.getAllocator(),
+ .arena = analysis_arena.allocator(),
.perm_arena = decl_arena_allocator,
.code = zir,
.owner_decl = decl,
@@ -14623,7 +14623,7 @@ fn generateUnionTagTypeNumbered(
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const enum_obj = try new_decl_arena_allocator.create(Module.EnumNumbered);
const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumNumbered);
@@ -14660,7 +14660,7 @@ fn generateUnionTagTypeSimple(sema: *Sema, block: *Block, fields_len: u32) !Type
var new_decl_arena = std.heap.ArenaAllocator.init(sema.gpa);
errdefer new_decl_arena.deinit();
- const new_decl_arena_allocator = new_decl_arena.getAllocator();
+ const new_decl_arena_allocator = new_decl_arena.allocator();
const enum_obj = try new_decl_arena_allocator.create(Module.EnumSimple);
const enum_ty_payload = try new_decl_arena_allocator.create(Type.Payload.EnumSimple);
src/stage1.zig
@@ -38,7 +38,7 @@ pub fn main(argc: c_int, argv: [*][*:0]u8) callconv(.C) c_int {
const gpa = std.heap.c_allocator;
var arena_instance = std.heap.ArenaAllocator.init(gpa);
defer arena_instance.deinit();
- const arena = arena_instance.getAllocator();
+ const arena = arena_instance.allocator();
const args = arena.alloc([]const u8, @intCast(usize, argc)) catch fatal("{s}", .{"OutOfMemory"});
for (args) |*arg, i| {
src/test.zig
@@ -692,7 +692,7 @@ pub const TestContext = struct {
var arena_allocator = std.heap.ArenaAllocator.init(allocator);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
var tmp = std.testing.tmpDir(.{});
defer tmp.cleanup();
src/translate_c.zig
@@ -373,7 +373,7 @@ pub fn translate(
// from this function.
var arena = std.heap.ArenaAllocator.init(gpa);
errdefer arena.deinit();
- const arena_allocator = arena.getAllocator();
+ const arena_allocator = arena.allocator();
var context = Context{
.gpa = gpa,
src/wasi_libc.zig
@@ -67,7 +67,7 @@ pub fn buildCRTFile(comp: *Compilation, crt_file: CRTFile) !void {
const gpa = comp.gpa;
var arena_allocator = std.heap.ArenaAllocator.init(gpa);
defer arena_allocator.deinit();
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
switch (crt_file) {
.crt1_reactor_o => {
test/standalone/brace_expansion/main.zig
@@ -16,7 +16,7 @@ const Token = union(enum) {
};
var gpa = std.heap.GeneralPurposeAllocator(.{}){};
-const global_allocator = gpa.getAllocator();
+const global_allocator = gpa.allocator();
fn tokenize(input: []const u8) !ArrayList(Token) {
const State = enum {
test/standalone/cat/main.zig
@@ -8,7 +8,7 @@ const warn = std.log.warn;
pub fn main() !void {
var arena_instance = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_instance.deinit();
- const arena = arena_instance.getAllocator();
+ const arena = arena_instance.allocator();
const args = try process.argsAlloc(arena);
test/cli.zig
@@ -16,7 +16,7 @@ pub fn main() !void {
// skip my own exe name
_ = arg_it.skip();
- a = arena.getAllocator();
+ a = arena.allocator();
const zig_exe_rel = try (arg_it.next(a) orelse {
std.debug.print("Expected first argument to be path to zig compiler\n", .{});
test/compare_output.zig
@@ -491,7 +491,7 @@ pub fn addCases(cases: *tests.CompareOutputContext) void {
\\pub fn main() !void {
\\ var allocator_buf: [10]u8 = undefined;
\\ var fixedBufferAllocator = std.mem.validationWrap(std.heap.FixedBufferAllocator.init(&allocator_buf));
- \\ const allocator = std.heap.loggingAllocator(fixedBufferAllocator.getAllocator()).getAllocator();
+ \\ const allocator = std.heap.loggingAllocator(fixedBufferAllocator.allocator()).allocator();
\\
\\ var a = try allocator.alloc(u8, 10);
\\ a = allocator.shrink(a, 5);
tools/gen_spirv_spec.zig
@@ -4,7 +4,7 @@ const g = @import("spirv/grammar.zig");
pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);
if (args.len != 2) {
tools/gen_stubs.zig
@@ -25,7 +25,7 @@ pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const ally = arena.getAllocator();
+ const ally = arena.allocator();
var symbols = std.ArrayList(Symbol).init(ally);
var sections = std.ArrayList([]const u8).init(ally);
tools/merge_anal_dumps.zig
@@ -9,7 +9,7 @@ pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);
tools/process_headers.zig
@@ -284,7 +284,7 @@ const LibCVendor = enum {
pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);
var search_paths = std.ArrayList([]const u8).init(allocator);
var opt_out_dir: ?[]const u8 = null;
tools/update-license-headers.zig
@@ -10,7 +10,7 @@ pub fn main() !void {
defer root_node.end();
var arena_allocator = std.heap.ArenaAllocator.init(std.heap.page_allocator);
- const arena = arena_allocator.getAllocator();
+ const arena = arena_allocator.allocator();
const args = try std.process.argsAlloc(arena);
const path_to_walk = args[1];
tools/update_clang_options.zig
@@ -450,13 +450,8 @@ const cpu_targets = struct {
pub fn main() anyerror!void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
-<<<<<<< HEAD
- const allocator = &arena.allocator;
-=======
-
- const allocator = arena.getAllocator();
->>>>>>> 11157e318 (allocgate: stage 1 and 2 building)
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);
if (args.len <= 1) {
tools/update_cpu_features.zig
@@ -769,7 +769,7 @@ const llvm_targets = [_]LlvmTarget{
pub fn main() anyerror!void {
var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_state.deinit();
- const arena = arena_state.getAllocator();
+ const arena = arena_state.allocator();
const args = try std.process.argsAlloc(arena);
if (args.len <= 1) {
@@ -845,7 +845,7 @@ fn processOneTarget(job: Job) anyerror!void {
var arena_state = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena_state.deinit();
- const arena = arena_state.getAllocator();
+ const arena = arena_state.allocator();
var progress_node = job.root_progress.start(llvm_target.zig_name, 3);
progress_node.activate();
tools/update_glibc.zig
@@ -133,7 +133,7 @@ const Function = struct {
pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);
const in_glibc_dir = args[1]; // path to the unzipped tarball of glibc, e.g. ~/downloads/glibc-2.25
const zig_src_dir = args[2]; // path to the source checkout of zig, lib dir, e.g. ~/zig-src/lib
tools/update_spirv_features.zig
@@ -48,7 +48,7 @@ const Version = struct {
pub fn main() !void {
var arena = std.heap.ArenaAllocator.init(std.heap.page_allocator);
defer arena.deinit();
- const allocator = arena.getAllocator();
+ const allocator = arena.allocator();
const args = try std.process.argsAlloc(allocator);