Commit f205d23e65
Changed files (9)
doc/langref.html.in
lib/std/event/group.zig
lib/std/builtin.zig
src/all_types.hpp
src/ir.cpp
src-self-hosted/compilation.zig
src-self-hosted/libc_installation.zig
src-self-hosted/main.zig
test/stage1/behavior/async_fn.zig
doc/langref.html.in
@@ -6880,6 +6880,9 @@ pub const CallOptions = struct {
/// Equivalent to function call syntax.
auto,
+ /// Equivalent to async keyword used with function call syntax.
+ async_kw,
+
/// Prevents tail call optimization. This guarantees that the return
/// address will point to the callsite, as opposed to the callsite's
/// callsite. If the call is otherwise required to be tail-called
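
The new modifier exposes the async keyword through @call. A minimal sketch of the equivalence described by the doc comment above (not part of this commit; `add`, `total`, and the test name are hypothetical):

    const std = @import("std");
    const expect = std.testing.expect;

    var total: i32 = 0;

    fn add(amount: i32) void {
        total += amount;
    }

    test "async keyword vs .async_kw modifier (sketch)" {
        // Both lines start an async call of `add`; since `add` never suspends,
        // the returned frames complete immediately and may be discarded.
        _ = async add(1);
        _ = @call(.{ .modifier = .async_kw }, add, .{2});
        expect(total == 3);
    }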
lib/std/event/group.zig
@@ -60,16 +60,19 @@ pub fn Group(comptime ReturnType: type) type {
/// allocated by the group and freed by `wait`.
/// `func` must be async and have return type `ReturnType`.
/// Thread-safe.
- pub fn call(self: *Self, comptime func: var, args: ...) error{OutOfMemory}!void {
- var frame = try self.allocator.create(@Frame(func));
+ pub fn call(self: *Self, comptime func: var, args: var) error{OutOfMemory}!void {
+ var frame = try self.allocator.create(@TypeOf(@call(.{ .modifier = .async_kw }, func, args)));
+ errdefer self.allocator.destroy(frame);
const node = try self.allocator.create(AllocStack.Node);
+ errdefer self.allocator.destroy(node);
node.* = AllocStack.Node{
.next = undefined,
.data = Node{
- .handle = @asyncCall(frame, {}, func, args),
+ .handle = frame,
.bytes = std.mem.asBytes(frame),
},
};
+ frame.* = @call(.{ .modifier = .async_kw }, func, args);
self.alloc_stack.push(node);
}
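
Group.call now takes its arguments as a tuple, matching @call, and derives the frame type from the async call expression rather than @Frame(func). A usage sketch, not from this commit, with a hypothetical worker function and assuming an async-capable (evented) build:

    const std = @import("std");

    async fn work(counter: *i32, amount: i32) error{OutOfMemory}!void {
        counter.* += amount;
    }

    fn runGroup(allocator: *std.mem.Allocator) error{OutOfMemory}!void {
        var group = std.event.Group(error{OutOfMemory}!void).init(allocator);
        var counter: i32 = 0;
        // Arguments are passed as a tuple literal instead of varargs.
        try group.call(work, .{ &counter, @as(i32, 1) });
        try group.call(work, .{ &counter, @as(i32, 2) });
        // wait() runs the group to completion and frees the frames it allocated.
        try group.wait();
        std.debug.assert(counter == 3);
    }

The errdefer lines added above also close a leak: previously, if the second allocation (the stack node) failed, the already-created frame was never freed.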
lib/std/builtin.zig
@@ -382,6 +382,9 @@ pub const CallOptions = struct {
/// Equivalent to function call syntax.
auto,
+ /// Equivalent to async keyword used with function call syntax.
+ async_kw,
+
/// Prevents tail call optimization. This guarantees that the return
/// address will point to the callsite, as opposed to the callsite's
/// callsite. If the call is otherwise required to be tail-called
src/all_types.hpp
@@ -782,6 +782,7 @@ struct AstNodeUnwrapOptional {
// Must be synchronized with std.builtin.CallOptions.Modifier
enum CallModifier {
CallModifierNone,
+ CallModifierAsync,
CallModifierNeverTail,
CallModifierNeverInline,
CallModifierNoAsync,
@@ -791,7 +792,6 @@ enum CallModifier {
// These are additional tags in the compiler, but not exposed in the std lib.
CallModifierBuiltin,
- CallModifierAsync,
};
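
For reference, this is the Zig-side enum the comment says must stay in sync (std.builtin.CallOptions.Modifier as of this commit, reproduced here for context rather than taken from the diff):

    pub const Modifier = enum {
        auto,
        async_kw,
        never_tail,
        never_inline,
        no_async,
        always_tail,
        always_inline,
        compile_time,
    };

Moving CallModifierAsync out of the compiler-only tail to the slot right after CallModifierNone keeps the integer tag values of the two enums aligned, which is what lets the cast in ir.cpp below map the user-supplied modifier directly onto the C enum.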
struct AstNodeFnCallExpr {
src/ir.cpp
@@ -18330,10 +18330,7 @@ static IrInstruction *ir_analyze_call_extra(IrAnalyze *ira, IrInstruction *sourc
if (modifier_val == nullptr)
return ira->codegen->invalid_instruction;
CallModifier modifier = (CallModifier)bigint_as_u32(&modifier_val->data.x_enum_tag);
- if (modifier == CallModifierAsync) {
- ir_add_error(ira, source_instr, buf_sprintf("TODO: @call with async modifier"));
- return ira->codegen->invalid_instruction;
- }
+
if (ir_should_inline(ira->new_irb.exec, source_instr->scope)) {
switch (modifier) {
case CallModifierBuiltin:
src-self-hosted/compilation.zig
@@ -778,7 +778,7 @@ pub const Compilation = struct {
continue;
};
const root_scope = ev.data;
- group.call(rebuildFile, self, root_scope) catch |err| {
+ group.call(rebuildFile, .{ self, root_scope }) catch |err| {
build_result = err;
continue;
};
@@ -787,7 +787,7 @@ pub const Compilation = struct {
while (self.fs_watch.channel.getOrNull()) |ev_or_err| {
if (ev_or_err) |ev| {
const root_scope = ev.data;
- group.call(rebuildFile, self, root_scope) catch |err| {
+ group.call(rebuildFile, .{ self, root_scope }) catch |err| {
build_result = err;
continue;
};
@@ -868,7 +868,7 @@ pub const Compilation = struct {
// TODO connect existing comptime decls to updated source files
- try self.prelink_group.call(addCompTimeBlock, self, tree_scope, &decl_scope.base, comptime_node);
+ try self.prelink_group.call(addCompTimeBlock, .{ self, tree_scope, &decl_scope.base, comptime_node });
},
.VarDecl => @panic("TODO"),
.FnProto => {
@@ -921,7 +921,7 @@ pub const Compilation = struct {
tree_scope.base.ref();
errdefer self.gpa().destroy(fn_decl);
- try group.call(addTopLevelDecl, self, &fn_decl.base, locked_table);
+ try group.call(addTopLevelDecl, .{ self, &fn_decl.base, locked_table });
}
},
.TestDecl => @panic("TODO"),
@@ -1042,8 +1042,8 @@ pub const Compilation = struct {
const is_export = decl.isExported(decl.tree_scope.tree);
if (is_export) {
- try self.prelink_group.call(verifyUniqueSymbol, self, decl);
- try self.prelink_group.call(resolveDecl, self, decl);
+ try self.prelink_group.call(verifyUniqueSymbol, .{ self, decl });
+ try self.prelink_group.call(resolveDecl, .{ self, decl });
}
const gop = try locked_table.getOrPut(decl.name);
@@ -1062,7 +1062,7 @@ pub const Compilation = struct {
const msg = try Msg.createFromScope(self, tree_scope, span, text);
errdefer msg.destroy();
- try self.prelink_group.call(addCompileErrorAsync, self, msg);
+ try self.prelink_group.call(addCompileErrorAsync, .{ self, msg });
}
fn addCompileErrorCli(self: *Compilation, realpath: []const u8, comptime fmt: []const u8, args: var) !void {
@@ -1072,7 +1072,7 @@ pub const Compilation = struct {
const msg = try Msg.createFromCli(self, realpath, text);
errdefer msg.destroy();
- try self.prelink_group.call(addCompileErrorAsync, self, msg);
+ try self.prelink_group.call(addCompileErrorAsync, .{ self, msg });
}
async fn addCompileErrorAsync(
@@ -1131,7 +1131,7 @@ pub const Compilation = struct {
// get a head start on looking for the native libc
if (self.target == Target.Native and self.override_libc == null) {
- try self.deinit_group.call(startFindingNativeLibC, self);
+ try self.deinit_group.call(startFindingNativeLibC, .{self});
}
}
return link_lib;
@@ -1339,8 +1339,8 @@ fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
// Kick off rendering to LLVM module, but it doesn't block the fn decl
// analysis from being complete.
- try comp.prelink_group.call(codegen.renderToLlvm, comp, fn_val, analyzed_code);
- try comp.prelink_group.call(addFnToLinkSet, comp, fn_val);
+ try comp.prelink_group.call(codegen.renderToLlvm, .{ comp, fn_val, analyzed_code });
+ try comp.prelink_group.call(addFnToLinkSet, .{ comp, fn_val });
}
async fn addFnToLinkSet(comp: *Compilation, fn_val: *Value.Fn) Compilation.BuildError!void {
src-self-hosted/libc_installation.zig
@@ -158,9 +158,9 @@ pub const LibCInstallation = struct {
if (sdk.msvc_lib_dir_ptr != 0) {
self.msvc_lib_dir = try std.mem.dupe(allocator, u8, sdk.msvc_lib_dir_ptr[0..sdk.msvc_lib_dir_len]);
}
- try group.call(findNativeKernel32LibDir, allocator, self, sdk);
- try group.call(findNativeIncludeDirWindows, self, allocator, sdk);
- try group.call(findNativeLibDirWindows, self, allocator, sdk);
+ try group.call(findNativeKernel32LibDir, .{ allocator, self, sdk });
+ try group.call(findNativeIncludeDirWindows, .{ self, allocator, sdk });
+ try group.call(findNativeLibDirWindows, .{ self, allocator, sdk });
},
c.ZigFindWindowsSdkError.OutOfMemory => return error.OutOfMemory,
c.ZigFindWindowsSdkError.NotFound => return error.NotFound,
@@ -168,10 +168,10 @@ pub const LibCInstallation = struct {
}
},
.linux => {
- try group.call(findNativeIncludeDirLinux, self, allocator);
- try group.call(findNativeLibDirLinux, self, allocator);
- try group.call(findNativeStaticLibDir, self, allocator);
- try group.call(findNativeDynamicLinker, self, allocator);
+ try group.call(findNativeIncludeDirLinux, .{ self, allocator });
+ try group.call(findNativeLibDirLinux, .{ self, allocator });
+ try group.call(findNativeStaticLibDir, .{ self, allocator });
+ try group.call(findNativeDynamicLinker, .{ self, allocator });
},
.macosx, .freebsd, .netbsd => {
self.include_dir = try std.mem.dupe(allocator, u8, "/usr/include");
@@ -322,7 +322,7 @@ pub const LibCInstallation = struct {
var group = event.Group(FindError!void).init(allocator);
errdefer group.wait() catch {};
for (dyn_tests) |*dyn_test| {
- try group.call(testNativeDynamicLinker, self, allocator, dyn_test);
+ try group.call(testNativeDynamicLinker, .{ self, allocator, dyn_test });
}
try group.wait();
for (dyn_tests) |*dyn_test| {
src-self-hosted/main.zig
@@ -654,7 +654,7 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
var group = event.Group(FmtError!void).init(allocator);
for (flags.positionals.toSliceConst()) |file_path| {
- try group.call(fmtPath, &fmt, file_path, check_mode);
+ try group.call(fmtPath, .{ &fmt, file_path, check_mode });
}
try group.wait();
if (fmt.any_error) {
@@ -710,7 +710,7 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtErro
if (entry.kind == .Directory or mem.endsWith(u8, entry.name, ".zig")) {
const full_path = try fs.path.join(fmt.allocator, &[_][]const u8{ file_path, entry.name });
@panic("TODO https://github.com/ziglang/zig/issues/3777");
- // try group.call(fmtPath, fmt, full_path, check_mode);
+ // try group.call(fmtPath, .{fmt, full_path, check_mode});
}
}
return group.wait();
test/stage1/behavior/async_fn.zig
@@ -1271,3 +1271,25 @@ test "spill target expr in a for loop, with a var decl in the loop body" {
resume S.global_frame;
resume S.global_frame;
}
+
+test "async call with @call" {
+ const S = struct {
+ var global_frame: anyframe = undefined;
+ fn doTheTest() void {
+ _ = @call(.{ .modifier = .async_kw }, atest, .{});
+ resume global_frame;
+ }
+ fn atest() void {
+ var frame = @call(.{ .modifier = .async_kw }, afoo, .{});
+ const res = await frame;
+ expect(res == 42);
+ }
+ fn afoo() i32 {
+ suspend {
+ global_frame = @frame();
+ }
+ return 42;
+ }
+ };
+ S.doTheTest();
+}