commit cb20093614
Author: Vexu <15308111+Vexu@users.noreply.github.com>
Date:   2019-11-06 18:38:59
Parent: b06e5b8

    self hosted compiler: remove await async pattern
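
    This commit drops the old coroutine-style calling convention, in which every
    call to an async function had to be wrapped as `await (async f(...) catch
    unreachable)` and coroutine handles had type `promise`. With the reworked
    async/await, async functions are called like ordinary functions, and explicit
    frames are stored in `@Frame(...)` fields driven by `async`, `await`, and
    `resume`. A minimal sketch of the two transformations, using lines adapted
    from the hunks below (the `ext` argument stands in for
    `comp.target.objFileExt()`; see the diff for the exact changes):

        // before: wrap the call and await it immediately
        //     var output_path = try await (async comp.createRandomOutputPath(ext) catch unreachable);
        // after: a plain call into the async function
        var output_path = try comp.createRandomOutputPath(ext);

        // before: an untyped coroutine handle stored on the struct
        //     main_loop_handle: promise,
        // after: a typed frame plus explicit async/await
        main_loop_frame: @Frame(Compilation.mainLoop),
        // ...
        comp.main_loop_frame = async comp.mainLoop();
        await comp.main_loop_frame;
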
src-self-hosted/codegen.zig
@@ -17,7 +17,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
     defer fn_val.base.deref(comp);
     defer code.destroy(comp.gpa());
 
-    var output_path = try await (async comp.createRandomOutputPath(comp.target.objFileExt()) catch unreachable);
+    var output_path = try comp.createRandomOutputPath(comp.target.objFileExt());
     errdefer output_path.deinit();
 
     const llvm_handle = try comp.zig_compiler.getAnyLlvmContext();
src-self-hosted/compilation.zig
@@ -93,8 +93,8 @@ pub const ZigCompiler = struct {
     }
 
     pub async fn getNativeLibC(self: *ZigCompiler) !*LibCInstallation {
-        if (await (async self.native_libc.start() catch unreachable)) |ptr| return ptr;
-        try await (async self.native_libc.data.findNative(self.loop) catch unreachable);
+        if (self.native_libc.start()) |ptr| return ptr;
+        try self.native_libc.data.findNative(self.loop);
         self.native_libc.resolve();
         return &self.native_libc.data;
     }
@@ -227,8 +227,8 @@ pub const Compilation = struct {
     /// need to wait on this group before deinitializing
     deinit_group: event.Group(void),
 
-    destroy_handle: promise,
-    main_loop_handle: promise,
+    // destroy_frame: @Frame(createAsync),
+    main_loop_frame: @Frame(Compilation.mainLoop),
     main_loop_future: event.Future(void),
 
     have_err_ret_tracing: bool,
@@ -348,7 +348,7 @@ pub const Compilation = struct {
         zig_lib_dir: []const u8,
     ) !*Compilation {
         var optional_comp: ?*Compilation = null;
-        const handle = try async<zig_compiler.loop.allocator> createAsync(
+        const frame = async createAsync(
             &optional_comp,
             zig_compiler,
             name,
@@ -359,10 +359,7 @@ pub const Compilation = struct {
             is_static,
             zig_lib_dir,
         );
-        return optional_comp orelse if (getAwaitResult(
-            zig_compiler.loop.allocator,
-            handle,
-        )) |_| unreachable else |err| err;
+        return optional_comp orelse await frame;
     }
 
     async fn createAsync(
@@ -376,10 +373,6 @@ pub const Compilation = struct {
         is_static: bool,
         zig_lib_dir: []const u8,
     ) !void {
-        // workaround for https://github.com/ziglang/zig/issues/1194
-        suspend {
-            resume @handle();
-        }
 
         const loop = zig_compiler.loop;
         var comp = Compilation{
@@ -395,8 +388,8 @@ pub const Compilation = struct {
             .zig_lib_dir = zig_lib_dir,
             .zig_std_dir = undefined,
             .tmp_dir = event.Future(BuildError![]u8).init(loop),
-            .destroy_handle = @handle(),
-            .main_loop_handle = undefined,
+            .destroy_frame = @frame(),
+            .main_loop_frame = undefined,
             .main_loop_future = event.Future(void).init(loop),
 
             .name = undefined,
@@ -546,7 +539,7 @@ pub const Compilation = struct {
         try comp.initTypes();
         defer comp.primitive_type_table.deinit();
 
-        comp.main_loop_handle = async comp.mainLoop() catch unreachable;
+        comp.main_loop_frame = async comp.mainLoop() catch unreachable;
         // Set this to indicate that initialization completed successfully.
         // from here on out we must not return an error.
         // This must occur before the first suspend/await.
@@ -555,7 +548,7 @@ pub const Compilation = struct {
         suspend;
         // From here on is cleanup.
 
-        await (async comp.deinit_group.wait() catch unreachable);
+        comp.deinit_group.wait();
 
         if (comp.tmp_dir.getOrNull()) |tmp_dir_result| if (tmp_dir_result.*) |tmp_dir| {
             // TODO evented I/O?
@@ -578,10 +571,10 @@ pub const Compilation = struct {
                         error.Overflow => return error.Overflow,
                         error.InvalidCharacter => unreachable, // we just checked the characters above
                     };
-                    const int_type = try await (async Type.Int.get(comp, Type.Int.Key{
+                    const int_type = try Type.Int.get(comp, Type.Int.Key{
                         .bit_count = bit_count,
                         .is_signed = is_signed,
-                    }) catch unreachable);
+                    });
                     errdefer int_type.base.base.deref();
                     return &int_type.base;
                 },
@@ -758,8 +751,8 @@ pub const Compilation = struct {
     }
 
     pub fn destroy(self: *Compilation) void {
-        cancel self.main_loop_handle;
-        resume self.destroy_handle;
+        await self.main_loop_frame;
+        resume self.destroy_frame;
     }
 
     fn start(self: *Compilation) void {
@@ -768,13 +761,13 @@ pub const Compilation = struct {
 
     async fn mainLoop(self: *Compilation) void {
         // wait until start() is called
-        _ = await (async self.main_loop_future.get() catch unreachable);
+        _ = self.main_loop_future.get();
 
-        var build_result = await (async self.initialCompile() catch unreachable);
+        var build_result = self.initialCompile();
 
         while (true) {
             const link_result = if (build_result) blk: {
-                break :blk await (async self.maybeLink() catch unreachable);
+                break :blk self.maybeLink();
             } else |err| err;
             // this makes a handy error return trace and stack trace in debug mode
             if (std.debug.runtime_safety) {
@@ -782,28 +775,28 @@ pub const Compilation = struct {
             }
 
             const compile_errors = blk: {
-                const held = await (async self.compile_errors.acquire() catch unreachable);
+                const held = self.compile_errors.acquire();
                 defer held.release();
                 break :blk held.value.toOwnedSlice();
             };
 
             if (link_result) |_| {
                 if (compile_errors.len == 0) {
-                    await (async self.events.put(Event.Ok) catch unreachable);
+                    self.events.put(Event.Ok);
                 } else {
-                    await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
+                    self.events.put(Event{ .Fail = compile_errors });
                 }
             } else |err| {
                 // if there's an error then the compile errors have dangling references
                 self.gpa().free(compile_errors);
 
-                await (async self.events.put(Event{ .Error = err }) catch unreachable);
+                self.events.put(Event{ .Error = err });
             }
 
             // First, get an item from the watch channel, waiting on the channel.
             var group = event.Group(BuildError!void).init(self.loop);
             {
-                const ev = (await (async self.fs_watch.channel.get() catch unreachable)) catch |err| {
+                const ev = (self.fs_watch.channel.get()) catch |err| {
                     build_result = err;
                     continue;
                 };
@@ -814,7 +807,7 @@ pub const Compilation = struct {
                 };
             }
             // Next, get all the items from the channel that are buffered up.
-            while (await (async self.fs_watch.channel.getOrNull() catch unreachable)) |ev_or_err| {
+            while (self.fs_watch.channel.getOrNull()) |ev_or_err| {
                 if (ev_or_err) |ev| {
                     const root_scope = ev.data;
                     group.call(rebuildFile, self, root_scope) catch |err| {
@@ -826,17 +819,17 @@ pub const Compilation = struct {
                     continue;
                 }
             }
-            build_result = await (async group.wait() catch unreachable);
+            build_result = group.wait();
         }
     }
 
     async fn rebuildFile(self: *Compilation, root_scope: *Scope.Root) !void {
         const tree_scope = blk: {
-            const source_code = (await (async fs.readFile(
+            const source_code = fs.readFile(
                 self.loop,
                 root_scope.realpath,
                 max_src_size,
-            ) catch unreachable)) catch |err| {
+            ) catch |err| {
                 try self.addCompileErrorCli(root_scope.realpath, "unable to open: {}", @errorName(err));
                 return;
             };
@@ -856,13 +849,13 @@ pub const Compilation = struct {
             const msg = try Msg.createFromParseErrorAndScope(self, tree_scope, parse_error);
             errdefer msg.destroy();
 
-            try await (async self.addCompileErrorAsync(msg) catch unreachable);
+            try self.addCompileErrorAsync(msg);
         }
         if (tree_scope.tree.errors.len != 0) {
             return;
         }
 
-        const locked_table = await (async root_scope.decls.table.acquireWrite() catch unreachable);
+        const locked_table = root_scope.decls.table.acquireWrite();
         defer locked_table.release();
 
         var decl_group = event.Group(BuildError!void).init(self.loop);
@@ -876,7 +869,7 @@ pub const Compilation = struct {
             tree_scope,
         );
 
-        try await (async decl_group.wait() catch unreachable);
+        try decl_group.wait();
     }
 
     async fn rebuildChangedDecls(
@@ -988,20 +981,20 @@ pub const Compilation = struct {
     }
 
     async fn maybeLink(self: *Compilation) !void {
-        (await (async self.prelink_group.wait() catch unreachable)) catch |err| switch (err) {
+        (self.prelink_group.wait()) catch |err| switch (err) {
             error.SemanticAnalysisFailed => {},
             else => return err,
         };
 
         const any_prelink_errors = blk: {
-            const compile_errors = await (async self.compile_errors.acquire() catch unreachable);
+            const compile_errors = self.compile_errors.acquire();
             defer compile_errors.release();
 
             break :blk compile_errors.value.len != 0;
         };
 
         if (!any_prelink_errors) {
-            try await (async link(self) catch unreachable);
+            try link(self);
         }
     }
 
@@ -1013,12 +1006,12 @@ pub const Compilation = struct {
         node: *ast.Node,
         expected_type: ?*Type,
     ) !*ir.Code {
-        const unanalyzed_code = try await (async ir.gen(
+        const unanalyzed_code = try ir.gen(
             comp,
             node,
             tree_scope,
             scope,
-        ) catch unreachable);
+        );
         defer unanalyzed_code.destroy(comp.gpa());
 
         if (comp.verbose_ir) {
@@ -1026,11 +1019,11 @@ pub const Compilation = struct {
             unanalyzed_code.dump();
         }
 
-        const analyzed_code = try await (async ir.analyze(
+        const analyzed_code = try ir.analyze(
             comp,
             unanalyzed_code,
             expected_type,
-        ) catch unreachable);
+        );
         errdefer analyzed_code.destroy(comp.gpa());
 
         if (comp.verbose_ir) {
@@ -1050,13 +1043,13 @@ pub const Compilation = struct {
         const void_type = Type.Void.get(comp);
         defer void_type.base.base.deref(comp);
 
-        const analyzed_code = (await (async genAndAnalyzeCode(
+        const analyzed_code = genAndAnalyzeCode(
             comp,
             tree_scope,
             scope,
             comptime_node.expr,
             &void_type.base,
-        ) catch unreachable)) catch |err| switch (err) {
+        ) catch |err| switch (err) {
             // This poison value should not cause the errdefers to run. It simply means
             // that comp.compile_errors is populated.
             error.SemanticAnalysisFailed => return {},
@@ -1112,14 +1105,14 @@ pub const Compilation = struct {
     ) !void {
         errdefer msg.destroy();
 
-        const compile_errors = await (async self.compile_errors.acquire() catch unreachable);
+        const compile_errors = self.compile_errors.acquire();
         defer compile_errors.release();
 
         try compile_errors.value.append(msg);
     }
 
     async fn verifyUniqueSymbol(self: *Compilation, decl: *Decl) !void {
-        const exported_symbol_names = await (async self.exported_symbol_names.acquire() catch unreachable);
+        const exported_symbol_names = self.exported_symbol_names.acquire();
         defer exported_symbol_names.release();
 
         if (try exported_symbol_names.value.put(decl.name, decl)) |other_decl| {
@@ -1173,9 +1166,9 @@ pub const Compilation = struct {
 
     /// cancels itself so no need to await or cancel the promise.
     async fn startFindingNativeLibC(self: *Compilation) void {
-        await (async self.loop.yield() catch unreachable);
+        self.loop.yield();
         // we don't care if it fails, we're just trying to kick off the future resolution
-        _ = (await (async self.zig_compiler.getNativeLibC() catch unreachable)) catch return;
+        _ = (self.zig_compiler.getNativeLibC()) catch return;
     }
 
     /// General Purpose Allocator. Must free when done.
@@ -1191,8 +1184,8 @@ pub const Compilation = struct {
     /// If the temporary directory for this compilation has not been created, it creates it.
     /// Then it creates a random file name in that dir and returns it.
     pub async fn createRandomOutputPath(self: *Compilation, suffix: []const u8) !Buffer {
-        const tmp_dir = try await (async self.getTmpDir() catch unreachable);
-        const file_prefix = await (async self.getRandomFileName() catch unreachable);
+        const tmp_dir = try self.getTmpDir();
+        const file_prefix = self.getRandomFileName();
 
         const file_name = try std.fmt.allocPrint(self.gpa(), "{}{}", file_prefix[0..], suffix);
         defer self.gpa().free(file_name);
@@ -1207,14 +1200,14 @@ pub const Compilation = struct {
     /// Then returns it. The directory is unique to this Compilation and cleaned up when
     /// the Compilation deinitializes.
     async fn getTmpDir(self: *Compilation) ![]const u8 {
-        if (await (async self.tmp_dir.start() catch unreachable)) |ptr| return ptr.*;
-        self.tmp_dir.data = await (async self.getTmpDirImpl() catch unreachable);
+        if (self.tmp_dir.start()) |ptr| return ptr.*;
+        self.tmp_dir.data = self.getTmpDirImpl();
         self.tmp_dir.resolve();
         return self.tmp_dir.data;
     }
 
     async fn getTmpDirImpl(self: *Compilation) ![]u8 {
-        const comp_dir_name = await (async self.getRandomFileName() catch unreachable);
+        const comp_dir_name = self.getRandomFileName();
         const zig_dir_path = try getZigDir(self.gpa());
         defer self.gpa().free(zig_dir_path);
 
@@ -1233,7 +1226,7 @@ pub const Compilation = struct {
         var rand_bytes: [9]u8 = undefined;
 
         {
-            const held = await (async self.zig_compiler.prng.acquire() catch unreachable);
+            const held = self.zig_compiler.prng.acquire();
             defer held.release();
 
             held.value.random.bytes(rand_bytes[0..]);
@@ -1256,7 +1249,7 @@ pub const Compilation = struct {
         node: *ast.Node,
         expected_type: *Type,
     ) !*Value {
-        const analyzed_code = try await (async comp.genAndAnalyzeCode(tree_scope, scope, node, expected_type) catch unreachable);
+        const analyzed_code = try comp.genAndAnalyzeCode(tree_scope, scope, node, expected_type);
         defer analyzed_code.destroy(comp.gpa());
 
         return analyzed_code.getCompTimeResult(comp);
@@ -1266,7 +1259,7 @@ pub const Compilation = struct {
         const meta_type = &Type.MetaType.get(comp).base;
         defer meta_type.base.deref(comp);
 
-        const result_val = try await (async comp.analyzeConstValue(tree_scope, scope, node, meta_type) catch unreachable);
+        const result_val = try comp.analyzeConstValue(tree_scope, scope, node, meta_type);
         errdefer result_val.base.deref(comp);
 
         return result_val.cast(Type).?;
@@ -1274,9 +1267,9 @@ pub const Compilation = struct {
 
     /// This declaration has been blessed as going into the final code generation.
     pub async fn resolveDecl(comp: *Compilation, decl: *Decl) !void {
-        if (await (async decl.resolution.start() catch unreachable)) |ptr| return ptr.*;
+        if (decl.resolution.start()) |ptr| return ptr.*;
 
-        decl.resolution.data = try await (async generateDecl(comp, decl) catch unreachable);
+        decl.resolution.data = try generateDecl(comp, decl);
         decl.resolution.resolve();
         return decl.resolution.data;
     }
@@ -1298,7 +1291,7 @@ async fn generateDecl(comp: *Compilation, decl: *Decl) !void {
         Decl.Id.Var => @panic("TODO"),
         Decl.Id.Fn => {
             const fn_decl = @fieldParentPtr(Decl.Fn, "base", decl);
-            return await (async generateDeclFn(comp, fn_decl) catch unreachable);
+            return generateDeclFn(comp, fn_decl);
         },
         Decl.Id.CompTime => @panic("TODO"),
     }
@@ -1307,12 +1300,12 @@ async fn generateDecl(comp: *Compilation, decl: *Decl) !void {
 async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
     const tree_scope = fn_decl.base.tree_scope;
 
-    const body_node = fn_decl.fn_proto.body_node orelse return await (async generateDeclFnProto(comp, fn_decl) catch unreachable);
+    const body_node = fn_decl.fn_proto.body_node orelse return generateDeclFnProto(comp, fn_decl);
 
     const fndef_scope = try Scope.FnDef.create(comp, fn_decl.base.parent_scope);
     defer fndef_scope.base.deref(comp);
 
-    const fn_type = try await (async analyzeFnType(comp, tree_scope, fn_decl.base.parent_scope, fn_decl.fn_proto) catch unreachable);
+    const fn_type = try analyzeFnType(comp, tree_scope, fn_decl.base.parent_scope, fn_decl.fn_proto);
     defer fn_type.base.base.deref(comp);
 
     var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
@@ -1356,12 +1349,12 @@ async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
         try fn_type.non_key.Normal.variable_list.append(var_scope);
     }
 
-    const analyzed_code = try await (async comp.genAndAnalyzeCode(
+    const analyzed_code = try comp.genAndAnalyzeCode(
         tree_scope,
         fn_val.child_scope,
         body_node,
         fn_type.key.data.Normal.return_type,
-    ) catch unreachable);
+    );
     errdefer analyzed_code.destroy(comp.gpa());
 
     assert(fn_val.block_scope != null);
@@ -1378,7 +1371,7 @@ async fn addFnToLinkSet(comp: *Compilation, fn_val: *Value.Fn) void {
 
     fn_val.link_set_node.data = fn_val;
 
-    const held = await (async comp.fn_link_set.acquire() catch unreachable);
+    const held = comp.fn_link_set.acquire();
     defer held.release();
 
     held.value.append(fn_val.link_set_node);
@@ -1398,7 +1391,7 @@ async fn analyzeFnType(
         ast.Node.FnProto.ReturnType.Explicit => |n| n,
         ast.Node.FnProto.ReturnType.InferErrorSet => |n| n,
     };
-    const return_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, return_type_node) catch unreachable);
+    const return_type = try comp.analyzeTypeExpr(tree_scope, scope, return_type_node);
     return_type.base.deref(comp);
 
     var params = ArrayList(Type.Fn.Param).init(comp.gpa());
@@ -1414,7 +1407,7 @@ async fn analyzeFnType(
         var it = fn_proto.params.iterator(0);
         while (it.next()) |param_node_ptr| {
             const param_node = param_node_ptr.*.cast(ast.Node.ParamDecl).?;
-            const param_type = try await (async comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node) catch unreachable);
+            const param_type = try comp.analyzeTypeExpr(tree_scope, scope, param_node.type_node);
             errdefer param_type.base.deref(comp);
             try params.append(Type.Fn.Param{
                 .typ = param_type,
@@ -1443,7 +1436,7 @@ async fn analyzeFnType(
         comp.gpa().free(key.data.Normal.params);
     };
 
-    const fn_type = try await (async Type.Fn.get(comp, key) catch unreachable);
+    const fn_type = try Type.Fn.get(comp, key);
     key_consumed = true;
     errdefer fn_type.base.base.deref(comp);
 
@@ -1451,12 +1444,12 @@ async fn analyzeFnType(
 }
 
 async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
-    const fn_type = try await (async analyzeFnType(
+    const fn_type = try analyzeFnType(
         comp,
         fn_decl.base.tree_scope,
         fn_decl.base.parent_scope,
         fn_decl.fn_proto,
-    ) catch unreachable);
+    );
     defer fn_type.base.base.deref(comp);
 
     var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
@@ -1468,14 +1461,3 @@ async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
     fn_decl.value = Decl.Fn.Val{ .FnProto = fn_proto_val };
     symbol_name_consumed = true;
 }
-
-// TODO these are hacks which should probably be solved by the language
-fn getAwaitResult(allocator: *Allocator, handle: var) @typeInfo(@typeOf(handle)).Promise.child.? {
-    var result: ?@typeInfo(@typeOf(handle)).Promise.child.? = null;
-    cancel (async<allocator> getAwaitResultAsync(handle, &result) catch unreachable);
-    return result.?;
-}
-
-async fn getAwaitResultAsync(handle: var, out: *?@typeInfo(@typeOf(handle)).Promise.child.?) void {
-    out.* = await handle;
-}
src-self-hosted/ir.zig
@@ -116,16 +116,16 @@ pub const Inst = struct {
             Id.Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
             Id.Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
             Id.Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
-            Id.DeclRef => return await (async @fieldParentPtr(DeclRef, "base", base).analyze(ira) catch unreachable),
-            Id.Ref => return await (async @fieldParentPtr(Ref, "base", base).analyze(ira) catch unreachable),
+            Id.DeclRef => return @fieldParentPtr(DeclRef, "base", base).analyze(ira),
+            Id.Ref => return @fieldParentPtr(Ref, "base", base).analyze(ira),
             Id.DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
             Id.CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
             Id.Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
             Id.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
             Id.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
-            Id.PtrType => return await (async @fieldParentPtr(PtrType, "base", base).analyze(ira) catch unreachable),
-            Id.VarPtr => return await (async @fieldParentPtr(VarPtr, "base", base).analyze(ira) catch unreachable),
-            Id.LoadPtr => return await (async @fieldParentPtr(LoadPtr, "base", base).analyze(ira) catch unreachable),
+            Id.PtrType => return @fieldParentPtr(PtrType, "base", base).analyze(ira),
+            Id.VarPtr => return @fieldParentPtr(VarPtr, "base", base).analyze(ira),
+            Id.LoadPtr => return @fieldParentPtr(LoadPtr, "base", base).analyze(ira),
         }
     }
 
@@ -441,13 +441,13 @@ pub const Inst = struct {
                 .volatility = self.params.volatility,
             });
             const elem_type = target.getKnownType();
-            const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
+            const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                 .child_type = elem_type,
                 .mut = self.params.mut,
                 .vol = self.params.volatility,
                 .size = Type.Pointer.Size.One,
                 .alignment = Type.Pointer.Align.Abi,
-            }) catch unreachable);
+            });
             // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
             // could be a ref of a global, for example
             new_inst.val = IrVal{ .KnownType = &ptr_type.base };
@@ -474,7 +474,7 @@ pub const Inst = struct {
         }
 
         pub async fn analyze(self: *const DeclRef, ira: *Analyze) !*Inst {
-            (await (async ira.irb.comp.resolveDecl(self.params.decl) catch unreachable)) catch |err| switch (err) {
+            (ira.irb.comp.resolveDecl(self.params.decl)) catch |err| switch (err) {
                 error.OutOfMemory => return error.OutOfMemory,
                 else => return error.SemanticAnalysisFailed,
             };
@@ -527,13 +527,13 @@ pub const Inst = struct {
                         self.base.span,
                         Inst.VarPtr.Params{ .var_scope = self.params.var_scope },
                     );
-                    const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
+                    const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                         .child_type = param.typ,
                         .mut = Type.Pointer.Mut.Const,
                         .vol = Type.Pointer.Vol.Non,
                         .size = Type.Pointer.Size.One,
                         .alignment = Type.Pointer.Align.Abi,
-                    }) catch unreachable);
+                    });
                     new_inst.val = IrVal{ .KnownType = &ptr_type.base };
                     return new_inst;
                 },
@@ -661,13 +661,13 @@ pub const Inst = struct {
             } else blk: {
                 break :blk Type.Pointer.Align{ .Abi = {} };
             };
-            const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
+            const ptr_type = try Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
                 .child_type = child_type,
                 .mut = self.params.mut,
                 .vol = self.params.vol,
                 .size = self.params.size,
                 .alignment = alignment,
-            }) catch unreachable);
+            });
             ptr_type.base.base.deref(ira.irb.comp);
 
             return ira.irb.buildConstValue(self.base.scope, self.base.span, &ptr_type.base.base);
@@ -1101,7 +1101,7 @@ pub const Builder = struct {
                     ast.Node.PrefixOp.Op.NegationWrap => return error.Unimplemented,
                     ast.Node.PrefixOp.Op.Resume => return error.Unimplemented,
                     ast.Node.PrefixOp.Op.PtrType => |ptr_info| {
-                        const inst = try await (async irb.genPtrType(prefix_op, ptr_info, scope) catch unreachable);
+                        const inst = try irb.genPtrType(prefix_op, ptr_info, scope);
                         return irb.lvalWrap(scope, inst, lval);
                     },
                     ast.Node.PrefixOp.Op.SliceType => |ptr_info| return error.Unimplemented,
@@ -1112,7 +1112,7 @@ pub const Builder = struct {
                 const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
                 switch (suffix_op.op) {
                     @TagType(ast.Node.SuffixOp.Op).Call => |*call| {
-                        const inst = try await (async irb.genCall(suffix_op, call, scope) catch unreachable);
+                        const inst = try irb.genCall(suffix_op, call, scope);
                         return irb.lvalWrap(scope, inst, lval);
                     },
                     @TagType(ast.Node.SuffixOp.Op).ArrayAccess => |n| return error.Unimplemented,
@@ -1129,7 +1129,7 @@ pub const Builder = struct {
             ast.Node.Id.If => return error.Unimplemented,
             ast.Node.Id.ControlFlowExpression => {
                 const control_flow_expr = @fieldParentPtr(ast.Node.ControlFlowExpression, "base", node);
-                return await (async irb.genControlFlowExpr(control_flow_expr, scope, lval) catch unreachable);
+                return irb.genControlFlowExpr(control_flow_expr, scope, lval);
             },
             ast.Node.Id.Suspend => return error.Unimplemented,
             ast.Node.Id.VarType => return error.Unimplemented,
@@ -1143,7 +1143,7 @@ pub const Builder = struct {
             ast.Node.Id.FloatLiteral => return error.Unimplemented,
             ast.Node.Id.StringLiteral => {
                 const str_lit = @fieldParentPtr(ast.Node.StringLiteral, "base", node);
-                const inst = try await (async irb.genStrLit(str_lit, scope) catch unreachable);
+                const inst = try irb.genStrLit(str_lit, scope);
                 return irb.lvalWrap(scope, inst, lval);
             },
             ast.Node.Id.MultilineStringLiteral => return error.Unimplemented,
@@ -1154,11 +1154,11 @@ pub const Builder = struct {
             ast.Node.Id.Unreachable => return error.Unimplemented,
             ast.Node.Id.Identifier => {
                 const identifier = @fieldParentPtr(ast.Node.Identifier, "base", node);
-                return await (async irb.genIdentifier(identifier, scope, lval) catch unreachable);
+                return irb.genIdentifier(identifier, scope, lval);
             },
             ast.Node.Id.GroupedExpression => {
                 const grouped_expr = @fieldParentPtr(ast.Node.GroupedExpression, "base", node);
-                return await (async irb.genNode(grouped_expr.expr, scope, lval) catch unreachable);
+                return irb.genNode(grouped_expr.expr, scope, lval);
             },
             ast.Node.Id.BuiltinCall => return error.Unimplemented,
             ast.Node.Id.ErrorSetDecl => return error.Unimplemented,
@@ -1167,7 +1167,7 @@ pub const Builder = struct {
             ast.Node.Id.Comptime => return error.Unimplemented,
             ast.Node.Id.Block => {
                 const block = @fieldParentPtr(ast.Node.Block, "base", node);
-                const inst = try await (async irb.genBlock(block, scope) catch unreachable);
+                const inst = try irb.genBlock(block, scope);
                 return irb.lvalWrap(scope, inst, lval);
             },
             ast.Node.Id.DocComment => return error.Unimplemented,
@@ -1188,13 +1188,13 @@ pub const Builder = struct {
     }
 
     async fn genCall(irb: *Builder, suffix_op: *ast.Node.SuffixOp, call: *ast.Node.SuffixOp.Op.Call, scope: *Scope) !*Inst {
-        const fn_ref = try await (async irb.genNode(suffix_op.lhs, scope, LVal.None) catch unreachable);
+        const fn_ref = try irb.genNode(suffix_op.lhs, scope, LVal.None);
 
         const args = try irb.arena().alloc(*Inst, call.params.len);
         var it = call.params.iterator(0);
         var i: usize = 0;
         while (it.next()) |arg_node_ptr| : (i += 1) {
-            args[i] = try await (async irb.genNode(arg_node_ptr.*, scope, LVal.None) catch unreachable);
+            args[i] = try irb.genNode(arg_node_ptr.*, scope, LVal.None);
         }
 
         //bool is_async = node->data.fn_call_expr.is_async;
@@ -1239,7 +1239,7 @@ pub const Builder = struct {
         //} else {
         //    align_value = nullptr;
         //}
-        const child_type = try await (async irb.genNode(prefix_op.rhs, scope, LVal.None) catch unreachable);
+        const child_type = try irb.genNode(prefix_op.rhs, scope, LVal.None);
 
         //uint32_t bit_offset_start = 0;
         //if (node->data.pointer_type.bit_offset_start != nullptr) {
@@ -1366,23 +1366,23 @@ pub const Builder = struct {
             buf[buf.len - 1] = 0;
 
             // next make an array value
-            const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
+            const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
             buf_cleaned = true;
             defer array_val.base.deref(irb.comp);
 
             // then make a pointer value pointing at the first element
-            const ptr_val = try await (async Value.Ptr.createArrayElemPtr(
+            const ptr_val = try Value.Ptr.createArrayElemPtr(
                 irb.comp,
                 array_val,
                 Type.Pointer.Mut.Const,
                 Type.Pointer.Size.Many,
                 0,
-            ) catch unreachable);
+            );
             defer ptr_val.base.deref(irb.comp);
 
             return irb.buildConstValue(scope, src_span, &ptr_val.base);
         } else {
-            const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
+            const array_val = try Value.Array.createOwnedBuffer(irb.comp, buf);
             buf_cleaned = true;
             defer array_val.base.deref(irb.comp);
 
@@ -1438,7 +1438,7 @@ pub const Builder = struct {
                 child_scope = &defer_child_scope.base;
                 continue;
             }
-            const statement_value = try await (async irb.genNode(statement_node, child_scope, LVal.None) catch unreachable);
+            const statement_value = try irb.genNode(statement_node, child_scope, LVal.None);
 
             is_continuation_unreachable = statement_value.isNoReturn();
             if (is_continuation_unreachable) {
@@ -1481,7 +1481,7 @@ pub const Builder = struct {
             try block_scope.incoming_values.append(
                 try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
             );
-            _ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
+            _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
 
             _ = try irb.buildGen(Inst.Br, parent_scope, Span.token(block.rbrace), Inst.Br.Params{
                 .dest_block = block_scope.end_block,
@@ -1496,7 +1496,7 @@ pub const Builder = struct {
             });
         }
 
-        _ = try await (async irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
+        _ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
         return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
     }
 
@@ -1534,7 +1534,7 @@ pub const Builder = struct {
 
                 const outer_scope = irb.begin_scope.?;
                 const return_value = if (control_flow_expr.rhs) |rhs| blk: {
-                    break :blk try await (async irb.genNode(rhs, scope, LVal.None) catch unreachable);
+                    break :blk try irb.genNode(rhs, scope, LVal.None);
                 } else blk: {
                     break :blk try irb.buildConstVoid(scope, src_span, true);
                 };
@@ -1545,7 +1545,7 @@ pub const Builder = struct {
                     const err_block = try irb.createBasicBlock(scope, c"ErrRetErr");
                     const ok_block = try irb.createBasicBlock(scope, c"ErrRetOk");
                     if (!have_err_defers) {
-                        _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
+                        _ = try irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit);
                     }
 
                     const is_err = try irb.build(
@@ -1568,7 +1568,7 @@ pub const Builder = struct {
 
                     try irb.setCursorAtEndAndAppendBlock(err_block);
                     if (have_err_defers) {
-                        _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit) catch unreachable);
+                        _ = try irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ErrorExit);
                     }
                     if (irb.comp.have_err_ret_tracing and !irb.isCompTime(scope)) {
                         _ = try irb.build(Inst.SaveErrRetAddr, scope, src_span, Inst.SaveErrRetAddr.Params{});
@@ -1580,7 +1580,7 @@ pub const Builder = struct {
 
                     try irb.setCursorAtEndAndAppendBlock(ok_block);
                     if (have_err_defers) {
-                        _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
+                        _ = try irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit);
                     }
                     _ = try irb.build(Inst.Br, scope, src_span, Inst.Br.Params{
                         .dest_block = ret_stmt_block,
@@ -1590,7 +1590,7 @@ pub const Builder = struct {
                     try irb.setCursorAtEndAndAppendBlock(ret_stmt_block);
                     return irb.genAsyncReturn(scope, src_span, return_value, false);
                 } else {
-                    _ = try await (async irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit) catch unreachable);
+                    _ = try irb.genDefersForBlock(scope, outer_scope, Scope.Defer.Kind.ScopeExit);
                     return irb.genAsyncReturn(scope, src_span, return_value, false);
                 }
             },
@@ -1610,7 +1610,7 @@ pub const Builder = struct {
         //    return &const_instruction->base;
         //}
 
-        if (await (async irb.comp.getPrimitiveType(name) catch unreachable)) |result| {
+        if (irb.comp.getPrimitiveType(name)) |result| {
             if (result) |primitive_type| {
                 defer primitive_type.base.deref(irb.comp);
                 switch (lval) {
@@ -1628,7 +1628,7 @@ pub const Builder = struct {
             error.OutOfMemory => return error.OutOfMemory,
         }
 
-        switch (await (async irb.findIdent(scope, name) catch unreachable)) {
+        switch (irb.findIdent(scope, name)) {
             Ident.Decl => |decl| {
                 return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
                     .decl = decl,
@@ -1713,11 +1713,11 @@ pub const Builder = struct {
                     };
                     if (generate) {
                         const defer_expr_scope = defer_scope.defer_expr_scope;
-                        const instruction = try await (async irb.genNode(
+                        const instruction = try irb.genNode(
                             defer_expr_scope.expr_node,
                             &defer_expr_scope.base,
                             LVal.None,
-                        ) catch unreachable);
+                        );
                         if (instruction.isNoReturn()) {
                             is_noreturn = true;
                         } else {
@@ -1918,7 +1918,7 @@ pub const Builder = struct {
                 Scope.Id.Root => return Ident.NotFound,
                 Scope.Id.Decls => {
                     const decls = @fieldParentPtr(Scope.Decls, "base", s);
-                    const locked_table = await (async decls.table.acquireRead() catch unreachable);
+                    const locked_table = decls.table.acquireRead();
                     defer locked_table.release();
                     if (locked_table.value.get(name)) |entry| {
                         return Ident{ .Decl = entry.value };
@@ -2534,7 +2534,7 @@ pub async fn gen(
     entry_block.ref(&irb); // Entry block gets a reference because we enter it to begin.
     try irb.setCursorAtEndAndAppendBlock(entry_block);
 
-    const result = try await (async irb.genNode(body_node, scope, LVal.None) catch unreachable);
+    const result = try irb.genNode(body_node, scope, LVal.None);
     if (!result.isNoReturn()) {
         // no need for save_err_ret_addr because this cannot return error
         _ = try irb.genAsyncReturn(scope, Span.token(body_node.lastToken()), result, true);
@@ -2564,7 +2564,7 @@ pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type)
             continue;
         }
 
-        const return_inst = try await (async old_instruction.analyze(&ira) catch unreachable);
+        const return_inst = try old_instruction.analyze(&ira);
         assert(return_inst.val != IrVal.Unknown); // at least the type should be known at this point
         return_inst.linkToParent(old_instruction);
         // Note: if we ever modify the above to handle error.CompileError by continuing analysis,
src-self-hosted/libc_installation.zig
@@ -178,7 +178,7 @@ pub const LibCInstallation = struct {
             },
             else => @compileError("unimplemented: find libc for this OS"),
         }
-        return await (async group.wait() catch unreachable);
+        return group.wait();
     }
 
     async fn findNativeIncludeDirLinux(self: *LibCInstallation, loop: *event.Loop) !void {
@@ -301,11 +301,11 @@ pub const LibCInstallation = struct {
     }
 
     async fn findNativeLibDirLinux(self: *LibCInstallation, loop: *event.Loop) FindError!void {
-        self.lib_dir = try await (async ccPrintFileName(loop, "crt1.o", true) catch unreachable);
+        self.lib_dir = try ccPrintFileName(loop, "crt1.o", true);
     }
 
     async fn findNativeStaticLibDir(self: *LibCInstallation, loop: *event.Loop) FindError!void {
-        self.static_lib_dir = try await (async ccPrintFileName(loop, "crtbegin.o", true) catch unreachable);
+        self.static_lib_dir = try ccPrintFileName(loop, "crtbegin.o", true);
     }
 
     async fn findNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop) FindError!void {
@@ -324,7 +324,7 @@ pub const LibCInstallation = struct {
         for (dyn_tests) |*dyn_test| {
             try group.call(testNativeDynamicLinker, self, loop, dyn_test);
         }
-        try await (async group.wait() catch unreachable);
+        try group.wait();
         for (dyn_tests) |*dyn_test| {
             if (dyn_test.result) |result| {
                 self.dynamic_linker_path = result;
@@ -339,7 +339,7 @@ pub const LibCInstallation = struct {
     };
 
     async fn testNativeDynamicLinker(self: *LibCInstallation, loop: *event.Loop, dyn_test: *DynTest) FindError!void {
-        if (await (async ccPrintFileName(loop, dyn_test.name, false) catch unreachable)) |result| {
+        if (ccPrintFileName(loop, dyn_test.name, false)) |result| {
             dyn_test.result = result;
             return;
         } else |err| switch (err) {
@@ -398,7 +398,7 @@ async fn ccPrintFileName(loop: *event.Loop, o_file: []const u8, want_dirname: bo
     const argv = [_][]const u8{ cc_exe, arg1 };
 
     // TODO This simulates evented I/O for the child process exec
-    await (async loop.yield() catch unreachable);
+    loop.yield();
     const errorable_result = std.ChildProcess.exec(loop.allocator, argv, null, null, 1024 * 1024);
     const exec_result = if (std.debug.runtime_safety) blk: {
         break :blk errorable_result catch unreachable;
src-self-hosted/link.zig
@@ -61,7 +61,7 @@ pub async fn link(comp: *Compilation) !void {
         ctx.libc = ctx.comp.override_libc orelse blk: {
             switch (comp.target) {
                 Target.Native => {
-                    break :blk (await (async comp.zig_compiler.getNativeLibC() catch unreachable)) catch return error.LibCRequiredButNotProvidedOrFound;
+                    break :blk comp.zig_compiler.getNativeLibC() catch return error.LibCRequiredButNotProvidedOrFound;
                 },
                 else => return error.LibCRequiredButNotProvidedOrFound,
             }
@@ -83,7 +83,7 @@ pub async fn link(comp: *Compilation) !void {
 
     {
         // LLD is not thread-safe, so we grab a global lock.
-        const held = await (async comp.zig_compiler.lld_lock.acquire() catch unreachable);
+        const held = comp.zig_compiler.lld_lock.acquire();
         defer held.release();
 
         // Not evented I/O. LLD does its own multithreading internally.
src-self-hosted/main.zig
@@ -466,7 +466,7 @@ fn buildOutputType(allocator: *Allocator, args: []const []const u8, out_type: Co
     comp.link_objects = link_objects;
 
     comp.start();
-    // TODO const process_build_events_handle = try async<loop.allocator> processBuildEvents(comp, color);
+    const frame = try async processBuildEvents(comp, color);
     loop.run();
 }
 
@@ -474,7 +474,7 @@ async fn processBuildEvents(comp: *Compilation, color: errmsg.Color) void {
     var count: usize = 0;
     while (true) {
         // TODO directly awaiting async should guarantee memory allocation elision
-        const build_event = await (async comp.events.get() catch unreachable);
+        const build_event = comp.events.get();
         count += 1;
 
         switch (build_event) {
@@ -577,13 +577,13 @@ fn cmdLibC(allocator: *Allocator, args: []const []const u8) !void {
     var zig_compiler = try ZigCompiler.init(&loop);
     defer zig_compiler.deinit();
 
-    // TODO const handle = try async<loop.allocator> findLibCAsync(&zig_compiler);
+    const frame = async findLibCAsync(&zig_compiler);
 
     loop.run();
 }
 
 async fn findLibCAsync(zig_compiler: *ZigCompiler) void {
-    const libc = (await (async zig_compiler.getNativeLibC() catch unreachable)) catch |err| {
+    const libc = zig_compiler.getNativeLibC() catch |err| {
         stderr.print("unable to find libc: {}\n", @errorName(err)) catch process.exit(1);
         process.exit(1);
     };
@@ -660,24 +660,11 @@ fn cmdFmt(allocator: *Allocator, args: []const []const u8) !void {
     try loop.initMultiThreaded(allocator);
     defer loop.deinit();
 
-    var result: FmtError!void = undefined;
-    // TODO const main_handle = try async<allocator> asyncFmtMainChecked(
-    // TODO     &result,
-    // TODO     &loop,
-    // TODO     &flags,
-    // TODO     color,
-    // TODO );
-    loop.run();
-    return result;
-}
-
-async fn asyncFmtMainChecked(
-    result: *(FmtError!void),
-    loop: *event.Loop,
-    flags: *const Args,
-    color: errmsg.Color,
-) void {
-    result.* = await (async asyncFmtMain(loop, flags, color) catch unreachable);
+    return asyncFmtMain(
+        &flags,
+        color,
+    );
+    // loop.run();
 }
 
 const FmtError = error{
@@ -707,9 +694,6 @@ async fn asyncFmtMain(
     flags: *const Args,
     color: errmsg.Color,
 ) FmtError!void {
-    suspend {
-        resume @handle();
-    }
     var fmt = Fmt{
         .seen = event.Locked(Fmt.SeenMap).init(loop, Fmt.SeenMap.init(loop.allocator)),
         .any_error = false,
@@ -723,7 +707,7 @@ async fn asyncFmtMain(
     for (flags.positionals.toSliceConst()) |file_path| {
         try group.call(fmtPath, &fmt, file_path, check_mode);
     }
-    try await (async group.wait() catch unreachable);
+    try group.wait();
     if (fmt.any_error) {
         process.exit(1);
     }
@@ -734,7 +718,7 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtErro
     defer fmt.loop.allocator.free(file_path);
 
     {
-        const held = await (async fmt.seen.acquire() catch unreachable);
+        const held = fmt.seen.acquire();
         defer held.release();
 
         if (try held.value.put(file_path, {})) |_| return;
@@ -757,7 +741,7 @@ async fn fmtPath(fmt: *Fmt, file_path_ref: []const u8, check_mode: bool) FmtErro
                     try group.call(fmtPath, fmt, full_path, check_mode);
                 }
             }
-            return await (async group.wait() catch unreachable);
+            return group.wait();
         },
         else => {
             // TODO lock stderr printing
src-self-hosted/test.zig
@@ -68,13 +68,13 @@ pub const TestContext = struct {
 
     fn run(self: *TestContext) !void {
         const handle = try self.loop.call(waitForGroup, self);
-        defer cancel handle;
+        defer await handle;
         self.loop.run();
         return self.any_err;
     }
 
     async fn waitForGroup(self: *TestContext) void {
-        self.any_err = await (async self.group.wait() catch unreachable);
+        self.any_err = self.group.wait();
     }
 
     fn testCompileError(
@@ -158,7 +158,7 @@ pub const TestContext = struct {
         const exe_file_2 = try std.mem.dupe(allocator, u8, exe_file);
 
         defer comp.destroy();
-        const build_event = await (async comp.events.get() catch unreachable);
+        const build_event = comp.events.get();
 
         switch (build_event) {
             Compilation.Event.Ok => {
@@ -200,7 +200,7 @@ pub const TestContext = struct {
         text: []const u8,
     ) !void {
         defer comp.destroy();
-        const build_event = await (async comp.events.get() catch unreachable);
+        const build_event = comp.events.get();
 
         switch (build_event) {
             Compilation.Event.Ok => {
src-self-hosted/type.zig
@@ -181,7 +181,7 @@ pub const Type = struct {
     /// If you happen to have an llvm context handy, use getAbiAlignmentInContext instead.
     /// Otherwise, this one will grab one from the pool and then release it.
     pub async fn getAbiAlignment(base: *Type, comp: *Compilation) !u32 {
-        if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
+        if (base.abi_alignment.start()) |ptr| return ptr.*;
 
         {
             const held = try comp.zig_compiler.getAnyLlvmContext();
@@ -189,7 +189,7 @@ pub const Type = struct {
 
             const llvm_context = held.node.data;
 
-            base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
+            base.abi_alignment.data = base.resolveAbiAlignment(comp, llvm_context);
         }
         base.abi_alignment.resolve();
         return base.abi_alignment.data;
@@ -197,9 +197,9 @@ pub const Type = struct {
 
     /// If you have an llvm conext handy, you can use it here.
     pub async fn getAbiAlignmentInContext(base: *Type, comp: *Compilation, llvm_context: *llvm.Context) !u32 {
-        if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
+        if (base.abi_alignment.start()) |ptr| return ptr.*;
 
-        base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
+        base.abi_alignment.data = base.resolveAbiAlignment(comp, llvm_context);
         base.abi_alignment.resolve();
         return base.abi_alignment.data;
     }
@@ -401,7 +401,7 @@ pub const Type = struct {
         /// takes ownership of key.Normal.params on success
         pub async fn get(comp: *Compilation, key: Key) !*Fn {
             {
-                const held = await (async comp.fn_type_table.acquire() catch unreachable);
+                const held = comp.fn_type_table.acquire();
                 defer held.release();
 
                 if (held.value.get(&key)) |entry| {
@@ -430,15 +430,8 @@ pub const Type = struct {
             switch (key.data) {
                 Kind.Generic => |generic| {
                     self.non_key = NonKey{ .Generic = {} };
-                    switch (generic.cc) {
-                        CallingConvention.Async => |async_allocator_type| {
-                            try name_stream.print("async<{}> ", async_allocator_type.name);
-                        },
-                        else => {
-                            const cc_str = ccFnTypeStr(generic.cc);
-                            try name_stream.write(cc_str);
-                        },
-                    }
+                    const cc_str = ccFnTypeStr(generic.cc);
+                    try name_stream.write(cc_str);
                     try name_stream.write("fn(");
                     var param_i: usize = 0;
                     while (param_i < generic.param_count) : (param_i += 1) {
@@ -477,7 +470,7 @@ pub const Type = struct {
             self.base.init(comp, Id.Fn, name_buf.toOwnedSlice());
 
             {
-                const held = await (async comp.fn_type_table.acquire() catch unreachable);
+                const held = comp.fn_type_table.acquire();
                 defer held.release();
 
                 _ = try held.value.put(&self.key, self);
@@ -606,7 +599,7 @@ pub const Type = struct {
 
         pub async fn get(comp: *Compilation, key: Key) !*Int {
             {
-                const held = await (async comp.int_type_table.acquire() catch unreachable);
+                const held = comp.int_type_table.acquire();
                 defer held.release();
 
                 if (held.value.get(&key)) |entry| {
@@ -630,7 +623,7 @@ pub const Type = struct {
             self.base.init(comp, Id.Int, name);
 
             {
-                const held = await (async comp.int_type_table.acquire() catch unreachable);
+                const held = comp.int_type_table.acquire();
                 defer held.release();
 
                 _ = try held.value.put(&self.key, self);
@@ -648,7 +641,7 @@ pub const Type = struct {
 
         pub async fn gcDestroy(self: *Int, comp: *Compilation) void {
             {
-                const held = await (async comp.int_type_table.acquire() catch unreachable);
+                const held = comp.int_type_table.acquire();
                 defer held.release();
 
                 _ = held.value.remove(&self.key).?;
@@ -742,7 +735,7 @@ pub const Type = struct {
 
         pub async fn gcDestroy(self: *Pointer, comp: *Compilation) void {
             {
-                const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+                const held = comp.ptr_type_table.acquire();
                 defer held.release();
 
                 _ = held.value.remove(&self.key).?;
@@ -753,7 +746,7 @@ pub const Type = struct {
 
         pub async fn getAlignAsInt(self: *Pointer, comp: *Compilation) u32 {
             switch (self.key.alignment) {
-                Align.Abi => return await (async self.key.child_type.getAbiAlignment(comp) catch unreachable),
+                Align.Abi => return self.key.child_type.getAbiAlignment(comp),
                 Align.Override => |alignment| return alignment,
             }
         }
@@ -766,14 +759,14 @@ pub const Type = struct {
             switch (key.alignment) {
                 Align.Abi => {},
                 Align.Override => |alignment| {
-                    const abi_align = try await (async key.child_type.getAbiAlignment(comp) catch unreachable);
+                    const abi_align = try key.child_type.getAbiAlignment(comp);
                     if (abi_align == alignment) {
                         normal_key.alignment = Align.Abi;
                     }
                 },
             }
             {
-                const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+                const held = comp.ptr_type_table.acquire();
                 defer held.release();
 
                 if (held.value.get(&normal_key)) |entry| {
@@ -828,7 +821,7 @@ pub const Type = struct {
             self.base.init(comp, Id.Pointer, name);
 
             {
-                const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+                const held = comp.ptr_type_table.acquire();
                 defer held.release();
 
                 _ = try held.value.put(&self.key, self);
@@ -873,7 +866,7 @@ pub const Type = struct {
             errdefer key.elem_type.base.deref(comp);
 
             {
-                const held = await (async comp.array_type_table.acquire() catch unreachable);
+                const held = comp.array_type_table.acquire();
                 defer held.release();
 
                 if (held.value.get(&key)) |entry| {
@@ -896,7 +889,7 @@ pub const Type = struct {
             self.base.init(comp, Id.Array, name);
 
             {
-                const held = await (async comp.array_type_table.acquire() catch unreachable);
+                const held = comp.array_type_table.acquire();
                 defer held.release();
 
                 _ = try held.value.put(&self.key, self);
src-self-hosted/value.zig
@@ -346,13 +346,13 @@ pub const Value = struct {
             errdefer array_val.base.deref(comp);
 
             const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
-            const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
+            const ptr_type = try Type.Pointer.get(comp, Type.Pointer.Key{
                 .child_type = elem_type,
                 .mut = mut,
                 .vol = Type.Pointer.Vol.Non,
                 .size = size,
                 .alignment = Type.Pointer.Align.Abi,
-            }) catch unreachable);
+            });
             var ptr_type_consumed = false;
             errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
 
@@ -428,10 +428,10 @@ pub const Value = struct {
             const u8_type = Type.Int.get_u8(comp);
             defer u8_type.base.base.deref(comp);
 
-            const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
+            const array_type = try Type.Array.get(comp, Type.Array.Key{
                 .elem_type = &u8_type.base,
                 .len = buffer.len,
-            }) catch unreachable);
+            });
             errdefer array_type.base.base.deref(comp);
 
             const self = try comp.gpa().create(Value.Array);