Commit 717b0e8275

Andrew Kelley <andrew@ziglang.org>
2020-09-01 08:34:58
stage2: introduce the ability for Scope.Block to be comptime

This gives zir_sema analysis the ability to check whether the current scope is expected to be comptime.
Parent: 2614067
Changed files (6)
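
For orientation, a minimal example of the source form this lowering concerns, written as ordinary user code (assumed for illustration, not taken from the commit's test suite): a `comptime` expression inside a function, which astgen previously rejected with "TODO implement astgen.expr for .Comptime" and now lowers through the new comptimeKeyword/comptimeExpr path.

    const std = @import("std");

    fn fib(n: u32) u32 {
        return if (n < 2) n else fib(n - 1) + fib(n - 2);
    }

    test "comptime keyword as an expression" {
        // `comptime fib(10)` is the .Comptime AST node handled by the new code path.
        const x = comptime fib(10);
        std.debug.assert(x == 55);
    }
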
src-self-hosted/astgen.zig
@@ -258,7 +258,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
         .OptionalType => return rlWrap(mod, scope, rl, try optionalType(mod, scope, node.castTag(.OptionalType).?)),
         .UnwrapOptional => return unwrapOptional(mod, scope, rl, node.castTag(.UnwrapOptional).?),
         .Block => return rlWrapVoid(mod, scope, rl, node, try blockExpr(mod, scope, node.castTag(.Block).?)),
-        .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?),
+        .LabeledBlock => return labeledBlockExpr(mod, scope, rl, node.castTag(.LabeledBlock).?, .block),
         .Break => return rlWrap(mod, scope, rl, try breakExpr(mod, scope, node.castTag(.Break).?)),
         .PtrType => return rlWrap(mod, scope, rl, try ptrType(mod, scope, node.castTag(.PtrType).?)),
         .GroupedExpression => return expr(mod, scope, rl, node.castTag(.GroupedExpression).?.expr),
@@ -276,6 +276,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
         .For => return forExpr(mod, scope, rl, node.castTag(.For).?),
         .ArrayAccess => return arrayAccess(mod, scope, rl, node.castTag(.ArrayAccess).?),
         .Catch => return catchExpr(mod, scope, rl, node.castTag(.Catch).?),
+        .Comptime => return comptimeKeyword(mod, scope, rl, node.castTag(.Comptime).?),
 
         .Defer => return mod.failNode(scope, node, "TODO implement astgen.expr for .Defer", .{}),
         .Range => return mod.failNode(scope, node, "TODO implement astgen.expr for .Range", .{}),
@@ -294,11 +295,46 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
         .AnyType => return mod.failNode(scope, node, "TODO implement astgen.expr for .AnyType", .{}),
         .FnProto => return mod.failNode(scope, node, "TODO implement astgen.expr for .FnProto", .{}),
         .ContainerDecl => return mod.failNode(scope, node, "TODO implement astgen.expr for .ContainerDecl", .{}),
-        .Comptime => return mod.failNode(scope, node, "TODO implement astgen.expr for .Comptime", .{}),
         .Nosuspend => return mod.failNode(scope, node, "TODO implement astgen.expr for .Nosuspend", .{}),
     }
 }
 
+fn comptimeKeyword(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Comptime) InnerError!*zir.Inst {
+    const tracy = trace(@src());
+    defer tracy.end();
+
+    return comptimeExpr(mod, scope, rl, node.expr);
+}
+
+pub fn comptimeExpr(mod: *Module, parent_scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerError!*zir.Inst {
+    const tree = parent_scope.tree();
+    const src = tree.token_locs[node.firstToken()].start;
+
+    // Optimization for labeled blocks: don't need to have 2 layers of blocks, we can reuse the existing one.
+    if (node.castTag(.LabeledBlock)) |block_node| {
+        return labeledBlockExpr(mod, parent_scope, rl, block_node, .block_comptime);
+    }
+
+    // Make a scope to collect generated instructions in the sub-expression.
+    var block_scope: Scope.GenZIR = .{
+        .parent = parent_scope,
+        .decl = parent_scope.decl().?,
+        .arena = parent_scope.arena(),
+        .instructions = .{},
+    };
+    defer block_scope.instructions.deinit(mod.gpa);
+
+    // No need to capture the result here because block_comptime_flat implies that the final
+    // instruction is the block's result value.
+    _ = try expr(mod, &block_scope.base, rl, node);
+
+    const block = try addZIRInstBlock(mod, parent_scope, src, .block_comptime_flat, .{
+        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
+    });
+
+    return &block.base;
+}
+
 fn breakExpr(mod: *Module, parent_scope: *Scope, node: *ast.Node.ControlFlowExpression) InnerError!*zir.Inst {
     const tree = parent_scope.tree();
     const src = tree.token_locs[node.ltoken].start;
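
comptimeExpr distinguishes two shapes, and the difference shows up directly in user code. An illustrative example (assumed, not from the commit): an unlabeled sub-expression is wrapped in a block_comptime_flat, whose result is simply its final instruction, while a labeled block reuses labeledBlockExpr and becomes a single block_comptime, since its breaks already carry the result.

    const std = @import("std");

    fn compute() u32 {
        return 41;
    }

    test "two comptime expression shapes" {
        // Unlabeled sub-expression: wrapped in a block_comptime_flat by comptimeExpr.
        const a = comptime (compute() + 1);

        // Labeled block under `comptime`: lowered by labeledBlockExpr with the
        // .block_comptime tag, avoiding a second layer of blocks.
        const b = comptime blk: {
            var sum: u32 = 0;
            var i: u32 = 0;
            while (i < 7) : (i += 1) sum += i;
            break :blk sum;
        };

        std.debug.assert(a == 42 and b == 21);
    }
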
@@ -360,10 +396,13 @@ fn labeledBlockExpr(
     parent_scope: *Scope,
     rl: ResultLoc,
     block_node: *ast.Node.LabeledBlock,
+    zir_tag: zir.Inst.Tag,
 ) InnerError!*zir.Inst {
     const tracy = trace(@src());
     defer tracy.end();
 
+    assert(zir_tag == .block or zir_tag == .block_comptime);
+
     const tree = parent_scope.tree();
     const src = tree.token_locs[block_node.lbrace].start;
 
@@ -373,7 +412,7 @@ fn labeledBlockExpr(
     const block_inst = try gen_zir.arena.create(zir.Inst.Block);
     block_inst.* = .{
         .base = .{
-            .tag = .block,
+            .tag = zir_tag,
             .src = src,
         },
         .positionals = .{
@@ -773,7 +812,7 @@ fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch)
         .else_body = undefined, // populated below
     }, .{});
 
-    const block = try addZIRInstBlock(mod, scope, src, .{
+    const block = try addZIRInstBlock(mod, scope, src, .block, .{
         .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
     });
 
@@ -946,7 +985,7 @@ fn boolBinOp(
         .else_body = undefined, // populated below
     }, .{});
 
-    const block = try addZIRInstBlock(mod, scope, src, .{
+    const block = try addZIRInstBlock(mod, scope, src, .block, .{
         .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
     });
 
@@ -1095,7 +1134,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
         .else_body = undefined, // populated below
     }, .{});
 
-    const block = try addZIRInstBlock(mod, scope, if_src, .{
+    const block = try addZIRInstBlock(mod, scope, if_src, .block, .{
         .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
     });
 
@@ -1218,7 +1257,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
         .then_body = undefined, // populated below
         .else_body = undefined, // populated below
     }, .{});
-    const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .{
+    const cond_block = try addZIRInstBlock(mod, &loop_scope.base, while_src, .block, .{
         .instructions = try loop_scope.arena.dupe(*zir.Inst, continue_scope.instructions.items),
     });
     // TODO avoid emitting the continue expr when there
@@ -1231,7 +1270,7 @@ fn whileExpr(mod: *Module, scope: *Scope, rl: ResultLoc, while_node: *ast.Node.W
     const loop = try addZIRInstLoop(mod, &expr_scope.base, while_src, .{
         .instructions = try expr_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
     });
-    const while_block = try addZIRInstBlock(mod, scope, while_src, .{
+    const while_block = try addZIRInstBlock(mod, scope, while_src, .block, .{
         .instructions = try expr_scope.arena.dupe(*zir.Inst, expr_scope.instructions.items),
     });
 
@@ -1365,7 +1404,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
         .then_body = undefined, // populated below
         .else_body = undefined, // populated below
     }, .{});
-    const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .{
+    const cond_block = try addZIRInstBlock(mod, &loop_scope.base, for_src, .block, .{
         .instructions = try loop_scope.arena.dupe(*zir.Inst, cond_scope.instructions.items),
     });
 
@@ -1382,7 +1421,7 @@ fn forExpr(mod: *Module, scope: *Scope, rl: ResultLoc, for_node: *ast.Node.For)
     const loop = try addZIRInstLoop(mod, &for_scope.base, for_src, .{
         .instructions = try for_scope.arena.dupe(*zir.Inst, loop_scope.instructions.items),
     });
-    const for_block = try addZIRInstBlock(mod, scope, for_src, .{
+    const for_block = try addZIRInstBlock(mod, scope, for_src, .block, .{
         .instructions = try for_scope.arena.dupe(*zir.Inst, for_scope.instructions.items),
     });
 
@@ -2260,6 +2299,30 @@ pub fn addZIRBinOp(
     return &inst.base;
 }
 
+pub fn addZIRInstBlock(
+    mod: *Module,
+    scope: *Scope,
+    src: usize,
+    tag: zir.Inst.Tag,
+    body: zir.Module.Body,
+) !*zir.Inst.Block {
+    const gen_zir = scope.getGenZIR();
+    try gen_zir.instructions.ensureCapacity(mod.gpa, gen_zir.instructions.items.len + 1);
+    const inst = try gen_zir.arena.create(zir.Inst.Block);
+    inst.* = .{
+        .base = .{
+            .tag = tag,
+            .src = src,
+        },
+        .positionals = .{
+            .body = body,
+        },
+        .kw_args = .{},
+    };
+    gen_zir.instructions.appendAssumeCapacity(&inst.base);
+    return inst;
+}
+
 pub fn addZIRInst(
     mod: *Module,
     scope: *Scope,
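
The new addZIRInstBlock builds the zir.Inst.Block by hand rather than going through the generic helpers, presumably because the tag now varies per call site instead of being implied by the instruction type. The updated call sites all take the same shape (fragment for illustration, reusing only names that appear in this diff; not standalone code):

    const block = try addZIRInstBlock(mod, scope, src, .block, .{
        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
    });
    // comptimeExpr passes .block_comptime_flat instead, so no break instruction is
    // needed and the body's final instruction becomes the block's result.
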
@@ -2278,12 +2341,6 @@ pub fn addZIRInstConst(mod: *Module, scope: *Scope, src: usize, typed_value: Typ
     return addZIRInst(mod, scope, src, zir.Inst.Const, P{ .typed_value = typed_value }, .{});
 }
 
-/// TODO The existence of this function is a workaround for a bug in stage1.
-pub fn addZIRInstBlock(mod: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Block {
-    const P = std.meta.fieldInfo(zir.Inst.Block, "positionals").field_type;
-    return addZIRInstSpecial(mod, scope, src, zir.Inst.Block, P{ .body = body }, .{});
-}
-
 /// TODO The existence of this function is a workaround for a bug in stage1.
 pub fn addZIRInstLoop(mod: *Module, scope: *Scope, src: usize, body: zir.Module.Body) !*zir.Inst.Loop {
     const P = std.meta.fieldInfo(zir.Inst.Loop, "positionals").field_type;
src-self-hosted/ir.zig
@@ -189,7 +189,7 @@ pub const Inst = struct {
     }
 
     pub fn cmpOperator(base: *Inst) ?std.math.CompareOperator {
-        return switch (self.base.tag) {
+        return switch (base.tag) {
             .cmp_lt => .lt,
             .cmp_lte => .lte,
             .cmp_eq => .eq,
@@ -220,6 +220,14 @@ pub const Inst = struct {
         unreachable;
     }
 
+    pub fn breakBlock(base: *Inst) ?*Block {
+        return switch (base.tag) {
+            .br => base.castTag(.br).?.block,
+            .brvoid => base.castTag(.brvoid).?.block,
+            else => null,
+        };
+    }
+
     pub const NoOp = struct {
         base: Inst,
 
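
breakBlock is consumed by the new analyzeInstBlock logic later in this commit; the usage is the usual optional unwrap. Condensed from the zir_sema hunk below (fragment, not standalone):

    // If the child block's last instruction is a br/brvoid targeting this block,
    // the Block instruction itself can be elided and the break value used directly.
    if (last_inst.breakBlock()) |target| {
        if (target == block_inst) {
            // splice the child's instructions into the parent, dropping the break
        }
    }
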
src-self-hosted/Module.zig
@@ -725,6 +725,7 @@ pub const Scope = struct {
         /// Points to the arena allocator of DeclAnalysis
         arena: *Allocator,
         label: ?Label = null,
+        is_comptime: bool,
 
         pub const Label = struct {
             zir_block: *zir.Inst.Block,
@@ -1320,6 +1321,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                 .decl = decl,
                 .instructions = .{},
                 .arena = &decl_arena.allocator,
+                .is_comptime = false,
             };
             defer block_scope.instructions.deinit(self.gpa);
 
@@ -1457,6 +1459,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                 .decl = decl,
                 .instructions = .{},
                 .arena = &decl_arena.allocator,
+                .is_comptime = true,
             };
             defer block_scope.instructions.deinit(self.gpa);
 
@@ -1528,7 +1531,6 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                 defer gen_scope.instructions.deinit(self.gpa);
                 const src = tree.token_locs[init_node.firstToken()].start;
 
-                // TODO comptime scope here
                 const init_inst = try astgen.expr(self, &gen_scope.base, .none, init_node);
                 _ = try astgen.addZIRUnOp(self, &gen_scope.base, src, .@"return", init_inst);
 
@@ -1538,6 +1540,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                     .decl = decl,
                     .instructions = .{},
                     .arena = &gen_scope_arena.allocator,
+                    .is_comptime = true,
                 };
                 defer inner_block.instructions.deinit(self.gpa);
                 try zir_sema.analyzeBody(self, &inner_block.base, .{ .instructions = gen_scope.instructions.items });
@@ -1628,8 +1631,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
             };
             defer gen_scope.instructions.deinit(self.gpa);
 
-            // TODO comptime scope here
-            _ = try astgen.expr(self, &gen_scope.base, .none, comptime_decl.expr);
+            _ = try astgen.comptimeExpr(self, &gen_scope.base, .none, comptime_decl.expr);
 
             var block_scope: Scope.Block = .{
                 .parent = null,
@@ -1637,6 +1639,7 @@ fn astGenAndAnalyzeDecl(self: *Module, decl: *Decl) !bool {
                 .decl = decl,
                 .instructions = .{},
                 .arena = &analysis_arena.allocator,
+                .is_comptime = true,
             };
             defer block_scope.instructions.deinit(self.gpa);
 
@@ -2007,6 +2010,7 @@ fn analyzeFnBody(self: *Module, decl: *Decl, func: *Fn) !void {
         .decl = decl,
         .instructions = .{},
         .arena = &arena.allocator,
+        .is_comptime = false,
     };
     defer inner_block.instructions.deinit(self.gpa);
 
@@ -3432,6 +3436,7 @@ pub fn addSafetyCheck(mod: *Module, parent_block: *Scope.Block, ok: *Inst, panic
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
+        .is_comptime = parent_block.is_comptime,
     };
     defer fail_block.instructions.deinit(mod.gpa);
 
src-self-hosted/zir.zig
@@ -78,6 +78,13 @@ pub const Inst = struct {
         bitor,
         /// A labeled block of code, which can return a value.
         block,
+        /// A block of code, which can return a value. There are no instructions that break out of
+        /// this block; it is implied that the final instruction is the result.
+        block_flat,
+        /// Same as `block` but additionally makes the inner instructions execute at comptime.
+        block_comptime,
+        /// Same as `block_flat` but additionally makes the inner instructions execute at comptime.
+        block_comptime_flat,
         /// Boolean NOT. See also `bitnot`.
         boolnot,
         /// Return a value from a `Block`.
@@ -338,9 +345,14 @@ pub const Inst = struct {
                 .merge_error_sets,
                 => BinOp,
 
+                .block,
+                .block_flat,
+                .block_comptime,
+                .block_comptime_flat,
+                => Block,
+
                 .arg => Arg,
                 .array_type_sentinel => ArrayTypeSentinel,
-                .block => Block,
                 .@"break" => Break,
                 .breakvoid => BreakVoid,
                 .call => Call,
@@ -392,6 +404,9 @@ pub const Inst = struct {
                 .bitcast_result_ptr,
                 .bitor,
                 .block,
+                .block_flat,
+                .block_comptime,
+                .block_comptime_flat,
                 .boolnot,
                 .breakpoint,
                 .call,
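
The four tags form a two-by-two matrix: break-carrying versus flat (is the result delivered by a break, or is it simply the final instruction?) crossed with runtime versus comptime. As a summary, two hypothetical classification helpers, not part of the commit, written as if they lived next to the Tag enum above:

    pub fn blockTagIsComptime(tag: Tag) bool {
        return switch (tag) {
            .block_comptime, .block_comptime_flat => true,
            .block, .block_flat => false,
            else => unreachable, // not a block tag
        };
    }

    pub fn blockTagIsFlat(tag: Tag) bool {
        return switch (tag) {
            .block_flat, .block_comptime_flat => true,
            .block, .block_comptime => false,
            else => unreachable, // not a block tag
        };
    }
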
src-self-hosted/zir_sema.zig
@@ -31,7 +31,10 @@ pub fn analyzeInst(mod: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!
         .arg => return analyzeInstArg(mod, scope, old_inst.castTag(.arg).?),
         .bitcast_ref => return analyzeInstBitCastRef(mod, scope, old_inst.castTag(.bitcast_ref).?),
         .bitcast_result_ptr => return analyzeInstBitCastResultPtr(mod, scope, old_inst.castTag(.bitcast_result_ptr).?),
-        .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?),
+        .block => return analyzeInstBlock(mod, scope, old_inst.castTag(.block).?, false),
+        .block_comptime => return analyzeInstBlock(mod, scope, old_inst.castTag(.block_comptime).?, true),
+        .block_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_flat).?, false),
+        .block_comptime_flat => return analyzeInstBlockFlat(mod, scope, old_inst.castTag(.block_comptime_flat).?, true),
         .@"break" => return analyzeInstBreak(mod, scope, old_inst.castTag(.@"break").?),
         .breakpoint => return analyzeInstBreakpoint(mod, scope, old_inst.castTag(.breakpoint).?),
         .breakvoid => return analyzeInstBreakVoid(mod, scope, old_inst.castTag(.breakvoid).?),
@@ -147,6 +150,7 @@ pub fn analyzeBody(mod: *Module, scope: *Scope, body: zir.Module.Body) !void {
     }
 }
 
+/// TODO improve this to use .block_comptime_flat
 pub fn analyzeBodyValueAsType(mod: *Module, block_scope: *Scope.Block, body: zir.Module.Body) !Type {
     try analyzeBody(mod, &block_scope.base, body);
     for (block_scope.instructions.items) |inst| {
@@ -517,6 +521,7 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
+        .is_comptime = parent_block.is_comptime,
     };
     defer child_block.instructions.deinit(mod.gpa);
 
@@ -529,7 +534,29 @@ fn analyzeInstLoop(mod: *Module, scope: *Scope, inst: *zir.Inst.Loop) InnerError
     return &loop_inst.base;
 }
 
-fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerError!*Inst {
+fn analyzeInstBlockFlat(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
+    const parent_block = scope.cast(Scope.Block).?;
+
+    var child_block: Scope.Block = .{
+        .parent = parent_block,
+        .func = parent_block.func,
+        .decl = parent_block.decl,
+        .instructions = .{},
+        .arena = parent_block.arena,
+        .label = null,
+        .is_comptime = parent_block.is_comptime or is_comptime,
+    };
+    defer child_block.instructions.deinit(mod.gpa);
+
+    try analyzeBody(mod, &child_block.base, inst.positionals.body);
+
+    const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
+    try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
+
+    return copied_instructions[copied_instructions.len - 1];
+}
+
+fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block, is_comptime: bool) InnerError!*Inst {
     const parent_block = scope.cast(Scope.Block).?;
 
     // Reserve space for a Block instruction so that generated Break instructions can
@@ -557,6 +584,7 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerErr
             .results = .{},
             .block_inst = block_inst,
         }),
+        .is_comptime = is_comptime or parent_block.is_comptime,
     };
     const label = &child_block.label.?;
 
@@ -569,6 +597,28 @@ fn analyzeInstBlock(mod: *Module, scope: *Scope, inst: *zir.Inst.Block) InnerErr
     assert(child_block.instructions.items.len != 0);
     assert(child_block.instructions.items[child_block.instructions.items.len - 1].ty.isNoReturn());
 
+    if (label.results.items.len == 0) {
+        // No need for a block instruction. We can put the new instructions directly into the parent block.
+        const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items);
+        try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
+        return copied_instructions[copied_instructions.len - 1];
+    }
+    if (label.results.items.len == 1) {
+        const last_inst_index = child_block.instructions.items.len - 1;
+        const last_inst = child_block.instructions.items[last_inst_index];
+        if (last_inst.breakBlock()) |br_block| {
+            if (br_block == block_inst) {
+                // No need for a block instruction. We can put the new instructions directly into the parent block.
+                // Here we omit the break instruction.
+                const copied_instructions = try parent_block.arena.dupe(*Inst, child_block.instructions.items[0..last_inst_index]);
+                try parent_block.instructions.appendSlice(mod.gpa, copied_instructions);
+                return label.results.items[0];
+            }
+        }
+    }
+    // It should be impossible to have the number of results be > 1 in a comptime scope.
+    assert(!child_block.is_comptime); // We should have already got a compile error in the condbr condition.
+
     // Need to set the type and emit the Block instruction. This allows machine code generation
     // to emit a jump instruction to after the block when it encounters the break.
     try parent_block.instructions.append(mod.gpa, &block_inst.base);
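
In user-code terms, the two early returns above cover blocks that never need a runtime Block instruction: a body whose only exit is a single trailing break, and a body that ends in an unconditional noreturn instruction. A shape where the single-break case can apply (illustrative, not from the commit's tests):

    const std = @import("std");

    test "block with a single trailing break" {
        // The child's instructions are spliced into the parent and the break is
        // dropped, so no Block/Break pair needs to reach codegen.
        const x = blk: {
            const y: u32 = 40;
            break :blk y + 2;
        };
        std.debug.assert(x == 42);
    }

Only blocks with more than one possible result still emit a real Block instruction, and the assert records that this case cannot be reached in a comptime scope, since the condbr producing the second result would already have been a compile error.
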
@@ -1083,7 +1133,7 @@ fn analyzeInstElemPtr(mod: *Module, scope: *Scope, inst: *zir.Inst.ElemPtr) Inne
     const array_ptr = try resolveInst(mod, scope, inst.positionals.array_ptr);
     const uncasted_index = try resolveInst(mod, scope, inst.positionals.index);
     const elem_index = try mod.coerce(scope, Type.initTag(.usize), uncasted_index);
-    
+
     const elem_ty = switch (array_ptr.ty.zigTypeTag()) {
         .Pointer => array_ptr.ty.elemType(),
         else => return mod.fail(scope, inst.positionals.array_ptr.src, "expected pointer, found '{}'", .{array_ptr.ty}),
@@ -1376,6 +1426,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
+        .is_comptime = parent_block.is_comptime,
     };
     defer true_block.instructions.deinit(mod.gpa);
     try analyzeBody(mod, &true_block.base, inst.positionals.then_body);
@@ -1386,6 +1437,7 @@ fn analyzeInstCondBr(mod: *Module, scope: *Scope, inst: *zir.Inst.CondBr) InnerE
         .decl = parent_block.decl,
         .instructions = .{},
         .arena = parent_block.arena,
+        .is_comptime = parent_block.is_comptime,
     };
     defer false_block.instructions.deinit(mod.gpa);
     try analyzeBody(mod, &false_block.base, inst.positionals.else_body);
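
This commit only plumbs is_comptime through the Scope.Block constructors; nothing in zir_sema consults it yet. A hypothetical sketch of the kind of check it enables (the helper, its name, and its message are assumptions, not part of the commit; mod.fail with this signature appears elsewhere in zir_sema):

    fn requireRuntimeAllowed(mod: *Module, block: *Scope.Block, src: usize) !void {
        if (block.is_comptime) {
            // A runtime-only operation inside a comptime scope is a compile error.
            return mod.fail(&block.base, src, "unable to evaluate constant expression", .{});
        }
    }
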
test/stage2/test.zig
@@ -274,7 +274,7 @@ pub fn addCases(ctx: *TestContext) !void {
     }
 
     {
-        var case = ctx.exe("substracting numbers at runtime", linux_x64);
+        var case = ctx.exe("subtracting numbers at runtime", linux_x64);
         case.addCompareOutput(
             \\export fn _start() noreturn {
             \\    sub(7, 4);