Commit 488df7f1d1
Changed files (3)
src-self-hosted
src-self-hosted/astgen.zig
@@ -17,6 +17,9 @@ pub const ResultLoc = union(enum) {
discard,
/// The expression has an inferred type, and it will be evaluated as an rvalue.
none,
+ /// The expression must generate a pointer rather than a value. For example, the left hand side
+ /// of an assignment uses an "LValue" result location.
+ lvalue,
/// The expression will be type coerced into this type, but it will be evaluated as an rvalue.
ty: *zir.Inst,
/// The expression must store its result into this typed pointer.
@@ -46,16 +49,43 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
switch (node.tag) {
.VarDecl => unreachable, // Handled in `blockExpr`.
.Assign => unreachable, // Handled in `blockExpr`.
-
- .Add => return arithmetic(mod, scope, rl, node.castTag(.Add).?, .add),
- .Sub => return arithmetic(mod, scope, rl, node.castTag(.Sub).?, .sub),
-
- .BangEqual => return cmp(mod, scope, rl, node.castTag(.BangEqual).?, .cmp_neq),
- .EqualEqual => return cmp(mod, scope, rl, node.castTag(.EqualEqual).?, .cmp_eq),
- .GreaterThan => return cmp(mod, scope, rl, node.castTag(.GreaterThan).?, .cmp_gt),
- .GreaterOrEqual => return cmp(mod, scope, rl, node.castTag(.GreaterOrEqual).?, .cmp_gte),
- .LessThan => return cmp(mod, scope, rl, node.castTag(.LessThan).?, .cmp_lt),
- .LessOrEqual => return cmp(mod, scope, rl, node.castTag(.LessOrEqual).?, .cmp_lte),
+ .AssignBitAnd => unreachable, // Handled in `blockExpr`.
+ .AssignBitOr => unreachable, // Handled in `blockExpr`.
+ .AssignBitShiftLeft => unreachable, // Handled in `blockExpr`.
+ .AssignBitShiftRight => unreachable, // Handled in `blockExpr`.
+ .AssignBitXor => unreachable, // Handled in `blockExpr`.
+ .AssignDiv => unreachable, // Handled in `blockExpr`.
+ .AssignSub => unreachable, // Handled in `blockExpr`.
+ .AssignSubWrap => unreachable, // Handled in `blockExpr`.
+ .AssignMod => unreachable, // Handled in `blockExpr`.
+ .AssignAdd => unreachable, // Handled in `blockExpr`.
+ .AssignAddWrap => unreachable, // Handled in `blockExpr`.
+ .AssignMul => unreachable, // Handled in `blockExpr`.
+ .AssignMulWrap => unreachable, // Handled in `blockExpr`.
+
+ .Add => return simpleBinOp(mod, scope, rl, node.castTag(.Add).?, .add),
+ .AddWrap => return simpleBinOp(mod, scope, rl, node.castTag(.AddWrap).?, .addwrap),
+ .Sub => return simpleBinOp(mod, scope, rl, node.castTag(.Sub).?, .sub),
+ .SubWrap => return simpleBinOp(mod, scope, rl, node.castTag(.SubWrap).?, .subwrap),
+ .Mul => return simpleBinOp(mod, scope, rl, node.castTag(.Mul).?, .mul),
+ .MulWrap => return simpleBinOp(mod, scope, rl, node.castTag(.MulWrap).?, .mulwrap),
+ .Div => return simpleBinOp(mod, scope, rl, node.castTag(.Div).?, .div),
+ .Mod => return simpleBinOp(mod, scope, rl, node.castTag(.Mod).?, .mod_rem),
+ .BitAnd => return simpleBinOp(mod, scope, rl, node.castTag(.BitAnd).?, .bitand),
+ .BitOr => return simpleBinOp(mod, scope, rl, node.castTag(.BitOr).?, .bitor),
+ .BitShiftLeft => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftLeft).?, .shl),
+ .BitShiftRight => return simpleBinOp(mod, scope, rl, node.castTag(.BitShiftRight).?, .shr),
+ .BitXor => return simpleBinOp(mod, scope, rl, node.castTag(.BitXor).?, .xor),
+
+ .BangEqual => return simpleBinOp(mod, scope, rl, node.castTag(.BangEqual).?, .cmp_neq),
+ .EqualEqual => return simpleBinOp(mod, scope, rl, node.castTag(.EqualEqual).?, .cmp_eq),
+ .GreaterThan => return simpleBinOp(mod, scope, rl, node.castTag(.GreaterThan).?, .cmp_gt),
+ .GreaterOrEqual => return simpleBinOp(mod, scope, rl, node.castTag(.GreaterOrEqual).?, .cmp_gte),
+ .LessThan => return simpleBinOp(mod, scope, rl, node.castTag(.LessThan).?, .cmp_lt),
+ .LessOrEqual => return simpleBinOp(mod, scope, rl, node.castTag(.LessOrEqual).?, .cmp_lte),
+
+ .ArrayCat => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayCat).?, .array_cat),
+ .ArrayMult => return simpleBinOp(mod, scope, rl, node.castTag(.ArrayMult).?, .array_mul),
.Identifier => return rlWrap(mod, scope, rl, try identifier(mod, scope, node.castTag(.Identifier).?)),
.Asm => return rlWrap(mod, scope, rl, try assembly(mod, scope, node.castTag(.Asm).?)),
@@ -99,6 +129,20 @@ pub fn blockExpr(mod: *Module, parent_scope: *Scope, block_node: *ast.Node.Block
const ass = statement.castTag(.Assign).?;
try assign(mod, scope, ass);
},
+ .AssignBitAnd => try assignOp(mod, scope, statement.castTag(.AssignBitAnd).?, .bitand),
+ .AssignBitOr => try assignOp(mod, scope, statement.castTag(.AssignBitOr).?, .bitor),
+ .AssignBitShiftLeft => try assignOp(mod, scope, statement.castTag(.AssignBitShiftLeft).?, .shl),
+ .AssignBitShiftRight => try assignOp(mod, scope, statement.castTag(.AssignBitShiftRight).?, .shr),
+ .AssignBitXor => try assignOp(mod, scope, statement.castTag(.AssignBitXor).?, .xor),
+ .AssignDiv => try assignOp(mod, scope, statement.castTag(.AssignDiv).?, .div),
+ .AssignSub => try assignOp(mod, scope, statement.castTag(.AssignSub).?, .sub),
+ .AssignSubWrap => try assignOp(mod, scope, statement.castTag(.AssignSubWrap).?, .subwrap),
+ .AssignMod => try assignOp(mod, scope, statement.castTag(.AssignMod).?, .mod_rem),
+ .AssignAdd => try assignOp(mod, scope, statement.castTag(.AssignAdd).?, .add),
+ .AssignAddWrap => try assignOp(mod, scope, statement.castTag(.AssignAddWrap).?, .addwrap),
+ .AssignMul => try assignOp(mod, scope, statement.castTag(.AssignMul).?, .mul),
+ .AssignMulWrap => try assignOp(mod, scope, statement.castTag(.AssignMulWrap).?, .mulwrap),
+
else => {
const possibly_unused_result = try expr(mod, scope, .none, statement);
const src = scope.tree().token_locs[statement.firstToken()].start;
@@ -207,17 +251,33 @@ fn varDecl(
fn assign(mod: *Module, scope: *Scope, infix_node: *ast.Node.SimpleInfixOp) InnerError!void {
if (infix_node.lhs.castTag(.Identifier)) |ident| {
- const tree = scope.tree();
- const ident_name = try identifierTokenString(mod, scope, ident.token);
+ // This intentionally does not support @"_" syntax.
+ const ident_name = scope.tree().tokenSlice(ident.token);
if (std.mem.eql(u8, ident_name, "_")) {
_ = try expr(mod, scope, .discard, infix_node.rhs);
return;
- } else {
- return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
}
- } else {
- return mod.failNode(scope, &infix_node.base, "TODO implement infix operator assign", .{});
}
+ const lvalue = try expr(mod, scope, .lvalue, infix_node.lhs);
+ _ = try expr(mod, scope, .{ .ptr = lvalue }, infix_node.rhs);
+}
+
+fn assignOp(
+ mod: *Module,
+ scope: *Scope,
+ infix_node: *ast.Node.SimpleInfixOp,
+ op_inst_tag: zir.Inst.Tag,
+) InnerError!void {
+ const lhs_ptr = try expr(mod, scope, .lvalue, infix_node.lhs);
+ const lhs = try mod.addZIRUnOp(scope, lhs_ptr.src, .deref, lhs_ptr);
+ const lhs_type = try mod.addZIRUnOp(scope, lhs_ptr.src, .typeof, lhs);
+ const rhs = try expr(mod, scope, .{ .ty = lhs_type }, infix_node.rhs);
+
+ const tree = scope.tree();
+ const src = tree.token_locs[infix_node.op_token].start;
+
+ const result = try mod.addZIRBinOp(scope, src, op_inst_tag, lhs, rhs);
+ _ = try mod.addZIRBinOp(scope, src, .store, lhs_ptr, result);
}
fn boolNot(mod: *Module, scope: *Scope, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
@@ -279,35 +339,19 @@ fn deref(mod: *Module, scope: *Scope, node: *ast.Node.SimpleSuffixOp) InnerError
return mod.addZIRUnOp(scope, src, .deref, lhs);
}
-fn cmp(
+fn simpleBinOp(
mod: *Module,
scope: *Scope,
rl: ResultLoc,
infix_node: *ast.Node.SimpleInfixOp,
- cmp_inst_tag: zir.Inst.Tag,
+ op_inst_tag: zir.Inst.Tag,
) InnerError!*zir.Inst {
const tree = scope.tree();
const src = tree.token_locs[infix_node.op_token].start;
- const lhs = try expr(mod, scope, .none, infix_node.lhs);
- const rhs = try expr(mod, scope, .none, infix_node.rhs);
- const result = try mod.addZIRBinOp(scope, src, cmp_inst_tag, lhs, rhs);
- return rlWrap(mod, scope, rl, result);
-}
-
-fn arithmetic(
- mod: *Module,
- scope: *Scope,
- rl: ResultLoc,
- infix_node: *ast.Node.SimpleInfixOp,
- op_inst_tag: zir.Inst.Tag,
-) InnerError!*zir.Inst {
const lhs = try expr(mod, scope, .none, infix_node.lhs);
const rhs = try expr(mod, scope, .none, infix_node.rhs);
- const tree = scope.tree();
- const src = tree.token_locs[infix_node.op_token].start;
-
const result = try mod.addZIRBinOp(scope, src, op_inst_tag, lhs, rhs);
return rlWrap(mod, scope, rl, result);
}
@@ -359,7 +403,7 @@ fn ifExpr(mod: *Module, scope: *Scope, rl: ResultLoc, if_node: *ast.Node.If) Inn
// proper type inference requires peer type resolution on the if's
// branches.
const branch_rl: ResultLoc = switch (rl) {
- .discard, .none, .ty, .ptr => rl,
+ .discard, .none, .ty, .ptr, .lvalue => rl,
.inferred_ptr, .bitcasted_ptr, .block_ptr => .{ .block_ptr = block },
};
@@ -698,6 +742,10 @@ fn as(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCall) I
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
+ .lvalue => {
+ const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
+ return mod.addZIRUnOp(scope, result.src, .ref, result);
+ },
.ty => |result_ty| {
const result = try expr(mod, scope, .{ .ty = dest_type }, params[1]);
return mod.addZIRBinOp(scope, src, .as, result_ty, result);
@@ -745,6 +793,11 @@ fn bitCast(mod: *Module, scope: *Scope, rl: ResultLoc, call: *ast.Node.BuiltinCa
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
+ .lvalue => {
+ const operand = try expr(mod, scope, .lvalue, params[1]);
+ const result = try mod.addZIRBinOp(scope, src, .bitcast_lvalue, dest_type, operand);
+ return result;
+ },
.ty => |result_ty| {
const result = try expr(mod, scope, .none, params[1]);
const bitcasted = try mod.addZIRBinOp(scope, src, .bitcast, dest_type, result);
@@ -1003,6 +1056,10 @@ fn rlWrap(mod: *Module, scope: *Scope, rl: ResultLoc, result: *zir.Inst) InnerEr
_ = try mod.addZIRUnOp(scope, result.src, .ensure_result_non_error, result);
return result;
},
+ .lvalue => {
+ // We need a pointer but we have a value.
+ return mod.addZIRUnOp(scope, result.src, .ref, result);
+ },
.ty => |ty_inst| return mod.addZIRBinOp(scope, result.src, .as, ty_inst, result),
.ptr => |ptr_inst| {
const casted_result = try mod.addZIRInst(scope, result.src, zir.Inst.CoerceToPtrElem, .{
src-self-hosted/Module.zig
@@ -2357,6 +2357,7 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
.alloc => return self.analyzeInstAlloc(scope, old_inst.castTag(.alloc).?),
.alloc_inferred => return self.analyzeInstAllocInferred(scope, old_inst.castTag(.alloc_inferred).?),
.arg => return self.analyzeInstArg(scope, old_inst.castTag(.arg).?),
+ .bitcast_lvalue => return self.analyzeInstBitCastLValue(scope, old_inst.castTag(.bitcast_lvalue).?),
.bitcast_result_ptr => return self.analyzeInstBitCastResultPtr(scope, old_inst.castTag(.bitcast_result_ptr).?),
.block => return self.analyzeInstBlock(scope, old_inst.castTag(.block).?),
.@"break" => return self.analyzeInstBreak(scope, old_inst.castTag(.@"break").?),
@@ -2374,6 +2375,7 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
.declval_in_module => return self.analyzeInstDeclValInModule(scope, old_inst.castTag(.declval_in_module).?),
.ensure_result_used => return self.analyzeInstEnsureResultUsed(scope, old_inst.castTag(.ensure_result_used).?),
.ensure_result_non_error => return self.analyzeInstEnsureResultNonError(scope, old_inst.castTag(.ensure_result_non_error).?),
+ .ref => return self.analyzeInstRef(scope, old_inst.castTag(.ref).?),
.ret_ptr => return self.analyzeInstRetPtr(scope, old_inst.castTag(.ret_ptr).?),
.ret_type => return self.analyzeInstRetType(scope, old_inst.castTag(.ret_type).?),
.store => return self.analyzeInstStore(scope, old_inst.castTag(.store).?),
@@ -2390,6 +2392,7 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
.as => return self.analyzeInstAs(scope, old_inst.castTag(.as).?),
.@"asm" => return self.analyzeInstAsm(scope, old_inst.castTag(.@"asm").?),
.@"unreachable" => return self.analyzeInstUnreachable(scope, old_inst.castTag(.@"unreachable").?),
+ .unreach_nocheck => return self.analyzeInstUnreachNoChk(scope, old_inst.castTag(.unreach_nocheck).?),
.@"return" => return self.analyzeInstRet(scope, old_inst.castTag(.@"return").?),
.returnvoid => return self.analyzeInstRetVoid(scope, old_inst.castTag(.returnvoid).?),
.@"fn" => return self.analyzeInstFn(scope, old_inst.castTag(.@"fn").?),
@@ -2400,7 +2403,21 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
.bitcast => return self.analyzeInstBitCast(scope, old_inst.castTag(.bitcast).?),
.floatcast => return self.analyzeInstFloatCast(scope, old_inst.castTag(.floatcast).?),
.elemptr => return self.analyzeInstElemPtr(scope, old_inst.castTag(.elemptr).?),
- .add, .sub => return self.analyzeInstArithmetic(scope, old_inst.cast(zir.Inst.BinOp).?),
+ .add => return self.analyzeInstArithmetic(scope, old_inst.castTag(.add).?),
+ .addwrap => return self.analyzeInstArithmetic(scope, old_inst.castTag(.addwrap).?),
+ .sub => return self.analyzeInstArithmetic(scope, old_inst.castTag(.sub).?),
+ .subwrap => return self.analyzeInstArithmetic(scope, old_inst.castTag(.subwrap).?),
+ .mul => return self.analyzeInstArithmetic(scope, old_inst.castTag(.mul).?),
+ .mulwrap => return self.analyzeInstArithmetic(scope, old_inst.castTag(.mulwrap).?),
+ .div => return self.analyzeInstArithmetic(scope, old_inst.castTag(.div).?),
+ .mod_rem => return self.analyzeInstArithmetic(scope, old_inst.castTag(.mod_rem).?),
+ .array_cat => return self.analyzeInstArrayCat(scope, old_inst.castTag(.array_cat).?),
+ .array_mul => return self.analyzeInstArrayMul(scope, old_inst.castTag(.array_mul).?),
+ .bitand => return self.analyzeInstBitwise(scope, old_inst.castTag(.bitand).?),
+ .bitor => return self.analyzeInstBitwise(scope, old_inst.castTag(.bitor).?),
+ .xor => return self.analyzeInstBitwise(scope, old_inst.castTag(.xor).?),
+ .shl => return self.analyzeInstShl(scope, old_inst.castTag(.shl).?),
+ .shr => return self.analyzeInstShr(scope, old_inst.castTag(.shr).?),
.cmp_lt => return self.analyzeInstCmp(scope, old_inst.castTag(.cmp_lt).?, .lt),
.cmp_lte => return self.analyzeInstCmp(scope, old_inst.castTag(.cmp_lte).?, .lte),
.cmp_eq => return self.analyzeInstCmp(scope, old_inst.castTag(.cmp_eq).?, .eq),
@@ -2411,6 +2428,7 @@ fn analyzeInst(self: *Module, scope: *Scope, old_inst: *zir.Inst) InnerError!*In
.isnull => return self.analyzeInstIsNonNull(scope, old_inst.castTag(.isnull).?, true),
.isnonnull => return self.analyzeInstIsNonNull(scope, old_inst.castTag(.isnonnull).?, false),
.boolnot => return self.analyzeInstBoolNot(scope, old_inst.castTag(.boolnot).?),
+ .typeof => return self.analyzeInstTypeOf(scope, old_inst.castTag(.typeof).?),
}
}
@@ -2422,6 +2440,10 @@ fn analyzeInstCoerceResultBlockPtr(
return self.fail(scope, inst.base.src, "TODO implement analyzeInstCoerceResultBlockPtr", .{});
}
+fn analyzeInstBitCastLValue(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstBitCastLValue", .{});
+}
+
fn analyzeInstBitCastResultPtr(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
return self.fail(scope, inst.base.src, "TODO implement analyzeInstBitCastResultPtr", .{});
}
@@ -2438,6 +2460,10 @@ fn analyzeInstRetPtr(self: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerEr
return self.fail(scope, inst.base.src, "TODO implement analyzeInstRetPtr", .{});
}
+fn analyzeInstRef(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstRef", .{});
+}
+
fn analyzeInstRetType(self: *Module, scope: *Scope, inst: *zir.Inst.NoOp) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, inst.base.src);
const fn_ty = b.func.?.owner_decl.typed_value.most_recent.typed_value.ty;
@@ -3044,6 +3070,26 @@ fn floatOpAllowed(tag: zir.Inst.Tag) bool {
};
}
+fn analyzeInstShl(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstShl", .{});
+}
+
+fn analyzeInstShr(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstShr", .{});
+}
+
+fn analyzeInstBitwise(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstBitwise", .{});
+}
+
+fn analyzeInstArrayCat(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstArrayCat", .{});
+}
+
+fn analyzeInstArrayMul(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
+ return self.fail(scope, inst.base.src, "TODO implement analyzeInstArrayMul", .{});
+}
+
fn analyzeInstArithmetic(self: *Module, scope: *Scope, inst: *zir.Inst.BinOp) InnerError!*Inst {
const tracy = trace(@src());
defer tracy.end();
@@ -3257,6 +3303,11 @@ fn analyzeInstCmp(
return self.fail(scope, inst.base.src, "TODO implement more cmp analysis", .{});
}
+fn analyzeInstTypeOf(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
+ const operand = try self.resolveInst(scope, inst.positionals.operand);
+ return self.constType(scope, inst.base.src, operand.ty);
+}
+
fn analyzeInstBoolNot(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
const uncasted_operand = try self.resolveInst(scope, inst.positionals.operand);
const bool_type = Type.initTag(.bool);
@@ -3319,13 +3370,22 @@ fn wantSafety(self: *Module, scope: *Scope) bool {
};
}
+fn analyzeUnreach(self: *Module, scope: *Scope, src: usize) InnerError!*Inst {
+ const b = try self.requireRuntimeBlock(scope, src);
+ return self.addNoOp(b, src, Type.initTag(.noreturn), .unreach);
+}
+
+fn analyzeInstUnreachNoChk(self: *Module, scope: *Scope, unreach: *zir.Inst.NoOp) InnerError!*Inst {
+ return self.analyzeUnreach(scope, unreach.base.src);
+}
+
fn analyzeInstUnreachable(self: *Module, scope: *Scope, unreach: *zir.Inst.NoOp) InnerError!*Inst {
const b = try self.requireRuntimeBlock(scope, unreach.base.src);
if (self.wantSafety(scope)) {
// TODO Once we have a panic function to call, call it here instead of this.
_ = try self.addNoOp(b, unreach.base.src, Type.initTag(.void), .breakpoint);
}
- return self.addNoOp(b, unreach.base.src, Type.initTag(.noreturn), .unreach);
+ return self.analyzeUnreach(scope, unreach.base.src);
}
fn analyzeInstRet(self: *Module, scope: *Scope, inst: *zir.Inst.UnOp) InnerError!*Inst {
src-self-hosted/zir.zig
@@ -34,25 +34,63 @@ pub const Inst = struct {
/// These names are used directly as the instruction names in the text format.
pub const Tag = enum {
+ /// Arithmetic addition. Asserts no integer overflow.
+ add,
+ /// Twos complement wrapping integer addition.
+ addwrap,
/// Allocates stack local memory. Its lifetime ends when the block ends that contains
/// this instruction.
alloc,
/// Same as `alloc` except the type is inferred.
alloc_inferred,
+ /// Array concatenation. `a ++ b`
+ array_cat,
+ /// Array multiplication. `a ** b`
+ array_mul,
/// Function parameter value. These must be first in a function's main block,
/// in respective order with the parameters.
arg,
+ /// Type coercion.
+ as,
+ /// Inline assembly.
+ @"asm",
+ /// Bitwise AND. `&`
+ bitand,
+ /// TODO delete this instruction, it has no purpose.
+ bitcast,
+ /// An arbitrary typed pointer, which is to be used as an L-Value, is pointer-casted
+ /// to a new L-Value. The destination type is given by LHS. The cast is to be evaluated
+ /// as if it were a bit-cast operation from the operand pointer element type to the
+ /// provided destination type.
+ bitcast_lvalue,
/// A typed result location pointer is bitcasted to a new result location pointer.
/// The new result location pointer has an inferred type.
bitcast_result_ptr,
+ /// Bitwise OR. `|`
+ bitor,
/// A labeled block of code, which can return a value.
block,
+ /// Boolean NOT. See also `bitnot`.
+ boolnot,
/// Return a value from a `Block`.
@"break",
breakpoint,
/// Same as `break` but without an operand; the operand is assumed to be the void value.
breakvoid,
+ /// Function call.
call,
+ /// `<`
+ cmp_lt,
+ /// `<=`
+ cmp_lte,
+ /// `==`
+ cmp_eq,
+ /// `>=`
+ cmp_gte,
+ /// `>`
+ cmp_gt,
+ /// `!=`
+ cmp_neq,
/// Coerces a result location pointer to a new element type. It is evaluated "backwards"-
/// as type coercion from the new element type to the old element type.
/// LHS is destination element type, RHS is result pointer.
@@ -65,6 +103,8 @@ pub const Inst = struct {
coerce_to_ptr_elem,
/// Emit an error message and fail compilation.
compileerror,
+ /// Conditional branch. Splits control flow based on a boolean condition value.
+ condbr,
/// Special case, has no textual representation.
@"const",
/// Represents a pointer to a global decl by name.
@@ -76,61 +116,103 @@ pub const Inst = struct {
declval,
/// Same as declval but the parameter is a `*Module.Decl` rather than a name.
declval_in_module,
+ /// Load the value from a pointer.
+ deref,
+ /// Arithmetic division. Asserts no integer overflow.
+ div,
+ /// Given a pointer to an array, slice, or pointer, returns a pointer to the element at
+ /// the provided index.
+ elemptr,
/// Emits a compile error if the operand is not `void`.
ensure_result_used,
/// Emits a compile error if an error is ignored.
ensure_result_non_error,
- boolnot,
- /// Obtains a pointer to the return value.
- ret_ptr,
- /// Obtains the return type of the in-scope function.
- ret_type,
- /// Write a value to a pointer.
- store,
- /// String Literal. Makes an anonymous Decl and then takes a pointer to it.
- str,
- int,
- inttype,
- ptrtoint,
+ /// Export the provided Decl as the provided name in the compilation's output object file.
+ @"export",
+ /// Given a pointer to a struct or object that contains virtual fields, returns a pointer
+ /// to the named field.
fieldptr,
- deref,
- as,
- @"asm",
- @"unreachable",
- @"return",
- returnvoid,
+ /// Convert a larger float type to any other float type, possibly causing a loss of precision.
+ floatcast,
+ /// Declare a function body.
@"fn",
+ /// Returns a function type.
fntype,
- @"export",
+ /// Integer literal.
+ int,
+ /// Convert an integer value to another integer type, asserting that the destination type
+ /// can hold the same mathematical value.
+ intcast,
+ /// Make an integer type out of signedness and bit count.
+ inttype,
+ /// Return a boolean false if an optional is null. `x != null`
+ isnonnull,
+ /// Return a boolean true if an optional is null. `x == null`
+ isnull,
+ /// Ambiguously remainder division or modulus. If the computation would possibly have
+ /// a different value depending on whether the operation is remainder division or modulus,
+ /// a compile error is emitted. Otherwise the computation is performed.
+ mod_rem,
+ /// Arithmetic multiplication. Asserts no integer overflow.
+ mul,
+ /// Twos complement wrapping integer multiplication.
+ mulwrap,
/// Given a reference to a function and a parameter index, returns the
/// type of the parameter. TODO what happens when the parameter is `anytype`?
param_type,
+ /// An alternative to using `const` for simple primitive values such as `true` or `u8`.
+ /// TODO flatten so that each primitive has its own ZIR Inst Tag.
primitive,
- intcast,
- bitcast,
- floatcast,
- elemptr,
- add,
+ /// Convert a pointer to a `usize` integer.
+ ptrtoint,
+ /// Turns an R-Value into a const L-Value. In other words, it takes a value,
+ /// stores it in a memory location, and returns a const pointer to it. If the value
+ /// is `comptime`, the memory location is global static constant data. Otherwise,
+ /// the memory location is in the stack frame, local to the scope containing the
+ /// instruction.
+ ref,
+ /// Obtains a pointer to the return value.
+ ret_ptr,
+ /// Obtains the return type of the in-scope function.
+ ret_type,
+ /// Sends control flow back to the function's callee. Takes an operand as the return value.
+ @"return",
+ /// Same as `return` but there is no operand; the operand is implicitly the void value.
+ returnvoid,
+ /// Integer shift-left. Zeroes are shifted in from the right hand side.
+ shl,
+ /// Integer shift-right. Arithmetic or logical depending on the signedness of the integer type.
+ shr,
+ /// Write a value to a pointer. For loading, see `deref`.
+ store,
+ /// String Literal. Makes an anonymous Decl and then takes a pointer to it.
+ str,
+ /// Arithmetic subtraction. Asserts no integer overflow.
sub,
- cmp_lt,
- cmp_lte,
- cmp_eq,
- cmp_gte,
- cmp_gt,
- cmp_neq,
- condbr,
- isnull,
- isnonnull,
+ /// Twos complement wrapping integer subtraction.
+ subwrap,
+ /// Returns the type of a value.
+ typeof,
+ /// Asserts control-flow will not reach this instruction. Not safety checked - the compiler
+ /// will assume the correctness of this instruction.
+ unreach_nocheck,
+ /// Asserts control-flow will not reach this instruction. In safety-checked modes,
+ /// this will generate a call to the panic function unless it can be proven unreachable
+ /// by the compiler.
+ @"unreachable",
+ /// Bitwise XOR. `^`
+ xor,
pub fn Type(tag: Tag) type {
return switch (tag) {
.arg,
.breakpoint,
- .@"unreachable",
.returnvoid,
.alloc_inferred,
.ret_ptr,
.ret_type,
+ .unreach_nocheck,
+ .@"unreachable",
=> NoOp,
.boolnot,
@@ -143,10 +225,25 @@ pub const Inst = struct {
.ensure_result_used,
.ensure_result_non_error,
.bitcast_result_ptr,
+ .ref,
+ .typeof,
=> UnOp,
.add,
+ .addwrap,
+ .array_cat,
+ .array_mul,
+ .bitand,
+ .bitcast_lvalue,
+ .bitor,
+ .div,
+ .mod_rem,
+ .mul,
+ .mulwrap,
+ .shl,
+ .shr,
.sub,
+ .subwrap,
.cmp_lt,
.cmp_lte,
.cmp_eq,
@@ -158,6 +255,7 @@ pub const Inst = struct {
.intcast,
.bitcast,
.coerce_result_ptr,
+ .xor,
=> BinOp,
.block => Block,
@@ -192,13 +290,30 @@ pub const Inst = struct {
/// Function calls do not count.
pub fn isNoReturn(tag: Tag) bool {
return switch (tag) {
+ .add,
+ .addwrap,
.alloc,
.alloc_inferred,
+ .array_cat,
+ .array_mul,
.arg,
+ .as,
+ .@"asm",
+ .bitand,
+ .bitcast,
+ .bitcast_lvalue,
.bitcast_result_ptr,
+ .bitor,
.block,
+ .boolnot,
.breakpoint,
.call,
+ .cmp_lt,
+ .cmp_lte,
+ .cmp_eq,
+ .cmp_gte,
+ .cmp_gt,
+ .cmp_neq,
.coerce_result_ptr,
.coerce_result_block_ptr,
.coerce_to_ptr_elem,
@@ -207,48 +322,48 @@ pub const Inst = struct {
.declref_str,
.declval,
.declval_in_module,
+ .deref,
+ .div,
+ .elemptr,
.ensure_result_used,
.ensure_result_non_error,
- .ret_ptr,
- .ret_type,
- .store,
- .str,
- .int,
- .inttype,
- .ptrtoint,
+ .@"export",
+ .floatcast,
.fieldptr,
- .deref,
- .as,
- .@"asm",
.@"fn",
.fntype,
- .@"export",
+ .int,
+ .intcast,
+ .inttype,
+ .isnonnull,
+ .isnull,
+ .mod_rem,
+ .mul,
+ .mulwrap,
.param_type,
.primitive,
- .intcast,
- .bitcast,
- .floatcast,
- .elemptr,
- .add,
+ .ptrtoint,
+ .ref,
+ .ret_ptr,
+ .ret_type,
+ .shl,
+ .shr,
+ .store,
+ .str,
.sub,
- .cmp_lt,
- .cmp_lte,
- .cmp_eq,
- .cmp_gte,
- .cmp_gt,
- .cmp_neq,
- .isnull,
- .isnonnull,
- .boolnot,
+ .subwrap,
+ .typeof,
+ .xor,
=> false,
- .condbr,
- .@"unreachable",
- .@"return",
- .returnvoid,
.@"break",
.breakvoid,
+ .condbr,
.compileerror,
+ .@"return",
+ .returnvoid,
+ .unreach_nocheck,
+ .@"unreachable",
=> true,
};
}