Commit e78b1b810f
Changed files (7)
src-self-hosted/errmsg.zig
@@ -14,6 +14,13 @@ pub const Color = enum {
pub const Span = struct {
first: ast.TokenIndex,
last: ast.TokenIndex,
+
+ pub fn token(i: ast.TokenIndex) Span {
+ return Span {
+ .first = i,
+ .last = i,
+ };
+ }
};
pub const Msg = struct {
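
Note on errmsg.zig: the new Span.token helper collapses a single token into a span, and nearly every span added in ir.zig below is built with it (Span.token(block.lbrace), Span.token(block.rbrace), and so on). A minimal usage sketch, with tok standing in for any ast.TokenIndex:

    // Hypothetical call site: point a message at exactly one token.
    const span = Span.token(tok);
    assert(span.first == tok and span.last == tok);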
src-self-hosted/ir.zig
@@ -9,31 +9,34 @@ const Type = Value.Type;
const assert = std.debug.assert;
const Token = std.zig.Token;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
+const Span = @import("errmsg.zig").Span;
pub const LVal = enum {
None,
Ptr,
};
-pub const Mut = enum {
- Mut,
- Const,
-};
-
-pub const Volatility = enum {
- NonVolatile,
- Volatile,
-};
-
pub const IrVal = union(enum) {
Unknown,
- Known: *Value,
+ KnownType: *Type,
+ KnownValue: *Value,
+
+ const Init = enum {
+ Unknown,
+ NoReturn,
+ Void,
+ };
pub fn dump(self: IrVal) void {
switch (self) {
- IrVal.Unknown => std.debug.warn("Unknown"),
- IrVal.Known => |value| {
- std.debug.warn("Known(");
+ IrVal.Unknown => std.debug.warn("Unknown"),
+ IrVal.KnownType => |typeof| {
+ std.debug.warn("KnownType(");
+ typeof.dump();
+ std.debug.warn(")");
+ },
+ IrVal.KnownValue => |value| {
+ std.debug.warn("KnownValue(");
value.dump();
std.debug.warn(")");
},
@@ -46,10 +49,18 @@ pub const Instruction = struct {
scope: *Scope,
debug_id: usize,
val: IrVal,
+ ref_count: usize,
+ span: Span,
/// true if this instruction was generated by zig and not from user code
is_generated: bool,
+ /// the instruction that is derived from this one in analysis
+ child: ?*Instruction,
+
+ /// the instruction that this one derives from in analysis
+ parent: ?*Instruction,
+
pub fn cast(base: *Instruction, comptime T: type) ?*T {
if (base.id == comptime typeToId(T)) {
return @fieldParentPtr(T, "base", base);
@@ -81,6 +92,47 @@ pub const Instruction = struct {
unreachable;
}
+ pub fn hasSideEffects(base: *const Instruction) bool {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (base.id == @field(Id, @memberName(Id, i))) {
+ const T = @field(Instruction, @memberName(Id, i));
+ return @fieldParentPtr(T, "base", base).hasSideEffects();
+ }
+ }
+ unreachable;
+ }
+
+ pub fn analyze(base: *Instruction, ira: *Analyze) Analyze.Error!*Instruction {
+ comptime var i = 0;
+ inline while (i < @memberCount(Id)) : (i += 1) {
+ if (base.id == @field(Id, @memberName(Id, i))) {
+ const T = @field(Instruction, @memberName(Id, i));
+ const new_inst = try @fieldParentPtr(T, "base", base).analyze(ira);
+ new_inst.linkToParent(base);
+ return new_inst;
+ }
+ }
+ unreachable;
+ }
+
+ fn getAsParam(param: *Instruction) !*Instruction {
+ const child = param.child orelse return error.SemanticAnalysisFailed;
+ switch (child.val) {
+ IrVal.Unknown => return error.SemanticAnalysisFailed,
+ else => return child,
+ }
+ }
+
+ /// asserts that the type is known
+ fn getKnownType(self: *Instruction) *Type {
+ switch (self.val) {
+ IrVal.KnownType => |typeof| return typeof,
+ IrVal.KnownValue => |value| return value.typeof,
+ IrVal.Unknown => unreachable,
+ }
+ }
+
pub fn setGenerated(base: *Instruction) void {
base.is_generated = true;
}
@@ -88,10 +140,18 @@ pub const Instruction = struct {
pub fn isNoReturn(base: *const Instruction) bool {
switch (base.val) {
IrVal.Unknown => return false,
- IrVal.Known => |x| return x.typeof.id == Type.Id.NoReturn,
+ IrVal.KnownValue => |x| return x.typeof.id == Type.Id.NoReturn,
+ IrVal.KnownType => |typeof| return typeof.id == Type.Id.NoReturn,
}
}
+ pub fn linkToParent(self: *Instruction, parent: *Instruction) void {
+ assert(self.parent == null);
+ assert(parent.child == null);
+ self.parent = parent;
+ parent.child = self;
+ }
+
pub const Id = enum {
Return,
Const,
@@ -100,196 +160,231 @@ pub const Instruction = struct {
CheckVoidStmt,
Phi,
Br,
+ AddImplicitReturnType,
};
pub const Const = struct {
base: Instruction,
+ params: Params,
- pub fn buildBool(irb: *Builder, scope: *Scope, val: bool) !*Instruction {
- const inst = try irb.arena().create(Const{
- .base = Instruction{
- .id = Instruction.Id.Const,
- .is_generated = false,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal{ .Known = &Value.Bool.get(irb.module, val).base },
- },
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
- }
-
- pub fn buildVoid(irb: *Builder, scope: *Scope, is_generated: bool) !*Instruction {
- const inst = try irb.arena().create(Const{
- .base = Instruction{
- .id = Instruction.Id.Const,
- .is_generated = is_generated,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
- },
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
+ const Params = struct {};
+
+ // Use Builder.buildConst* methods, or, after building a Const instruction,
+ // manually set the val field.
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(self: *const Const) void {
+ self.base.val.KnownValue.dump();
}
- pub fn dump(inst: *const Const) void {
- inst.base.val.Known.dump();
+ pub fn hasSideEffects(self: *const Const) bool {
+ return false;
+ }
+
+ pub fn analyze(self: *const Const, ira: *Analyze) !*Instruction {
+ const new_inst = try ira.irb.build(Const, self.base.scope, self.base.span, Params{});
+ new_inst.val = IrVal{ .KnownValue = self.base.val.KnownValue.getRef() };
+ return new_inst;
}
};
pub const Return = struct {
base: Instruction,
- return_value: *Instruction,
-
- pub fn build(irb: *Builder, scope: *Scope, return_value: *Instruction) !*Instruction {
- const inst = try irb.arena().create(Return{
- .base = Instruction{
- .id = Instruction.Id.Return,
- .is_generated = false,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
- },
- .return_value = return_value,
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
+ params: Params,
+
+ const Params = struct {
+ return_value: *Instruction,
+ };
+
+ const ir_val_init = IrVal.Init.NoReturn;
+
+ pub fn dump(self: *const Return) void {
+ std.debug.warn("#{}", self.params.return_value.debug_id);
}
- pub fn dump(inst: *const Return) void {
- std.debug.warn("#{}", inst.return_value.debug_id);
+ pub fn hasSideEffects(self: *const Return) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const Return, ira: *Analyze) !*Instruction {
+ const value = try self.params.return_value.getAsParam();
+ const casted_value = try ira.implicitCast(value, ira.explicit_return_type);
+
+ // TODO detect returning local variable address
+
+ return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
}
};
pub const Ref = struct {
base: Instruction,
- target: *Instruction,
- mut: Mut,
- volatility: Volatility,
+ params: Params,
- pub fn build(
- irb: *Builder,
- scope: *Scope,
+ const Params = struct {
target: *Instruction,
- mut: Mut,
- volatility: Volatility,
- ) !*Instruction {
- const inst = try irb.arena().create(Ref{
- .base = Instruction{
- .id = Instruction.Id.Ref,
- .is_generated = false,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal.Unknown,
- },
+ mut: Type.Pointer.Mut,
+ volatility: Type.Pointer.Vol,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(inst: *const Ref) void {}
+
+ pub fn hasSideEffects(inst: *const Ref) bool {
+ return false;
+ }
+
+ pub fn analyze(self: *const Ref, ira: *Analyze) !*Instruction {
+ const target = try self.params.target.getAsParam();
+
+ if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
+ return ira.getCompTimeRef(
+ val,
+ Value.Ptr.Mut.CompTimeConst,
+ self.params.mut,
+ self.params.volatility,
+ val.typeof.getAbiAlignment(ira.irb.module),
+ );
+ }
+
+ const new_inst = try ira.irb.build(Ref, self.base.scope, self.base.span, Params{
.target = target,
- .mut = mut,
- .volatility = volatility,
+ .mut = self.params.mut,
+ .volatility = self.params.volatility,
});
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
+ const elem_type = target.getKnownType();
+ const ptr_type = Type.Pointer.get(
+ ira.irb.module,
+ elem_type,
+ self.params.mut,
+ self.params.volatility,
+ Type.Pointer.Size.One,
+ elem_type.getAbiAlignment(ira.irb.module),
+ );
+ // TODO: potentially set the hint that this is a stack pointer. But it might not be - this
+ // could be a ref of a global, for example
+ new_inst.val = IrVal{ .KnownType = &ptr_type.base };
+ // TODO potentially add an alloca entry here
+ return new_inst;
}
-
- pub fn dump(inst: *const Ref) void {}
};
pub const DeclVar = struct {
base: Instruction,
- variable: *Variable,
+ params: Params,
+
+ const Params = struct {
+ variable: *Variable,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const DeclVar) void {}
+
+ pub fn hasSideEffects(inst: *const DeclVar) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const DeclVar, ira: *Analyze) !*Instruction {
+ return error.Unimplemented; // TODO
+ }
};
pub const CheckVoidStmt = struct {
base: Instruction,
- target: *Instruction,
+ params: Params,
- pub fn build(
- irb: *Builder,
- scope: *Scope,
+ const Params = struct {
target: *Instruction,
- ) !*Instruction {
- const inst = try irb.arena().create(CheckVoidStmt{
- .base = Instruction{
- .id = Instruction.Id.CheckVoidStmt,
- .is_generated = true,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal{ .Known = &Value.Void.get(irb.module).base },
- },
- .target = target,
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
- }
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const CheckVoidStmt) void {}
+
+ pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Instruction {
+ return error.Unimplemented; // TODO
+ }
};
pub const Phi = struct {
base: Instruction,
- incoming_blocks: []*BasicBlock,
- incoming_values: []*Instruction,
+ params: Params,
- pub fn build(
- irb: *Builder,
- scope: *Scope,
+ const Params = struct {
incoming_blocks: []*BasicBlock,
incoming_values: []*Instruction,
- ) !*Instruction {
- const inst = try irb.arena().create(Phi{
- .base = Instruction{
- .id = Instruction.Id.Phi,
- .is_generated = false,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal.Unknown,
- },
- .incoming_blocks = incoming_blocks,
- .incoming_values = incoming_values,
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
- }
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
pub fn dump(inst: *const Phi) void {}
+
+ pub fn hasSideEffects(inst: *const Phi) bool {
+ return false;
+ }
+
+ pub fn analyze(self: *const Phi, ira: *Analyze) !*Instruction {
+ return error.Unimplemented; // TODO
+ }
};
pub const Br = struct {
base: Instruction,
- dest_block: *BasicBlock,
- is_comptime: *Instruction,
+ params: Params,
- pub fn build(
- irb: *Builder,
- scope: *Scope,
+ const Params = struct {
dest_block: *BasicBlock,
is_comptime: *Instruction,
- ) !*Instruction {
- const inst = try irb.arena().create(Br{
- .base = Instruction{
- .id = Instruction.Id.Br,
- .is_generated = false,
- .scope = scope,
- .debug_id = irb.next_debug_id,
- .val = IrVal{ .Known = &Value.NoReturn.get(irb.module).base },
- },
- .dest_block = dest_block,
- .is_comptime = is_comptime,
- });
- irb.next_debug_id += 1;
- try irb.current_basic_block.instruction_list.append(&inst.base);
- return &inst.base;
- }
+ };
+
+ const ir_val_init = IrVal.Init.NoReturn;
pub fn dump(inst: *const Br) void {}
+
+ pub fn hasSideEffects(inst: *const Br) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const Br, ira: *Analyze) !*Instruction {
+ return error.Unimplemented; // TODO
+ }
+ };
+
+ pub const AddImplicitReturnType = struct {
+ base: Instruction,
+ params: Params,
+
+ pub const Params = struct {
+ target: *Instruction,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(inst: *const AddImplicitReturnType) void {
+ std.debug.warn("#{}", inst.params.target.debug_id);
+ }
+
+ pub fn hasSideEffects(inst: *const AddImplicitReturnType) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const AddImplicitReturnType, ira: *Analyze) !*Instruction {
+ const target = try self.params.target.getAsParam();
+
+ try ira.src_implicit_return_type_list.append(target);
+
+ return ira.irb.build(
+ AddImplicitReturnType,
+ self.base.scope,
+ self.base.span,
+ Params{ .target = target },
+ );
+ }
};
};
@@ -303,16 +398,31 @@ pub const BasicBlock = struct {
debug_id: usize,
scope: *Scope,
instruction_list: std.ArrayList(*Instruction),
+ ref_instruction: ?*Instruction,
+
+ /// the basic block that is derived from this one in analysis
+ child: ?*BasicBlock,
+
+ /// the basic block that this one derives from in analysis
+ parent: ?*BasicBlock,
pub fn ref(self: *BasicBlock) void {
self.ref_count += 1;
}
+
+ pub fn linkToParent(self: *BasicBlock, parent: *BasicBlock) void {
+ assert(self.parent == null);
+ assert(parent.child == null);
+ self.parent = parent;
+ parent.child = self;
+ }
};
/// Stuff that survives longer than Builder
pub const Code = struct {
basic_block_list: std.ArrayList(*BasicBlock),
arena: std.heap.ArenaAllocator,
+ return_type: ?*Type,
/// allocator is module.a()
pub fn destroy(self: *Code, allocator: *Allocator) void {
@@ -341,15 +451,13 @@ pub const Builder = struct {
parsed_file: *ParsedFile,
is_comptime: bool,
- pub const Error = error{
- OutOfMemory,
- Unimplemented,
- };
+ pub const Error = Analyze.Error;
pub fn init(module: *Module, parsed_file: *ParsedFile) !Builder {
const code = try module.a().create(Code{
.basic_block_list = undefined,
.arena = std.heap.ArenaAllocator.init(module.a()),
+ .return_type = null,
});
code.basic_block_list = std.ArrayList(*BasicBlock).init(&code.arena.allocator);
errdefer code.destroy(module.a());
@@ -381,6 +489,9 @@ pub const Builder = struct {
.debug_id = self.next_debug_id,
.scope = scope,
.instruction_list = std.ArrayList(*Instruction).init(self.arena()),
+ .child = null,
+ .parent = null,
+ .ref_instruction = null,
});
self.next_debug_id += 1;
return basic_block;
@@ -490,14 +601,18 @@ pub const Builder = struct {
if (block.statements.len == 0) {
// {}
- return Instruction.Const.buildVoid(irb, child_scope, false);
+ return irb.buildConstVoid(child_scope, Span.token(block.lbrace), false);
}
if (block.label) |label| {
block_scope.incoming_values = std.ArrayList(*Instruction).init(irb.arena());
block_scope.incoming_blocks = std.ArrayList(*BasicBlock).init(irb.arena());
block_scope.end_block = try irb.createBasicBlock(parent_scope, "BlockEnd");
- block_scope.is_comptime = try Instruction.Const.buildBool(irb, parent_scope, irb.isCompTime(parent_scope));
+ block_scope.is_comptime = try irb.buildConstBool(
+ parent_scope,
+ Span.token(block.lbrace),
+ irb.isCompTime(parent_scope),
+ );
}
var is_continuation_unreachable = false;
@@ -530,10 +645,15 @@ pub const Builder = struct {
if (statement_value.cast(Instruction.DeclVar)) |decl_var| {
// variable declarations start a new scope
- child_scope = decl_var.variable.child_scope;
+ child_scope = decl_var.params.variable.child_scope;
} else if (!is_continuation_unreachable) {
// this statement's value must be void
- _ = Instruction.CheckVoidStmt.build(irb, child_scope, statement_value);
+ _ = try irb.build(
+ Instruction.CheckVoidStmt,
+ child_scope,
+ statement_value.span,
+ Instruction.CheckVoidStmt.Params{ .target = statement_value },
+ );
}
}
@@ -544,37 +664,34 @@ pub const Builder = struct {
}
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
- return Instruction.Phi.build(
- irb,
- parent_scope,
- block_scope.incoming_blocks.toOwnedSlice(),
- block_scope.incoming_values.toOwnedSlice(),
- );
+ return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
+ .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
+ .incoming_values = block_scope.incoming_values.toOwnedSlice(),
+ });
}
if (block.label) |label| {
try block_scope.incoming_blocks.append(irb.current_basic_block);
try block_scope.incoming_values.append(
- try Instruction.Const.buildVoid(irb, parent_scope, true),
+ try irb.buildConstVoid(parent_scope, Span.token(block.rbrace), true),
);
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
- (try Instruction.Br.build(
- irb,
- parent_scope,
- block_scope.end_block,
- block_scope.is_comptime,
- )).setGenerated();
+
+ _ = try irb.buildGen(Instruction.Br, parent_scope, Span.token(block.rbrace), Instruction.Br.Params{
+ .dest_block = block_scope.end_block,
+ .is_comptime = block_scope.is_comptime,
+ });
+
try irb.setCursorAtEndAndAppendBlock(block_scope.end_block);
- return Instruction.Phi.build(
- irb,
- parent_scope,
- block_scope.incoming_blocks.toOwnedSlice(),
- block_scope.incoming_values.toOwnedSlice(),
- );
+
+ return irb.build(Instruction.Phi, parent_scope, Span.token(block.rbrace), Instruction.Phi.Params{
+ .incoming_blocks = block_scope.incoming_blocks.toOwnedSlice(),
+ .incoming_values = block_scope.incoming_values.toOwnedSlice(),
+ });
}
_ = try irb.genDefersForBlock(child_scope, outer_block_scope, Scope.Defer.Kind.ScopeExit);
- return try Instruction.Const.buildVoid(irb, child_scope, true);
+ return irb.buildConstVoid(child_scope, Span.token(block.rbrace), true);
}
fn genDefersForBlock(
@@ -603,7 +720,12 @@ pub const Builder = struct {
if (instruction.isNoReturn()) {
is_noreturn = true;
} else {
- _ = Instruction.CheckVoidStmt.build(irb, &defer_expr_scope.base, instruction);
+ _ = try irb.build(
+ Instruction.CheckVoidStmt,
+ &defer_expr_scope.base,
+ Span.token(defer_expr_scope.expr_node.lastToken()),
+ Instruction.CheckVoidStmt.Params{ .target = instruction },
+ );
}
}
},
@@ -626,7 +748,11 @@ pub const Builder = struct {
LVal.Ptr => {
// We needed a pointer to a value, but we got a value. So we create
// an instruction which just makes a const pointer of it.
- return Instruction.Ref.build(irb, scope, instruction, Mut.Const, Volatility.NonVolatile);
+ return irb.build(Instruction.Ref, scope, instruction.span, Instruction.Ref.Params{
+ .target = instruction,
+ .mut = Type.Pointer.Mut.Const,
+ .volatility = Type.Pointer.Vol.Non,
+ });
},
}
}
@@ -634,9 +760,218 @@ pub const Builder = struct {
fn arena(self: *Builder) *Allocator {
return &self.code.arena.allocator;
}
+
+ fn buildExtra(
+ self: *Builder,
+ comptime I: type,
+ scope: *Scope,
+ span: Span,
+ params: I.Params,
+ is_generated: bool,
+ ) !*Instruction {
+ const inst = try self.arena().create(I{
+ .base = Instruction{
+ .id = Instruction.typeToId(I),
+ .is_generated = is_generated,
+ .scope = scope,
+ .debug_id = self.next_debug_id,
+ .val = switch (I.ir_val_init) {
+ IrVal.Init.Unknown => IrVal.Unknown,
+ IrVal.Init.NoReturn => IrVal{ .KnownValue = &Value.NoReturn.get(self.module).base },
+ IrVal.Init.Void => IrVal{ .KnownValue = &Value.Void.get(self.module).base },
+ },
+ .ref_count = 0,
+ .span = span,
+ .child = null,
+ .parent = null,
+ },
+ .params = params,
+ });
+
+ // Look at the params and ref() other instructions
+ comptime var i = 0;
+ inline while (i < @memberCount(I.Params)) : (i += 1) {
+ const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
+ switch (FieldType) {
+ *Instruction => @field(inst.params, @memberName(I.Params, i)).ref_count += 1,
+ ?*Instruction => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref_count += 1,
+ else => {},
+ }
+ }
+
+ self.next_debug_id += 1;
+ try self.current_basic_block.instruction_list.append(&inst.base);
+ return &inst.base;
+ }
+
+ fn build(
+ self: *Builder,
+ comptime I: type,
+ scope: *Scope,
+ span: Span,
+ params: I.Params,
+ ) !*Instruction {
+ return self.buildExtra(I, scope, span, params, false);
+ }
+
+ fn buildGen(
+ self: *Builder,
+ comptime I: type,
+ scope: *Scope,
+ span: Span,
+ params: I.Params,
+ ) !*Instruction {
+ return self.buildExtra(I, scope, span, params, true);
+ }
+
+ fn buildConstBool(self: *Builder, scope: *Scope, span: Span, x: bool) !*Instruction {
+ const inst = try self.build(Instruction.Const, scope, span, Instruction.Const.Params{});
+ inst.val = IrVal{ .KnownValue = &Value.Bool.get(self.module, x).base };
+ return inst;
+ }
+
+ fn buildConstVoid(self: *Builder, scope: *Scope, span: Span, is_generated: bool) !*Instruction {
+ const inst = try self.buildExtra(Instruction.Const, scope, span, Instruction.Const.Params{}, is_generated);
+ inst.val = IrVal{ .KnownValue = &Value.Void.get(self.module).base };
+ return inst;
+ }
+};
+
+const Analyze = struct {
+ irb: Builder,
+ old_bb_index: usize,
+ const_predecessor_bb: ?*BasicBlock,
+ parent_basic_block: *BasicBlock,
+ instruction_index: usize,
+ src_implicit_return_type_list: std.ArrayList(*Instruction),
+ explicit_return_type: ?*Type,
+
+ pub const Error = error{
+ /// This is only for when we have already reported a compile error. It is the poison value.
+ SemanticAnalysisFailed,
+
+ /// This is a placeholder; it is more useful than panicking, but once the compiler is
+ /// done, this error code will be removed.
+ Unimplemented,
+
+ OutOfMemory,
+ };
+
+ pub fn init(module: *Module, parsed_file: *ParsedFile, explicit_return_type: ?*Type) !Analyze {
+ var irb = try Builder.init(module, parsed_file);
+ errdefer irb.abort();
+
+ return Analyze{
+ .irb = irb,
+ .old_bb_index = 0,
+ .const_predecessor_bb = null,
+ .parent_basic_block = undefined, // initialized with startBasicBlock
+ .instruction_index = undefined, // initialized with startBasicBlock
+ .src_implicit_return_type_list = std.ArrayList(*Instruction).init(irb.arena()),
+ .explicit_return_type = explicit_return_type,
+ };
+ }
+
+ pub fn abort(self: *Analyze) void {
+ self.irb.abort();
+ }
+
+ pub fn getNewBasicBlock(self: *Analyze, old_bb: *BasicBlock, ref_old_instruction: ?*Instruction) !*BasicBlock {
+ if (old_bb.child) |child| {
+ if (ref_old_instruction == null or child.ref_instruction != ref_old_instruction)
+ return child;
+ }
+
+ const new_bb = try self.irb.createBasicBlock(old_bb.scope, old_bb.name_hint);
+ new_bb.linkToParent(old_bb);
+ new_bb.ref_instruction = ref_old_instruction;
+ return new_bb;
+ }
+
+ pub fn startBasicBlock(self: *Analyze, old_bb: *BasicBlock, const_predecessor_bb: ?*BasicBlock) void {
+ self.instruction_index = 0;
+ self.parent_basic_block = old_bb;
+ self.const_predecessor_bb = const_predecessor_bb;
+ }
+
+ pub fn finishBasicBlock(ira: *Analyze, old_code: *Code) !void {
+ try ira.irb.code.basic_block_list.append(ira.irb.current_basic_block);
+ ira.instruction_index += 1;
+
+ while (ira.instruction_index < ira.parent_basic_block.instruction_list.len) {
+ const next_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
+
+ if (!next_instruction.is_generated) {
+ try ira.addCompileError(next_instruction.span, "unreachable code");
+ break;
+ }
+ ira.instruction_index += 1;
+ }
+
+ ira.old_bb_index += 1;
+
+ var need_repeat = true;
+ while (true) {
+ while (ira.old_bb_index < old_code.basic_block_list.len) {
+ const old_bb = old_code.basic_block_list.at(ira.old_bb_index);
+ const new_bb = old_bb.child orelse {
+ ira.old_bb_index += 1;
+ continue;
+ };
+ if (new_bb.instruction_list.len != 0) {
+ ira.old_bb_index += 1;
+ continue;
+ }
+ ira.irb.current_basic_block = new_bb;
+
+ ira.startBasicBlock(old_bb, null);
+ return;
+ }
+ if (!need_repeat)
+ return;
+ need_repeat = false;
+ ira.old_bb_index = 0;
+ continue;
+ }
+ }
+
+ fn addCompileError(self: *Analyze, span: Span, comptime fmt: []const u8, args: ...) !void {
+ return self.irb.module.addCompileError(self.irb.parsed_file, span, fmt, args);
+ }
+
+ fn resolvePeerTypes(self: *Analyze, expected_type: ?*Type, peers: []const *Instruction) Analyze.Error!*Type {
+ // TODO actual implementation
+ return &Type.Void.get(self.irb.module).base;
+ }
+
+ fn implicitCast(self: *Analyze, target: *Instruction, optional_dest_type: ?*Type) Analyze.Error!*Instruction {
+ const dest_type = optional_dest_type orelse return target;
+ @panic("TODO implicitCast");
+ }
+
+ fn getCompTimeValOrNullUndefOk(self: *Analyze, target: *Instruction) ?*Value {
+ @panic("TODO getCompTimeValOrNullUndefOk");
+ }
+
+ fn getCompTimeRef(
+ self: *Analyze,
+ value: *Value,
+ ptr_mut: Value.Ptr.Mut,
+ mut: Type.Pointer.Mut,
+ volatility: Type.Pointer.Vol,
+ ptr_align: u32,
+ ) Analyze.Error!*Instruction {
+ @panic("TODO getCompTimeRef");
+ }
};
-pub async fn gen(module: *Module, body_node: *ast.Node, scope: *Scope, parsed_file: *ParsedFile) !*Code {
+pub async fn gen(
+ module: *Module,
+ body_node: *ast.Node,
+ scope: *Scope,
+ end_span: Span,
+ parsed_file: *ParsedFile,
+) !*Code {
var irb = try Builder.init(module, parsed_file);
errdefer irb.abort();
@@ -646,8 +981,61 @@ pub async fn gen(module: *Module, body_node: *ast.Node, scope: *Scope, parsed_fi
const result = try irb.genNode(body_node, scope, LVal.None);
if (!result.isNoReturn()) {
- (try Instruction.Return.build(&irb, scope, result)).setGenerated();
+ _ = try irb.buildGen(
+ Instruction.AddImplicitReturnType,
+ scope,
+ end_span,
+ Instruction.AddImplicitReturnType.Params{ .target = result },
+ );
+ _ = try irb.buildGen(
+ Instruction.Return,
+ scope,
+ end_span,
+ Instruction.Return.Params{ .return_value = result },
+ );
}
return irb.finish();
}
+
+pub async fn analyze(module: *Module, parsed_file: *ParsedFile, old_code: *Code, expected_type: ?*Type) !*Code {
+ var ira = try Analyze.init(module, parsed_file, expected_type);
+ errdefer ira.abort();
+
+ const old_entry_bb = old_code.basic_block_list.at(0);
+
+ const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
+ new_entry_bb.ref();
+
+ ira.irb.current_basic_block = new_entry_bb;
+
+ ira.startBasicBlock(old_entry_bb, null);
+
+ while (ira.old_bb_index < old_code.basic_block_list.len) {
+ const old_instruction = ira.parent_basic_block.instruction_list.at(ira.instruction_index);
+
+ if (old_instruction.ref_count == 0 and !old_instruction.hasSideEffects()) {
+ ira.instruction_index += 1;
+ continue;
+ }
+
+ const return_inst = try old_instruction.analyze(&ira);
+ // Note: if we ever modify the above to handle error.CompileError by continuing analysis,
+ // then here we want to check if ira.isCompTime() and return early if true
+
+ if (return_inst.isNoReturn()) {
+ try ira.finishBasicBlock(old_code);
+ continue;
+ }
+
+ ira.instruction_index += 1;
+ }
+
+ if (ira.src_implicit_return_type_list.len == 0) {
+ ira.irb.code.return_type = &Type.NoReturn.get(module).base;
+ return ira.irb.finish();
+ }
+
+ ira.irb.code.return_type = try ira.resolvePeerTypes(expected_type, ira.src_implicit_return_type_list.toSliceConst());
+ return ira.irb.finish();
+}
src-self-hosted/main.zig
@@ -497,7 +497,7 @@ async fn processBuildEvents(module: *Module, color: errmsg.Color) void {
},
Module.Event.Error => |err| {
std.debug.warn("build failed: {}\n", @errorName(err));
- @panic("TODO error return trace");
+ os.exit(1);
},
Module.Event.Fail => |msgs| {
for (msgs) |msg| {
src-self-hosted/module.zig
@@ -24,6 +24,7 @@ const Visib = @import("visib.zig").Visib;
const ParsedFile = @import("parsed_file.zig").ParsedFile;
const Value = @import("value.zig").Value;
const Type = Value.Type;
+const Span = errmsg.Span;
pub const Module = struct {
loop: *event.Loop,
@@ -148,13 +149,14 @@ pub const Module = struct {
Overflow,
NotSupported,
BufferTooSmall,
- Unimplemented,
+ Unimplemented, // TODO remove this one
+ SemanticAnalysisFailed, // TODO remove this one
};
pub const Event = union(enum) {
Ok,
- Fail: []*errmsg.Msg,
Error: BuildError,
+ Fail: []*errmsg.Msg,
};
pub const DarwinVersionMin = union(enum) {
@@ -413,21 +415,32 @@ pub const Module = struct {
while (true) {
// TODO directly awaiting async should guarantee memory allocation elision
// TODO also async before suspending should guarantee memory allocation elision
- (await (async self.addRootSrc() catch unreachable)) catch |err| {
- await (async self.events.put(Event{ .Error = err }) catch unreachable);
- return;
- };
+ const build_result = await (async self.addRootSrc() catch unreachable);
+
+ // this makes a handy error return trace and stack trace in debug mode
+ if (std.debug.runtime_safety) {
+ build_result catch unreachable;
+ }
+
const compile_errors = blk: {
const held = await (async self.compile_errors.acquire() catch unreachable);
defer held.release();
break :blk held.value.toOwnedSlice();
};
- if (compile_errors.len == 0) {
- await (async self.events.put(Event.Ok) catch unreachable);
- } else {
- await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
+ if (build_result) |_| {
+ if (compile_errors.len == 0) {
+ await (async self.events.put(Event.Ok) catch unreachable);
+ } else {
+ await (async self.events.put(Event{ .Fail = compile_errors }) catch unreachable);
+ }
+ } else |err| {
+ // if there's an error then the compile errors have dangling references
+ self.a().free(compile_errors);
+
+ await (async self.events.put(Event{ .Error = err }) catch unreachable);
}
+
// for now we stop after 1
return;
}
@@ -477,7 +490,7 @@ pub const Module = struct {
const fn_proto = @fieldParentPtr(ast.Node.FnProto, "base", decl);
const name = if (fn_proto.name_token) |name_token| tree.tokenSlice(name_token) else {
- try self.addCompileError(parsed_file, errmsg.Span{
+ try self.addCompileError(parsed_file, Span{
.first = fn_proto.fn_token,
.last = fn_proto.fn_token + 1,
}, "missing function name");
@@ -518,27 +531,23 @@ pub const Module = struct {
}
}
- fn addCompileError(self: *Module, parsed_file: *ParsedFile, span: errmsg.Span, comptime fmt: []const u8, args: ...) !void {
+ fn addCompileError(self: *Module, parsed_file: *ParsedFile, span: Span, comptime fmt: []const u8, args: ...) !void {
const text = try std.fmt.allocPrint(self.loop.allocator, fmt, args);
errdefer self.loop.allocator.free(text);
- try self.build_group.call(addCompileErrorAsync, self, parsed_file, span.first, span.last, text);
+ try self.build_group.call(addCompileErrorAsync, self, parsed_file, span, text);
}
async fn addCompileErrorAsync(
self: *Module,
parsed_file: *ParsedFile,
- first_token: ast.TokenIndex,
- last_token: ast.TokenIndex,
+ span: Span,
text: []u8,
) !void {
const msg = try self.loop.allocator.create(errmsg.Msg{
.path = parsed_file.realpath,
.text = text,
- .span = errmsg.Span{
- .first = first_token,
- .last = last_token,
- },
+ .span = span,
.tree = &parsed_file.tree,
});
errdefer self.loop.allocator.destroy(msg);
@@ -624,6 +633,7 @@ pub async fn resolveDecl(module: *Module, decl: *Decl) !void {
if (@atomicRmw(u8, &decl.resolution_in_progress, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) == 0) {
decl.resolution.data = await (async generateDecl(module, decl) catch unreachable);
decl.resolution.resolve();
+ return decl.resolution.data;
} else {
return (await (async decl.resolution.get() catch unreachable)).*;
}
@@ -655,12 +665,41 @@ async fn generateDeclFn(module: *Module, fn_decl: *Decl.Fn) !void {
fn_decl.value = Decl.Fn.Val{ .Ok = fn_val };
- const code = try await (async ir.gen(
+ const unanalyzed_code = (await (async ir.gen(
module,
body_node,
&fndef_scope.base,
+ Span.token(body_node.lastToken()),
+ fn_decl.base.parsed_file,
+ ) catch unreachable)) catch |err| switch (err) {
+ // This poison value should not cause the errdefers to run. It simply means
+ // that self.compile_errors is populated.
+ error.SemanticAnalysisFailed => return {},
+ else => return err,
+ };
+ defer unanalyzed_code.destroy(module.a());
+
+ if (module.verbose_ir) {
+ std.debug.warn("unanalyzed:\n");
+ unanalyzed_code.dump();
+ }
+
+ const analyzed_code = (await (async ir.analyze(
+ module,
fn_decl.base.parsed_file,
- ) catch unreachable);
- //code.dump();
- //try await (async irAnalyze(module, func) catch unreachable);
+ unanalyzed_code,
+ null,
+ ) catch unreachable)) catch |err| switch (err) {
+ // This poison value should not cause the errdefers to run. It simply means
+ // that self.compile_errors is populated.
+ error.SemanticAnalysisFailed => return {},
+ else => return err,
+ };
+ defer analyzed_code.destroy(module.a());
+
+ if (module.verbose_ir) {
+ std.debug.warn("analyzed:\n");
+ analyzed_code.dump();
+ }
+ // TODO now render to LLVM module
}
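
Note on module.zig: ir.gen and ir.analyze report diagnostics through self.compile_errors and then return error.SemanticAnalysisFailed as a poison value, so callers swallow it rather than propagating it. A reduced sketch of the idiom (runAnalysis is a hypothetical stand-in):

    const result = runAnalysis() catch |err| switch (err) {
        // poison: the compile errors were already queued, so return normally
        // and quietly, which also keeps the errdefers from running
        error.SemanticAnalysisFailed => return {},
        else => return err,
    };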
src-self-hosted/type.zig
@@ -39,6 +39,14 @@ pub const Type = struct {
}
}
+ pub fn dump(base: *const Type) void {
+ std.debug.warn("{}", @tagName(base.id));
+ }
+
+ pub fn getAbiAlignment(base: *Type, module: *Module) u32 {
+ @panic("TODO getAbiAlignment");
+ }
+
pub const Struct = struct {
base: Type,
decls: *Scope.Decls,
@@ -143,10 +151,35 @@ pub const Type = struct {
};
pub const Pointer = struct {
base: Type,
+ mut: Mut,
+ vol: Vol,
+ size: Size,
+ alignment: u32,
+
+ pub const Mut = enum {
+ Mut,
+ Const,
+ };
+ pub const Vol = enum {
+ Non,
+ Volatile,
+ };
+ pub const Size = builtin.TypeInfo.Pointer.Size;
pub fn destroy(self: *Pointer, module: *Module) void {
module.a().destroy(self);
}
+
+ pub fn get(
+ module: *Module,
+ elem_type: *Type,
+ mut: Mut,
+ vol: Vol,
+ size: Size,
+ alignment: u32,
+ ) *Pointer {
+ @panic("TODO get pointer");
+ }
};
pub const Array = struct {
base: Type,
src-self-hosted/value.zig
@@ -24,10 +24,16 @@ pub const Value = struct {
Id.Void => @fieldParentPtr(Void, "base", base).destroy(module),
Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(module),
Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(module),
+ Id.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(module),
}
}
}
+ pub fn getRef(base: *Value) *Value {
+ base.ref();
+ return base;
+ }
+
pub fn dump(base: *const Value) void {
std.debug.warn("{}", @tagName(base.id));
}
@@ -38,6 +44,7 @@ pub const Value = struct {
Void,
Bool,
NoReturn,
+ Ptr,
};
pub const Type = @import("type.zig").Type;
@@ -122,4 +129,18 @@ pub const Value = struct {
module.a().destroy(self);
}
};
+
+ pub const Ptr = struct {
+ base: Value,
+
+ pub const Mut = enum {
+ CompTimeConst,
+ CompTimeVar,
+ RunTime,
+ };
+
+ pub fn destroy(self: *Ptr, module: *Module) void {
+ module.a().destroy(self);
+ }
+ };
};
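
Note on value.zig: getRef is the sharing primitive that Const.analyze above leans on (.KnownValue = self.base.val.KnownValue.getRef()): it bumps the count and hands back the same pointer, so each holder can release it independently. A sketch (a_inst, b_inst, and some_val are placeholders; ref() is shown here, the matching release path is assumed to live elsewhere):

    // Two instructions sharing one comptime Value; each takes its own
    // reference so destroy() cannot run while either still points at it.
    a_inst.val = IrVal{ .KnownValue = some_val.getRef() };
    b_inst.val = IrVal{ .KnownValue = some_val.getRef() };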
std/event/loop.zig
@@ -382,6 +382,21 @@ pub const Loop = struct {
return async<self.allocator> S.asyncFunc(self, &handle, args);
}
+ /// Awaiting a yield lets the event loop run, starting any unstarted async operations.
+ /// Note that async operations automatically start when a function yields for any other reason,
+ /// for example, when async I/O is performed. This function is intended to be used only when
+ /// CPU bound tasks would be waiting in the event loop but never get started because no async I/O
+ /// is performed.
+ pub async fn yield(self: *Loop) void {
+ suspend |p| {
+ var my_tick_node = Loop.NextTickNode{
+ .next = undefined,
+ .data = p,
+ };
+ self.onNextTick(&my_tick_node);
+ }
+ }
+
fn workerRun(self: *Loop) void {
start_over: while (true) {
if (@atomicRmw(u8, &self.dispatch_lock, AtomicRmwOp.Xchg, 1, AtomicOrder.SeqCst) == 0) {
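
Note on Loop.yield: per its doc comment, it only matters for CPU-bound coroutines that would otherwise keep queued async work from ever starting. A usage sketch in this era's suspend/await style (crunch and heavyStep are hypothetical):

    async fn crunch(loop: *Loop) void {
        var i: usize = 0;
        while (i < 1000) : (i += 1) {
            heavyStep(i); // pure CPU work, performs no async I/O
            // let the event loop start any queued tasks before continuing
            await (async loop.yield() catch unreachable);
        }
    }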