Commit 93e78ee722
Changed files (15)
src-self-hosted/codegen.zig
@@ -78,6 +78,7 @@ pub async fn renderToLlvm(comp: *Compilation, fn_val: *Value.Fn, code: *ir.Code)
.dibuilder = dibuilder,
.context = context,
.lock = event.Lock.init(comp.loop),
+ .arena = &code.arena.allocator,
};
try renderToLlvmModule(&ofile, fn_val, code);
@@ -139,6 +140,7 @@ pub const ObjectFile = struct {
dibuilder: *llvm.DIBuilder,
context: llvm.ContextRef,
lock: event.Lock,
+ arena: *std.mem.Allocator,
fn gpa(self: *ObjectFile) *std.mem.Allocator {
return self.comp.gpa();
@@ -147,7 +149,7 @@ pub const ObjectFile = struct {
pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code) !void {
// TODO audit more of codegen.cpp:fn_llvm_value and port more logic
- const llvm_fn_type = try fn_val.base.typeof.getLlvmType(ofile);
+ const llvm_fn_type = try fn_val.base.typ.getLlvmType(ofile.arena, ofile.context);
const llvm_fn = llvm.AddFunction(
ofile.module,
fn_val.symbol_name.ptr(),
@@ -165,7 +167,7 @@ pub fn renderToLlvmModule(ofile: *ObjectFile, fn_val: *Value.Fn, code: *ir.Code)
// try addLLVMFnAttrInt(ofile, llvm_fn, "alignstack", align_stack);
//}
- const fn_type = fn_val.base.typeof.cast(Type.Fn).?;
+ const fn_type = fn_val.base.typ.cast(Type.Fn).?;
try addLLVMFnAttr(ofile, llvm_fn, "nounwind");
//add_uwtable_attr(g, fn_table_entry->llvm_value);
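Note on the codegen.zig hunks above: getLlvmType now takes an allocator and an LLVM context explicitly instead of the whole *ObjectFile, so type lowering no longer depends on codegen state. A minimal sketch of the new call pattern, in this commit's style (renderParamTypes is a hypothetical helper, not part of the change):

    fn renderParamTypes(ofile: *ObjectFile, fn_type: *Type.Fn) ![]llvm.TypeRef {
        // only the arena and the context are needed now, not ofile itself
        const llvm_types = try ofile.arena.alloc(llvm.TypeRef, fn_type.params.len);
        for (fn_type.params) |param, i| {
            llvm_types[i] = try param.typ.getLlvmType(ofile.arena, ofile.context);
        }
        return llvm_types;
    }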
src-self-hosted/compilation.zig
@@ -194,6 +194,7 @@ pub const Compilation = struct {
bool_type: *Type.Bool,
noreturn_type: *Type.NoReturn,
comptime_int_type: *Type.ComptimeInt,
+ u8_type: *Type.Int,
void_value: *Value.Void,
true_value: *Value.Bool,
@@ -203,6 +204,7 @@ pub const Compilation = struct {
target_machine: llvm.TargetMachineRef,
target_data_ref: llvm.TargetDataRef,
target_layout_str: [*]u8,
+ target_ptr_bits: u32,
/// for allocating things which have the same lifetime as this Compilation
arena_allocator: std.heap.ArenaAllocator,
@@ -223,10 +225,14 @@ pub const Compilation = struct {
primitive_type_table: TypeTable,
int_type_table: event.Locked(IntTypeTable),
+ array_type_table: event.Locked(ArrayTypeTable),
+ ptr_type_table: event.Locked(PtrTypeTable),
c_int_types: [CInt.list.len]*Type.Int,
const IntTypeTable = std.HashMap(*const Type.Int.Key, *Type.Int, Type.Int.Key.hash, Type.Int.Key.eql);
+ const ArrayTypeTable = std.HashMap(*const Type.Array.Key, *Type.Array, Type.Array.Key.hash, Type.Array.Key.eql);
+ const PtrTypeTable = std.HashMap(*const Type.Pointer.Key, *Type.Pointer, Type.Pointer.Key.hash, Type.Pointer.Key.eql);
const TypeTable = std.HashMap([]const u8, *Type, mem.hash_slice_u8, mem.eql_slice_u8);
const CompileErrList = std.ArrayList(*errmsg.Msg);
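The two new tables intern array and pointer types by structural key, the same scheme IntTypeTable already uses: each Key provides hash and eql, and access goes through the event.Locked wrapper. A sketch of the lookup half, modeled on the Type.Int.get hunk in type.zig below (the create-and-insert half is elided):

    pub async fn get(comp: *Compilation, key: Key) !*Pointer {
        const held = await (async comp.ptr_type_table.acquire() catch unreachable);
        defer held.release();

        if (held.value.get(&key)) |entry| {
            entry.value.base.base.ref();
            return entry.value;
        }
        // ...otherwise construct the type and put() it before releasing the lock
        return error.Unimplemented;
    }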
@@ -383,6 +389,8 @@ pub const Compilation = struct {
.deinit_group = event.Group(void).init(loop),
.compile_errors = event.Locked(CompileErrList).init(loop, CompileErrList.init(loop.allocator)),
.int_type_table = event.Locked(IntTypeTable).init(loop, IntTypeTable.init(loop.allocator)),
+ .array_type_table = event.Locked(ArrayTypeTable).init(loop, ArrayTypeTable.init(loop.allocator)),
+ .ptr_type_table = event.Locked(PtrTypeTable).init(loop, PtrTypeTable.init(loop.allocator)),
.c_int_types = undefined,
.meta_type = undefined,
@@ -394,10 +402,12 @@ pub const Compilation = struct {
.noreturn_type = undefined,
.noreturn_value = undefined,
.comptime_int_type = undefined,
+ .u8_type = undefined,
.target_machine = undefined,
.target_data_ref = undefined,
.target_layout_str = undefined,
+ .target_ptr_bits = target.getArchPtrBitWidth(),
.root_package = undefined,
.std_package = undefined,
@@ -409,6 +419,8 @@ pub const Compilation = struct {
});
errdefer {
comp.int_type_table.private_data.deinit();
+ comp.array_type_table.private_data.deinit();
+ comp.ptr_type_table.private_data.deinit();
comp.arena_allocator.deinit();
comp.loop.allocator.destroy(comp);
}
@@ -517,15 +529,16 @@ pub const Compilation = struct {
.name = "type",
.base = Value{
.id = Value.Id.Type,
- .typeof = undefined,
+ .typ = undefined,
.ref_count = std.atomic.Int(usize).init(3), // 3 because it references itself twice
},
.id = builtin.TypeId.Type,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.value = undefined,
});
comp.meta_type.value = &comp.meta_type.base;
- comp.meta_type.base.base.typeof = &comp.meta_type.base;
+ comp.meta_type.base.base.typ = &comp.meta_type.base;
assert((try comp.primitive_type_table.put(comp.meta_type.base.name, &comp.meta_type.base)) == null);
comp.void_type = try comp.arena().create(Type.Void{
@@ -533,10 +546,11 @@ pub const Compilation = struct {
.name = "void",
.base = Value{
.id = Value.Id.Type,
- .typeof = &Type.MetaType.get(comp).base,
+ .typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Void,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
});
assert((try comp.primitive_type_table.put(comp.void_type.base.name, &comp.void_type.base)) == null);
@@ -546,10 +560,11 @@ pub const Compilation = struct {
.name = "noreturn",
.base = Value{
.id = Value.Id.Type,
- .typeof = &Type.MetaType.get(comp).base,
+ .typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.NoReturn,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
});
assert((try comp.primitive_type_table.put(comp.noreturn_type.base.name, &comp.noreturn_type.base)) == null);
@@ -559,10 +574,11 @@ pub const Compilation = struct {
.name = "comptime_int",
.base = Value{
.id = Value.Id.Type,
- .typeof = &Type.MetaType.get(comp).base,
+ .typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.ComptimeInt,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
});
assert((try comp.primitive_type_table.put(comp.comptime_int_type.base.name, &comp.comptime_int_type.base)) == null);
@@ -572,10 +588,11 @@ pub const Compilation = struct {
.name = "bool",
.base = Value{
.id = Value.Id.Type,
- .typeof = &Type.MetaType.get(comp).base,
+ .typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Bool,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
});
assert((try comp.primitive_type_table.put(comp.bool_type.base.name, &comp.bool_type.base)) == null);
@@ -583,7 +600,7 @@ pub const Compilation = struct {
comp.void_value = try comp.arena().create(Value.Void{
.base = Value{
.id = Value.Id.Void,
- .typeof = &Type.Void.get(comp).base,
+ .typ = &Type.Void.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
@@ -591,7 +608,7 @@ pub const Compilation = struct {
comp.true_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
- .typeof = &Type.Bool.get(comp).base,
+ .typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.x = true,
@@ -600,7 +617,7 @@ pub const Compilation = struct {
comp.false_value = try comp.arena().create(Value.Bool{
.base = Value{
.id = Value.Id.Bool,
- .typeof = &Type.Bool.get(comp).base,
+ .typ = &Type.Bool.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.x = false,
@@ -609,7 +626,7 @@ pub const Compilation = struct {
comp.noreturn_value = try comp.arena().create(Value.NoReturn{
.base = Value{
.id = Value.Id.NoReturn,
- .typeof = &Type.NoReturn.get(comp).base,
+ .typ = &Type.NoReturn.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
});
@@ -620,10 +637,11 @@ pub const Compilation = struct {
.name = cint.zig_name,
.base = Value{
.id = Value.Id.Type,
- .typeof = &Type.MetaType.get(comp).base,
+ .typ = &Type.MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = builtin.TypeId.Int,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
},
.key = Type.Int.Key{
.is_signed = cint.is_signed,
@@ -634,6 +652,24 @@ pub const Compilation = struct {
comp.c_int_types[i] = c_int_type;
assert((try comp.primitive_type_table.put(cint.zig_name, &c_int_type.base)) == null);
}
+ comp.u8_type = try comp.arena().create(Type.Int{
+ .base = Type{
+ .name = "u8",
+ .base = Value{
+ .id = Value.Id.Type,
+ .typ = &Type.MetaType.get(comp).base,
+ .ref_count = std.atomic.Int(usize).init(1),
+ },
+ .id = builtin.TypeId.Int,
+ .abi_alignment = Type.AbiAlignment.init(comp.loop),
+ },
+ .key = Type.Int.Key{
+ .is_signed = false,
+ .bit_count = 8,
+ },
+ .garbage_node = undefined,
+ });
+ assert((try comp.primitive_type_table.put(comp.u8_type.base.name, &comp.u8_type.base)) == null);
}
/// This function can safely use async/await, because it manages Compilation's lifetime,
@@ -750,7 +786,7 @@ pub const Compilation = struct {
ast.Node.Id.Comptime => {
const comptime_node = @fieldParentPtr(ast.Node.Comptime, "base", decl);
- try decl_group.call(addCompTimeBlock, self, &decls.base, comptime_node);
+ try self.prelink_group.call(addCompTimeBlock, self, &decls.base, comptime_node);
},
ast.Node.Id.VarDecl => @panic("TODO"),
ast.Node.Id.FnProto => {
@@ -770,7 +806,6 @@ pub const Compilation = struct {
.name = name,
.visib = parseVisibToken(tree, fn_proto.visib_token),
.resolution = event.Future(BuildError!void).init(self.loop),
- .resolution_in_progress = 0,
.parent_scope = &decls.base,
},
.value = Decl.Fn.Val{ .Unresolved = {} },
@@ -778,16 +813,22 @@ pub const Compilation = struct {
});
errdefer self.gpa().destroy(fn_decl);
- try decl_group.call(addTopLevelDecl, self, &fn_decl.base);
+ try decl_group.call(addTopLevelDecl, self, decls, &fn_decl.base);
},
ast.Node.Id.TestDecl => @panic("TODO"),
else => unreachable,
}
}
try await (async decl_group.wait() catch unreachable);
+
+ // Now other code can rely on the decls scope having a complete list of names.
+ decls.name_future.resolve();
}
- try await (async self.prelink_group.wait() catch unreachable);
+ (await (async self.prelink_group.wait() catch unreachable)) catch |err| switch (err) {
+ error.SemanticAnalysisFailed => {},
+ else => return err,
+ };
const any_prelink_errors = blk: {
const compile_errors = await (async self.compile_errors.acquire() catch unreachable);
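The wait above now swallows error.SemanticAnalysisFailed: it is a poison value meaning diagnostics were already appended to comp.compile_errors, so the caller's job is to report them, not to propagate the error. The convention in sketch form (checkSomething is a placeholder):

    fn analyzeSomething(ira: *Analyze, inst: *Inst) !*Inst {
        if (!checkSomething(inst)) {
            try ira.addCompileError(inst.span, "something is wrong here");
            // the message is recorded; callers treat this error as already handled
            return error.SemanticAnalysisFailed;
        }
        return inst;
    }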
@@ -857,14 +898,31 @@ pub const Compilation = struct {
analyzed_code.destroy(comp.gpa());
}
- async fn addTopLevelDecl(self: *Compilation, decl: *Decl) !void {
+ async fn addTopLevelDecl(self: *Compilation, decls: *Scope.Decls, decl: *Decl) !void {
const tree = &decl.findRootScope().tree;
const is_export = decl.isExported(tree);
+ var add_to_table_resolved = false;
+ const add_to_table = async self.addDeclToTable(decls, decl) catch unreachable;
+ errdefer if (!add_to_table_resolved) cancel add_to_table; // TODO https://github.com/ziglang/zig/issues/1261
+
if (is_export) {
try self.prelink_group.call(verifyUniqueSymbol, self, decl);
try self.prelink_group.call(resolveDecl, self, decl);
}
+
+ add_to_table_resolved = true;
+ try await add_to_table;
+ }
+
+ async fn addDeclToTable(self: *Compilation, decls: *Scope.Decls, decl: *Decl) !void {
+ const held = await (async decls.table.acquire() catch unreachable);
+ defer held.release();
+
+ if (try held.value.put(decl.name, decl)) |other_decl| {
+ try self.addCompileError(decls.base.findRoot(), decl.getSpan(), "redefinition of '{}'", decl.name);
+ // TODO note: other definition here
+ }
}
fn addCompileError(self: *Compilation, root: *Scope.Root, span: Span, comptime fmt: []const u8, args: ...) !void {
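addTopLevelDecl above shows the workaround for ziglang/zig#1261: an async operation started eagerly must be cancelled on the error path unless it has already been awaited. The skeleton of that pattern (startWork is a placeholder):

    var awaited = false;
    const handle = async startWork() catch unreachable;
    errdefer if (!awaited) cancel handle; // see ziglang/zig#1261

    // ...fallible work in between...

    awaited = true;
    try await handle;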
@@ -1043,6 +1101,15 @@ pub const Compilation = struct {
return result_val.cast(Type).?;
}
+
+ /// This declaration has been blessed as going into the final code generation.
+ pub async fn resolveDecl(comp: *Compilation, decl: *Decl) !void {
+ if (await (async decl.resolution.start() catch unreachable)) |ptr| return ptr.*;
+
+ decl.resolution.data = try await (async generateDecl(comp, decl) catch unreachable);
+ decl.resolution.resolve();
+ return decl.resolution.data;
+ }
};
fn printError(comptime format: []const u8, args: ...) !void {
@@ -1062,20 +1129,6 @@ fn parseVisibToken(tree: *ast.Tree, optional_token_index: ?ast.TokenIndex) Visib
}
}
-/// This declaration has been blessed as going into the final code generation.
-pub async fn resolveDecl(comp: *Compilation, decl: *Decl) !void {
- if (await (async decl.resolution.start() catch unreachable)) |ptr| return ptr.*;
-
- decl.resolution.data = (await (async generateDecl(comp, decl) catch unreachable)) catch |err| switch (err) {
- // This poison value should not cause the errdefers to run. It simply means
- // that comp.compile_errors is populated.
- error.SemanticAnalysisFailed => {},
- else => err,
- };
- decl.resolution.resolve();
- return decl.resolution.data;
-}
-
/// The function that actually does the generation.
async fn generateDecl(comp: *Compilation, decl: *Decl) !void {
switch (decl.id) {
@@ -1089,34 +1142,27 @@ async fn generateDecl(comp: *Compilation, decl: *Decl) !void {
}
async fn generateDeclFn(comp: *Compilation, fn_decl: *Decl.Fn) !void {
- const body_node = fn_decl.fn_proto.body_node orelse @panic("TODO extern fn proto decl");
+ const body_node = fn_decl.fn_proto.body_node orelse return await (async generateDeclFnProto(comp, fn_decl) catch unreachable);
const fndef_scope = try Scope.FnDef.create(comp, fn_decl.base.parent_scope);
defer fndef_scope.base.deref(comp);
- const return_type_node = switch (fn_decl.fn_proto.return_type) {
- ast.Node.FnProto.ReturnType.Explicit => |n| n,
- ast.Node.FnProto.ReturnType.InferErrorSet => |n| n,
- };
- const return_type = try await (async comp.analyzeTypeExpr(&fndef_scope.base, return_type_node) catch unreachable);
- return_type.base.deref(comp);
-
- const is_var_args = false;
- const params = ([*]Type.Fn.Param)(undefined)[0..0];
- const fn_type = try Type.Fn.create(comp, return_type, params, is_var_args);
+ const fn_type = try await (async analyzeFnType(comp, fn_decl.base.parent_scope, fn_decl.fn_proto) catch unreachable);
defer fn_type.base.base.deref(comp);
var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
- errdefer symbol_name.deinit();
+ var symbol_name_consumed = false;
+ errdefer if (!symbol_name_consumed) symbol_name.deinit();
// The Decl.Fn owns the initial 1 reference count
const fn_val = try Value.Fn.create(comp, fn_type, fndef_scope, symbol_name);
- fn_decl.value = Decl.Fn.Val{ .Ok = fn_val };
+ fn_decl.value = Decl.Fn.Val{ .Fn = fn_val };
+ symbol_name_consumed = true;
const analyzed_code = try await (async comp.genAndAnalyzeCode(
&fndef_scope.base,
body_node,
- return_type,
+ fn_type.return_type,
) catch unreachable);
errdefer analyzed_code.destroy(comp.gpa());
@@ -1141,3 +1187,54 @@ async fn addFnToLinkSet(comp: *Compilation, fn_val: *Value.Fn) void {
fn getZigDir(allocator: *mem.Allocator) ![]u8 {
return os.getAppDataDir(allocator, "zig");
}
+
+async fn analyzeFnType(comp: *Compilation, scope: *Scope, fn_proto: *ast.Node.FnProto) !*Type.Fn {
+ const return_type_node = switch (fn_proto.return_type) {
+ ast.Node.FnProto.ReturnType.Explicit => |n| n,
+ ast.Node.FnProto.ReturnType.InferErrorSet => |n| n,
+ };
+ const return_type = try await (async comp.analyzeTypeExpr(scope, return_type_node) catch unreachable);
+ return_type.base.deref(comp);
+
+ var params = ArrayList(Type.Fn.Param).init(comp.gpa());
+ var params_consumed = false;
+ defer if (!params_consumed) {
+ for (params.toSliceConst()) |param| {
+ param.typ.base.deref(comp);
+ }
+ params.deinit();
+ };
+
+ const is_var_args = false;
+ {
+ var it = fn_proto.params.iterator(0);
+ while (it.next()) |param_node_ptr| {
+ const param_node = param_node_ptr.*.cast(ast.Node.ParamDecl).?;
+ const param_type = try await (async comp.analyzeTypeExpr(scope, param_node.type_node) catch unreachable);
+ errdefer param_type.base.deref(comp);
+ try params.append(Type.Fn.Param{
+ .typ = param_type,
+ .is_noalias = param_node.noalias_token != null,
+ });
+ }
+ }
+ const fn_type = try Type.Fn.create(comp, return_type, params.toOwnedSlice(), is_var_args);
+ params_consumed = true;
+ errdefer fn_type.base.base.deref(comp);
+
+ return fn_type;
+}
+
+async fn generateDeclFnProto(comp: *Compilation, fn_decl: *Decl.Fn) !void {
+ const fn_type = try await (async analyzeFnType(comp, fn_decl.base.parent_scope, fn_decl.fn_proto) catch unreachable);
+ defer fn_type.base.base.deref(comp);
+
+ var symbol_name = try std.Buffer.init(comp.gpa(), fn_decl.base.name);
+ var symbol_name_consumed = false;
+ defer if (!symbol_name_consumed) symbol_name.deinit();
+
+ // The Decl.Fn owns the initial 1 reference count
+ const fn_proto_val = try Value.FnProto.create(comp, fn_type, symbol_name);
+ fn_decl.value = Decl.Fn.Val{ .FnProto = fn_proto_val };
+ symbol_name_consumed = true;
+}
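generateDeclFnProto gives bodiless functions a Value.FnProto instead of hitting the old @panic("TODO extern fn proto decl"). At the source level, this is the kind of declaration it unblocks (illustrative only):

    extern fn write(fd: i32, buf: [*]const u8, count: usize) isize;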
src-self-hosted/decl.zig
@@ -15,7 +15,6 @@ pub const Decl = struct {
name: []const u8,
visib: Visib,
resolution: event.Future(Compilation.BuildError!void),
- resolution_in_progress: u8,
parent_scope: *Scope,
pub const Table = std.HashMap([]const u8, *Decl, mem.hash_slice_u8, mem.eql_slice_u8);
@@ -63,12 +62,13 @@ pub const Decl = struct {
pub const Fn = struct {
base: Decl,
value: Val,
- fn_proto: *const ast.Node.FnProto,
+ fn_proto: *ast.Node.FnProto,
// TODO https://github.com/ziglang/zig/issues/683 and then make this anonymous
- pub const Val = union {
+ pub const Val = union(enum) {
Unresolved: void,
- Ok: *Value.Fn,
+ Fn: *Value.Fn,
+ FnProto: *Value.FnProto,
};
pub fn externLibName(self: Fn, tree: *ast.Tree) ?[]const u8 {
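Making Val a union(enum) matters because analysis now switches on it, and a bare union carries no tag to dispatch on. DeclRef.analyze in ir.zig (later in this commit) is the consumer:

    const decl_val = switch (fn_decl.value) {
        Decl.Fn.Val.Unresolved => unreachable,
        Decl.Fn.Val.Fn => |fn_val| &fn_val.base,
        Decl.Fn.Val.FnProto => |fn_proto| &fn_proto.base,
    };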
src-self-hosted/errmsg.zig
@@ -16,11 +16,18 @@ pub const Span = struct {
last: ast.TokenIndex,
pub fn token(i: TokenIndex) Span {
- return Span {
+ return Span{
.first = i,
.last = i,
};
}
+
+ pub fn node(n: *ast.Node) Span {
+ return Span{
+ .first = n.firstToken(),
+ .last = n.lastToken(),
+ };
+ }
};
pub const Msg = struct {
src-self-hosted/ir.zig
@@ -11,6 +11,7 @@ const Token = std.zig.Token;
const Span = @import("errmsg.zig").Span;
const llvm = @import("llvm.zig");
const ObjectFile = @import("codegen.zig").ObjectFile;
+const Decl = @import("decl.zig").Decl;
pub const LVal = enum {
None,
@@ -30,10 +31,10 @@ pub const IrVal = union(enum) {
pub fn dump(self: IrVal) void {
switch (self) {
- IrVal.Unknown => typeof.dump(),
- IrVal.KnownType => |typeof| {
+ IrVal.Unknown => std.debug.warn("Unknown"),
+ IrVal.KnownType => |typ| {
std.debug.warn("KnownType(");
- typeof.dump();
+ typ.dump();
std.debug.warn(")");
},
IrVal.KnownValue => |value| {
@@ -108,21 +109,29 @@ pub const Inst = struct {
unreachable;
}
- pub fn analyze(base: *Inst, ira: *Analyze) Analyze.Error!*Inst {
- comptime var i = 0;
- inline while (i < @memberCount(Id)) : (i += 1) {
- if (base.id == @field(Id, @memberName(Id, i))) {
- const T = @field(Inst, @memberName(Id, i));
- return @fieldParentPtr(T, "base", base).analyze(ira);
- }
+ pub async fn analyze(base: *Inst, ira: *Analyze) Analyze.Error!*Inst {
+ switch (base.id) {
+ Id.Return => return @fieldParentPtr(Return, "base", base).analyze(ira),
+ Id.Const => return @fieldParentPtr(Const, "base", base).analyze(ira),
+ Id.Call => return @fieldParentPtr(Call, "base", base).analyze(ira),
+ Id.DeclRef => return await (async @fieldParentPtr(DeclRef, "base", base).analyze(ira) catch unreachable),
+ Id.Ref => return await (async @fieldParentPtr(Ref, "base", base).analyze(ira) catch unreachable),
+ Id.DeclVar => return @fieldParentPtr(DeclVar, "base", base).analyze(ira),
+ Id.CheckVoidStmt => return @fieldParentPtr(CheckVoidStmt, "base", base).analyze(ira),
+ Id.Phi => return @fieldParentPtr(Phi, "base", base).analyze(ira),
+ Id.Br => return @fieldParentPtr(Br, "base", base).analyze(ira),
+ Id.AddImplicitReturnType => return @fieldParentPtr(AddImplicitReturnType, "base", base).analyze(ira),
+ Id.PtrType => return await (async @fieldParentPtr(PtrType, "base", base).analyze(ira) catch unreachable),
}
- unreachable;
}
pub fn render(base: *Inst, ofile: *ObjectFile, fn_val: *Value.Fn) (error{OutOfMemory}!?llvm.ValueRef) {
switch (base.id) {
Id.Return => return @fieldParentPtr(Return, "base", base).render(ofile, fn_val),
Id.Const => return @fieldParentPtr(Const, "base", base).render(ofile, fn_val),
+ Id.Call => return @fieldParentPtr(Call, "base", base).render(ofile, fn_val),
+ Id.DeclRef => unreachable,
+ Id.PtrType => unreachable,
Id.Ref => @panic("TODO"),
Id.DeclVar => @panic("TODO"),
Id.CheckVoidStmt => @panic("TODO"),
@@ -135,7 +144,7 @@ pub const Inst = struct {
fn ref(base: *Inst, builder: *Builder) void {
base.ref_count += 1;
if (base.owner_bb != builder.current_basic_block and !base.isCompTime()) {
- base.owner_bb.ref();
+ base.owner_bb.ref(builder);
}
}
@@ -155,11 +164,51 @@ pub const Inst = struct {
}
}
+ fn getConstVal(self: *Inst, ira: *Analyze) !*Value {
+ if (self.isCompTime()) {
+ return self.val.KnownValue;
+ } else {
+ try ira.addCompileError(self.span, "unable to evaluate constant expression");
+ return error.SemanticAnalysisFailed;
+ }
+ }
+
+ fn getAsConstType(param: *Inst, ira: *Analyze) !*Type {
+ const meta_type = Type.MetaType.get(ira.irb.comp);
+ meta_type.base.base.deref(ira.irb.comp);
+
+ const inst = try param.getAsParam();
+ const casted = try ira.implicitCast(inst, &meta_type.base);
+ const val = try casted.getConstVal(ira);
+ return val.cast(Value.Type).?;
+ }
+
+ fn getAsConstAlign(param: *Inst, ira: *Analyze) !u32 {
+ return error.Unimplemented;
+ //const align_type = Type.Int.get_align(ira.irb.comp);
+ //align_type.base.base.deref(ira.irb.comp);
+
+ //const inst = try param.getAsParam();
+ //const casted = try ira.implicitCast(inst, align_type);
+ //const val = try casted.getConstVal(ira);
+
+ //uint32_t align_bytes = bigint_as_unsigned(&const_val->data.x_bigint);
+ //if (align_bytes == 0) {
+ // ir_add_error(ira, value, buf_sprintf("alignment must be >= 1"));
+ // return false;
+ //}
+
+ //if (!is_power_of_2(align_bytes)) {
+ // ir_add_error(ira, value, buf_sprintf("alignment value %" PRIu32 " is not a power of 2", align_bytes));
+ // return false;
+ //}
+ }
+
/// asserts that the type is known
fn getKnownType(self: *Inst) *Type {
switch (self.val) {
- IrVal.KnownType => |typeof| return typeof,
- IrVal.KnownValue => |value| return value.typeof,
+ IrVal.KnownType => |typ| return typ,
+ IrVal.KnownValue => |value| return value.typ,
IrVal.Unknown => unreachable,
}
}
@@ -171,8 +220,8 @@ pub const Inst = struct {
pub fn isNoReturn(base: *const Inst) bool {
switch (base.val) {
IrVal.Unknown => return false,
- IrVal.KnownValue => |x| return x.typeof.id == Type.Id.NoReturn,
- IrVal.KnownType => |typeof| return typeof.id == Type.Id.NoReturn,
+ IrVal.KnownValue => |x| return x.typ.id == Type.Id.NoReturn,
+ IrVal.KnownType => |typ| return typ.id == Type.Id.NoReturn,
}
}
@@ -196,6 +245,85 @@ pub const Inst = struct {
Phi,
Br,
AddImplicitReturnType,
+ Call,
+ DeclRef,
+ PtrType,
+ };
+
+ pub const Call = struct {
+ base: Inst,
+ params: Params,
+
+ const Params = struct {
+ fn_ref: *Inst,
+ args: []*Inst,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(self: *const Call) void {
+ std.debug.warn("#{}(", self.params.fn_ref.debug_id);
+ for (self.params.args) |arg| {
+ std.debug.warn("#{},", arg.debug_id);
+ }
+ std.debug.warn(")");
+ }
+
+ pub fn hasSideEffects(self: *const Call) bool {
+ return true;
+ }
+
+ pub fn analyze(self: *const Call, ira: *Analyze) !*Inst {
+ const fn_ref = try self.params.fn_ref.getAsParam();
+ const fn_ref_type = fn_ref.getKnownType();
+ const fn_type = fn_ref_type.cast(Type.Fn) orelse {
+ try ira.addCompileError(fn_ref.span, "type '{}' not a function", fn_ref_type.name);
+ return error.SemanticAnalysisFailed;
+ };
+
+ if (fn_type.params.len != self.params.args.len) {
+ try ira.addCompileError(
+ self.base.span,
+ "expected {} arguments, found {}",
+ fn_type.params.len,
+ self.params.args.len,
+ );
+ return error.SemanticAnalysisFailed;
+ }
+
+ const args = try ira.irb.arena().alloc(*Inst, self.params.args.len);
+ for (self.params.args) |arg, i| {
+ args[i] = try arg.getAsParam();
+ }
+ const new_inst = try ira.irb.build(Call, self.base.scope, self.base.span, Params{
+ .fn_ref = fn_ref,
+ .args = args,
+ });
+ new_inst.val = IrVal{ .KnownType = fn_type.return_type };
+ return new_inst;
+ }
+
+ pub fn render(self: *Call, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
+ const fn_ref = self.params.fn_ref.llvm_value.?;
+
+ const args = try ofile.arena.alloc(llvm.ValueRef, self.params.args.len);
+ for (self.params.args) |arg, i| {
+ args[i] = arg.llvm_value.?;
+ }
+
+ const llvm_cc = llvm.CCallConv;
+ const fn_inline = llvm.FnInline.Auto;
+
+ return llvm.BuildCall(
+ ofile.builder,
+ fn_ref,
+ args.ptr,
+ @intCast(c_uint, args.len),
+ llvm_cc,
+ fn_inline,
+ c"",
+ ) orelse error.OutOfMemory;
+ }
};
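Call.analyze checks arity against the resolved Type.Fn before the analyzed instruction is built. A hypothetical source snippet showing the new error:

    fn add(a: i32, b: i32) i32 {
        return a + b;
    }
    // add(1) -> error: expected 2 arguments, found 1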
pub const Const = struct {
@@ -254,14 +382,14 @@ pub const Inst = struct {
return ira.irb.build(Return, self.base.scope, self.base.span, Params{ .return_value = casted_value });
}
- pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) ?llvm.ValueRef {
+ pub fn render(self: *Return, ofile: *ObjectFile, fn_val: *Value.Fn) !?llvm.ValueRef {
const value = self.params.return_value.llvm_value;
const return_type = self.params.return_value.getKnownType();
if (return_type.handleIsPtr()) {
@panic("TODO");
} else {
- _ = llvm.BuildRet(ofile.builder, value);
+ _ = llvm.BuildRet(ofile.builder, value) orelse return error.OutOfMemory;
}
return null;
}
@@ -285,7 +413,7 @@ pub const Inst = struct {
return false;
}
- pub fn analyze(self: *const Ref, ira: *Analyze) !*Inst {
+ pub async fn analyze(self: *const Ref, ira: *Analyze) !*Inst {
const target = try self.params.target.getAsParam();
if (ira.getCompTimeValOrNullUndefOk(target)) |val| {
@@ -294,7 +422,6 @@ pub const Inst = struct {
Value.Ptr.Mut.CompTimeConst,
self.params.mut,
self.params.volatility,
- val.typeof.getAbiAlignment(ira.irb.comp),
);
}
@@ -304,14 +431,13 @@ pub const Inst = struct {
.volatility = self.params.volatility,
});
const elem_type = target.getKnownType();
- const ptr_type = Type.Pointer.get(
- ira.irb.comp,
- elem_type,
- self.params.mut,
- self.params.volatility,
- Type.Pointer.Size.One,
- elem_type.getAbiAlignment(ira.irb.comp),
- );
+ const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
+ .child_type = elem_type,
+ .mut = self.params.mut,
+ .vol = self.params.volatility,
+ .size = Type.Pointer.Size.One,
+ .alignment = Type.Pointer.Align.Abi,
+ }) catch unreachable);
// TODO: potentially set the hint that this is a stack pointer. But it might not be - this
// could be a ref of a global, for example
new_inst.val = IrVal{ .KnownType = &ptr_type.base };
@@ -320,6 +446,97 @@ pub const Inst = struct {
}
};
+ pub const DeclRef = struct {
+ base: Inst,
+ params: Params,
+
+ const Params = struct {
+ decl: *Decl,
+ lval: LVal,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(inst: *const DeclRef) void {}
+
+ pub fn hasSideEffects(inst: *const DeclRef) bool {
+ return false;
+ }
+
+ pub async fn analyze(self: *const DeclRef, ira: *Analyze) !*Inst {
+ (await (async ira.irb.comp.resolveDecl(self.params.decl) catch unreachable)) catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ else => return error.SemanticAnalysisFailed,
+ };
+ switch (self.params.decl.id) {
+ Decl.Id.CompTime => unreachable,
+ Decl.Id.Var => return error.Unimplemented,
+ Decl.Id.Fn => {
+ const fn_decl = @fieldParentPtr(Decl.Fn, "base", self.params.decl);
+ const decl_val = switch (fn_decl.value) {
+ Decl.Fn.Val.Unresolved => unreachable,
+ Decl.Fn.Val.Fn => |fn_val| &fn_val.base,
+ Decl.Fn.Val.FnProto => |fn_proto| &fn_proto.base,
+ };
+ switch (self.params.lval) {
+ LVal.None => {
+ return ira.irb.buildConstValue(self.base.scope, self.base.span, decl_val);
+ },
+ LVal.Ptr => return error.Unimplemented,
+ }
+ },
+ }
+ }
+ };
+
+ pub const PtrType = struct {
+ base: Inst,
+ params: Params,
+
+ const Params = struct {
+ child_type: *Inst,
+ mut: Type.Pointer.Mut,
+ vol: Type.Pointer.Vol,
+ size: Type.Pointer.Size,
+ alignment: ?*Inst,
+ };
+
+ const ir_val_init = IrVal.Init.Unknown;
+
+ pub fn dump(inst: *const PtrType) void {}
+
+ pub fn hasSideEffects(inst: *const PtrType) bool {
+ return false;
+ }
+
+ pub async fn analyze(self: *const PtrType, ira: *Analyze) !*Inst {
+ const child_type = try self.params.child_type.getAsConstType(ira);
+ // if (child_type->id == TypeTableEntryIdUnreachable) {
+ // ir_add_error(ira, &instruction->base, buf_sprintf("pointer to noreturn not allowed"));
+ // return ira->codegen->builtin_types.entry_invalid;
+ // } else if (child_type->id == TypeTableEntryIdOpaque && instruction->ptr_len == PtrLenUnknown) {
+ // ir_add_error(ira, &instruction->base, buf_sprintf("unknown-length pointer to opaque"));
+ // return ira->codegen->builtin_types.entry_invalid;
+ // }
+ const alignment = if (self.params.alignment) |align_inst| blk: {
+ const amt = try align_inst.getAsConstAlign(ira);
+ break :blk Type.Pointer.Align{ .Override = amt };
+ } else blk: {
+ break :blk Type.Pointer.Align{ .Abi = {} };
+ };
+ const ptr_type = try await (async Type.Pointer.get(ira.irb.comp, Type.Pointer.Key{
+ .child_type = child_type,
+ .mut = self.params.mut,
+ .vol = self.params.vol,
+ .size = self.params.size,
+ .alignment = alignment,
+ }) catch unreachable);
+ ptr_type.base.base.deref(ira.irb.comp);
+
+ return ira.irb.buildConstValue(self.base.scope, self.base.span, &ptr_type.base.base);
+ }
+ };
+
pub const DeclVar = struct {
base: Inst,
params: Params,
@@ -351,14 +568,21 @@ pub const Inst = struct {
const ir_val_init = IrVal.Init.Unknown;
- pub fn dump(inst: *const CheckVoidStmt) void {}
+ pub fn dump(self: *const CheckVoidStmt) void {
+ std.debug.warn("#{}", self.params.target.debug_id);
+ }
pub fn hasSideEffects(inst: *const CheckVoidStmt) bool {
return true;
}
pub fn analyze(self: *const CheckVoidStmt, ira: *Analyze) !*Inst {
- return error.Unimplemented; // TODO
+ const target = try self.params.target.getAsParam();
+ if (target.getKnownType().id != Type.Id.Void) {
+ try ira.addCompileError(self.base.span, "expression value is ignored");
+ return error.SemanticAnalysisFailed;
+ }
+ return ira.irb.buildConstVoid(self.base.scope, self.base.span, true);
}
};
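CheckVoidStmt now enforces that a statement evaluated for its side effects has type void. A hypothetical snippet that would trip it:

    fn three() i32 {
        return 3;
    }
    fn f() void {
        three(); // error: expression value is ignored
    }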
@@ -583,7 +807,7 @@ pub const BasicBlock = struct {
/// the basic block that this one derives from in analysis
parent: ?*BasicBlock,
- pub fn ref(self: *BasicBlock) void {
+ pub fn ref(self: *BasicBlock, builder: *Builder) void {
self.ref_count += 1;
}
@@ -724,8 +948,42 @@ pub const Builder = struct {
ast.Node.Id.VarDecl => return error.Unimplemented,
ast.Node.Id.Defer => return error.Unimplemented,
ast.Node.Id.InfixOp => return error.Unimplemented,
- ast.Node.Id.PrefixOp => return error.Unimplemented,
- ast.Node.Id.SuffixOp => return error.Unimplemented,
+ ast.Node.Id.PrefixOp => {
+ const prefix_op = @fieldParentPtr(ast.Node.PrefixOp, "base", node);
+ switch (prefix_op.op) {
+ ast.Node.PrefixOp.Op.AddressOf => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.ArrayType => |n| return error.Unimplemented,
+ ast.Node.PrefixOp.Op.Await => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.BitNot => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.BoolNot => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.Cancel => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.OptionalType => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.Negation => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.NegationWrap => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.Resume => return error.Unimplemented,
+ ast.Node.PrefixOp.Op.PtrType => |ptr_info| {
+ const inst = try await (async irb.genPtrType(prefix_op, ptr_info, scope) catch unreachable);
+ return irb.lvalWrap(scope, inst, lval);
+ },
+ ast.Node.PrefixOp.Op.SliceType => |ptr_info| return error.Unimplemented,
+ ast.Node.PrefixOp.Op.Try => return error.Unimplemented,
+ }
+ },
+ ast.Node.Id.SuffixOp => {
+ const suffix_op = @fieldParentPtr(ast.Node.SuffixOp, "base", node);
+ switch (suffix_op.op) {
+ @TagType(ast.Node.SuffixOp.Op).Call => |*call| {
+ const inst = try await (async irb.genCall(suffix_op, call, scope) catch unreachable);
+ return irb.lvalWrap(scope, inst, lval);
+ },
+ @TagType(ast.Node.SuffixOp.Op).ArrayAccess => |n| return error.Unimplemented,
+ @TagType(ast.Node.SuffixOp.Op).Slice => |slice| return error.Unimplemented,
+ @TagType(ast.Node.SuffixOp.Op).ArrayInitializer => |init_list| return error.Unimplemented,
+ @TagType(ast.Node.SuffixOp.Op).StructInitializer => |init_list| return error.Unimplemented,
+ @TagType(ast.Node.SuffixOp.Op).Deref => return error.Unimplemented,
+ @TagType(ast.Node.SuffixOp.Op).UnwrapOptional => return error.Unimplemented,
+ }
+ },
ast.Node.Id.Switch => return error.Unimplemented,
ast.Node.Id.While => return error.Unimplemented,
ast.Node.Id.For => return error.Unimplemented,
@@ -744,7 +1002,11 @@ pub const Builder = struct {
return irb.lvalWrap(scope, try irb.genIntLit(int_lit, scope), lval);
},
ast.Node.Id.FloatLiteral => return error.Unimplemented,
- ast.Node.Id.StringLiteral => return error.Unimplemented,
+ ast.Node.Id.StringLiteral => {
+ const str_lit = @fieldParentPtr(ast.Node.StringLiteral, "base", node);
+ const inst = try await (async irb.genStrLit(str_lit, scope) catch unreachable);
+ return irb.lvalWrap(scope, inst, lval);
+ },
ast.Node.Id.MultilineStringLiteral => return error.Unimplemented,
ast.Node.Id.CharLiteral => return error.Unimplemented,
ast.Node.Id.BoolLiteral => return error.Unimplemented,
@@ -789,6 +1051,99 @@ pub const Builder = struct {
}
}
+ async fn genCall(irb: *Builder, suffix_op: *ast.Node.SuffixOp, call: *ast.Node.SuffixOp.Op.Call, scope: *Scope) !*Inst {
+ const fn_ref = try await (async irb.genNode(suffix_op.lhs, scope, LVal.None) catch unreachable);
+
+ const args = try irb.arena().alloc(*Inst, call.params.len);
+ var it = call.params.iterator(0);
+ var i: usize = 0;
+ while (it.next()) |arg_node_ptr| : (i += 1) {
+ args[i] = try await (async irb.genNode(arg_node_ptr.*, scope, LVal.None) catch unreachable);
+ }
+
+ //bool is_async = node->data.fn_call_expr.is_async;
+ //IrInstruction *async_allocator = nullptr;
+ //if (is_async) {
+ // if (node->data.fn_call_expr.async_allocator) {
+ // async_allocator = ir_gen_node(irb, node->data.fn_call_expr.async_allocator, scope);
+ // if (async_allocator == irb->codegen->invalid_instruction)
+ // return async_allocator;
+ // }
+ //}
+
+ return irb.build(Inst.Call, scope, Span.token(suffix_op.rtoken), Inst.Call.Params{
+ .fn_ref = fn_ref,
+ .args = args,
+ });
+ //IrInstruction *fn_call = ir_build_call(irb, scope, node, nullptr, fn_ref, arg_count, args, false, FnInlineAuto, is_async, async_allocator, nullptr);
+ //return ir_lval_wrap(irb, scope, fn_call, lval);
+ }
+
+ async fn genPtrType(
+ irb: *Builder,
+ prefix_op: *ast.Node.PrefixOp,
+ ptr_info: ast.Node.PrefixOp.PtrInfo,
+ scope: *Scope,
+ ) !*Inst {
+ // TODO port more logic
+
+ //assert(node->type == NodeTypePointerType);
+ //PtrLen ptr_len = (node->data.pointer_type.star_token->id == TokenIdStar ||
+ // node->data.pointer_type.star_token->id == TokenIdStarStar) ? PtrLenSingle : PtrLenUnknown;
+ //bool is_const = node->data.pointer_type.is_const;
+ //bool is_volatile = node->data.pointer_type.is_volatile;
+ //AstNode *expr_node = node->data.pointer_type.op_expr;
+ //AstNode *align_expr = node->data.pointer_type.align_expr;
+
+ //IrInstruction *align_value;
+ //if (align_expr != nullptr) {
+ // align_value = ir_gen_node(irb, align_expr, scope);
+ // if (align_value == irb->codegen->invalid_instruction)
+ // return align_value;
+ //} else {
+ // align_value = nullptr;
+ //}
+ const child_type = try await (async irb.genNode(prefix_op.rhs, scope, LVal.None) catch unreachable);
+
+ //uint32_t bit_offset_start = 0;
+ //if (node->data.pointer_type.bit_offset_start != nullptr) {
+ // if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_start, 32, false)) {
+ // Buf *val_buf = buf_alloc();
+ // bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_start, 10);
+ // exec_add_error_node(irb->codegen, irb->exec, node,
+ // buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
+ // return irb->codegen->invalid_instruction;
+ // }
+ // bit_offset_start = bigint_as_unsigned(node->data.pointer_type.bit_offset_start);
+ //}
+
+ //uint32_t bit_offset_end = 0;
+ //if (node->data.pointer_type.bit_offset_end != nullptr) {
+ // if (!bigint_fits_in_bits(node->data.pointer_type.bit_offset_end, 32, false)) {
+ // Buf *val_buf = buf_alloc();
+ // bigint_append_buf(val_buf, node->data.pointer_type.bit_offset_end, 10);
+ // exec_add_error_node(irb->codegen, irb->exec, node,
+ // buf_sprintf("value %s too large for u32 bit offset", buf_ptr(val_buf)));
+ // return irb->codegen->invalid_instruction;
+ // }
+ // bit_offset_end = bigint_as_unsigned(node->data.pointer_type.bit_offset_end);
+ //}
+
+ //if ((bit_offset_start != 0 || bit_offset_end != 0) && bit_offset_start >= bit_offset_end) {
+ // exec_add_error_node(irb->codegen, irb->exec, node,
+ // buf_sprintf("bit offset start must be less than bit offset end"));
+ // return irb->codegen->invalid_instruction;
+ //}
+
+ return irb.build(Inst.PtrType, scope, Span.node(&prefix_op.base), Inst.PtrType.Params{
+ .child_type = child_type,
+ .mut = Type.Pointer.Mut.Mut,
+ .vol = Type.Pointer.Vol.Non,
+ .size = Type.Pointer.Size.Many,
+ .alignment = null,
+ });
+ }
+
fn isCompTime(irb: *Builder, target_scope: *Scope) bool {
if (irb.is_comptime)
return true;
@@ -847,6 +1202,56 @@ pub const Builder = struct {
return inst;
}
+ pub async fn genStrLit(irb: *Builder, str_lit: *ast.Node.StringLiteral, scope: *Scope) !*Inst {
+ const str_token = irb.root_scope.tree.tokenSlice(str_lit.token);
+ const src_span = Span.token(str_lit.token);
+
+ var bad_index: usize = undefined;
+ var buf = std.zig.parseStringLiteral(irb.comp.gpa(), str_token, &bad_index) catch |err| switch (err) {
+ error.OutOfMemory => return error.OutOfMemory,
+ error.InvalidCharacter => {
+ try irb.comp.addCompileError(
+ irb.root_scope,
+ src_span,
+ "invalid character in string literal: '{c}'",
+ str_token[bad_index],
+ );
+ return error.SemanticAnalysisFailed;
+ },
+ };
+ var buf_cleaned = false;
+ errdefer if (!buf_cleaned) irb.comp.gpa().free(buf);
+
+ if (str_token[0] == 'c') {
+ // first we add a null
+ buf = try irb.comp.gpa().realloc(u8, buf, buf.len + 1);
+ buf[buf.len - 1] = 0;
+
+ // next make an array value
+ const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
+ buf_cleaned = true;
+ defer array_val.base.deref(irb.comp);
+
+ // then make a pointer value pointing at the first element
+ const ptr_val = try await (async Value.Ptr.createArrayElemPtr(
+ irb.comp,
+ array_val,
+ Type.Pointer.Mut.Const,
+ Type.Pointer.Size.Many,
+ 0,
+ ) catch unreachable);
+ defer ptr_val.base.deref(irb.comp);
+
+ return irb.buildConstValue(scope, src_span, &ptr_val.base);
+ } else {
+ const array_val = try await (async Value.Array.createOwnedBuffer(irb.comp, buf) catch unreachable);
+ buf_cleaned = true;
+ defer array_val.base.deref(irb.comp);
+
+ return irb.buildConstValue(scope, src_span, &array_val.base);
+ }
+ }
+
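genStrLit above distinguishes plain literals, which become a Value.Array over the parsed bytes, from c"..." literals, which get a trailing null byte and are then lowered to a const many-item pointer at the array's first element. Roughly, at the source level:

    const msg = "abc";   // Value.Array over the bytes
    const cmsg = c"abc"; // bytes plus a trailing 0, then a [*]const u8
                         // Value.Ptr to element 0 of that array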
pub async fn genBlock(irb: *Builder, block: *ast.Node.Block, parent_scope: *Scope) !*Inst {
const block_scope = try Scope.Block.create(irb.comp, parent_scope);
@@ -911,7 +1316,10 @@ pub const Builder = struct {
_ = irb.build(
Inst.CheckVoidStmt,
child_scope,
- statement_value.span,
+ Span{
+ .first = statement_node.firstToken(),
+ .last = statement_node.lastToken(),
+ },
Inst.CheckVoidStmt.Params{ .target = statement_value },
);
}
@@ -1068,6 +1476,8 @@ pub const Builder = struct {
if (result) |primitive_type| {
defer primitive_type.base.deref(irb.comp);
switch (lval) {
+ // if (lval == LValPtr) {
+ // return ir_build_ref(irb, scope, node, value, false, false);
LVal.Ptr => return error.Unimplemented,
LVal.None => return irb.buildConstValue(scope, src_span, &primitive_type.base),
}
@@ -1079,15 +1489,6 @@ pub const Builder = struct {
},
error.OutOfMemory => return error.OutOfMemory,
}
- //TypeTableEntry *primitive_type = get_primitive_type(irb->codegen, variable_name);
- //if (primitive_type != nullptr) {
- // IrInstruction *value = ir_build_const_type(irb, scope, node, primitive_type);
- // if (lval == LValPtr) {
- // return ir_build_ref(irb, scope, node, value, false, false);
- // } else {
- // return value;
- // }
- //}
//VariableTableEntry *var = find_variable(irb->codegen, scope, variable_name);
//if (var) {
@@ -1098,9 +1499,12 @@ pub const Builder = struct {
// return ir_build_load_ptr(irb, scope, node, var_ptr);
//}
- //Tld *tld = find_decl(irb->codegen, scope, variable_name);
- //if (tld)
- // return ir_build_decl_ref(irb, scope, node, tld, lval);
+ if (await (async irb.findDecl(scope, name) catch unreachable)) |decl| {
+ return irb.build(Inst.DeclRef, scope, src_span, Inst.DeclRef.Params{
+ .decl = decl,
+ .lval = lval,
+ });
+ }
//if (node->owner->any_imports_failed) {
// // skip the error message since we had a failing import in this file
@@ -1251,8 +1655,26 @@ pub const Builder = struct {
const FieldType = comptime @typeOf(@field(I.Params(undefined), @memberName(I.Params, i)));
switch (FieldType) {
*Inst => @field(inst.params, @memberName(I.Params, i)).ref(self),
+ *BasicBlock => @field(inst.params, @memberName(I.Params, i)).ref(self),
?*Inst => if (@field(inst.params, @memberName(I.Params, i))) |other| other.ref(self),
- else => {},
+ []*Inst => {
+ // TODO https://github.com/ziglang/zig/issues/1269
+ for (@field(inst.params, @memberName(I.Params, i))) |other|
+ other.ref(self);
+ },
+ []*BasicBlock => {
+ // TODO https://github.com/ziglang/zig/issues/1269
+ for (@field(inst.params, @memberName(I.Params, i))) |other|
+ other.ref(self);
+ },
+ Type.Pointer.Mut,
+ Type.Pointer.Vol,
+ Type.Pointer.Size,
+ LVal,
+ *Decl,
+ => {},
+ // it's ok to add more types here, just make sure any instructions are ref'd appropriately
+ else => @compileError("unrecognized type in Params: " ++ @typeName(FieldType)),
}
}
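The ref-counting loop above now whitelists every field type that may appear in an instruction's Params; anything unlisted becomes a compile error instead of a silently missed reference. For example, a hypothetical instruction with an unhandled field type would fail to build:

    const Params = struct {
        target: *Inst,    // handled: ref'd by the *Inst arm
        name: []const u8, // unhandled: "unrecognized type in Params: []const u8"
    };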
@@ -1348,6 +1770,24 @@ pub const Builder = struct {
// is_comptime);
//// the above blocks are rendered by ir_gen after the rest of codegen
}
+
+ async fn findDecl(irb: *Builder, scope: *Scope, name: []const u8) ?*Decl {
+ var s = scope;
+ while (true) {
+ switch (s.id) {
+ Scope.Id.Decls => {
+ const decls = @fieldParentPtr(Scope.Decls, "base", s);
+ const table = await (async decls.getTableReadOnly() catch unreachable);
+ if (table.get(name)) |entry| {
+ return entry.value;
+ }
+ },
+ Scope.Id.Root => return null,
+ else => {},
+ }
+ s = s.parent.?;
+ }
+ }
};
const Analyze = struct {
@@ -1930,7 +2370,6 @@ const Analyze = struct {
ptr_mut: Value.Ptr.Mut,
mut: Type.Pointer.Mut,
volatility: Type.Pointer.Vol,
- ptr_align: u32,
) Analyze.Error!*Inst {
return error.Unimplemented;
}
@@ -1945,7 +2384,7 @@ pub async fn gen(
errdefer irb.abort();
const entry_block = try irb.createBasicBlock(scope, c"Entry");
- entry_block.ref(); // Entry block gets a reference because we enter it to begin.
+ entry_block.ref(&irb); // Entry block gets a reference because we enter it to begin.
try irb.setCursorAtEndAndAppendBlock(entry_block);
const result = try await (async irb.genNode(body_node, scope, LVal.None) catch unreachable);
@@ -1965,7 +2404,7 @@ pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type)
errdefer ira.abort();
const new_entry_bb = try ira.getNewBasicBlock(old_entry_bb, null);
- new_entry_bb.ref();
+ new_entry_bb.ref(&ira.irb);
ira.irb.current_basic_block = new_entry_bb;
@@ -1979,7 +2418,8 @@ pub async fn analyze(comp: *Compilation, old_code: *Code, expected_type: ?*Type)
continue;
}
- const return_inst = try old_instruction.analyze(&ira);
+ const return_inst = try await (async old_instruction.analyze(&ira) catch unreachable);
+ assert(return_inst.val != IrVal.Unknown); // at least the type should be known at this point
return_inst.linkToParent(old_instruction);
// Note: if we ever modify the above to handle error.CompileError by continuing analysis,
// then here we want to check if ira.isCompTime() and return early if true
src-self-hosted/llvm.zig
@@ -23,12 +23,17 @@ pub const TargetMachineRef = removeNullability(c.LLVMTargetMachineRef);
pub const TargetDataRef = removeNullability(c.LLVMTargetDataRef);
pub const DIBuilder = c.ZigLLVMDIBuilder;
+pub const ABIAlignmentOfType = c.LLVMABIAlignmentOfType;
pub const AddAttributeAtIndex = c.LLVMAddAttributeAtIndex;
pub const AddFunction = c.LLVMAddFunction;
+pub const AddGlobal = c.LLVMAddGlobal;
pub const AddModuleCodeViewFlag = c.ZigLLVMAddModuleCodeViewFlag;
pub const AddModuleDebugInfoFlag = c.ZigLLVMAddModuleDebugInfoFlag;
+pub const ArrayType = c.LLVMArrayType;
pub const ClearCurrentDebugLocation = c.ZigLLVMClearCurrentDebugLocation;
pub const ConstAllOnes = c.LLVMConstAllOnes;
+pub const ConstArray = c.LLVMConstArray;
+pub const ConstBitCast = c.LLVMConstBitCast;
pub const ConstInt = c.LLVMConstInt;
pub const ConstIntOfArbitraryPrecision = c.LLVMConstIntOfArbitraryPrecision;
pub const ConstNeg = c.LLVMConstNeg;
@@ -59,6 +64,7 @@ pub const GetEnumAttributeKindForName = c.LLVMGetEnumAttributeKindForName;
pub const GetHostCPUName = c.ZigLLVMGetHostCPUName;
pub const GetMDKindIDInContext = c.LLVMGetMDKindIDInContext;
pub const GetNativeFeatures = c.ZigLLVMGetNativeFeatures;
+pub const GetUndef = c.LLVMGetUndef;
pub const HalfTypeInContext = c.LLVMHalfTypeInContext;
pub const InitializeAllAsmParsers = c.LLVMInitializeAllAsmParsers;
pub const InitializeAllAsmPrinters = c.LLVMInitializeAllAsmPrinters;
@@ -81,14 +87,24 @@ pub const MDStringInContext = c.LLVMMDStringInContext;
pub const MetadataTypeInContext = c.LLVMMetadataTypeInContext;
pub const ModuleCreateWithNameInContext = c.LLVMModuleCreateWithNameInContext;
pub const PPCFP128TypeInContext = c.LLVMPPCFP128TypeInContext;
+pub const PointerType = c.LLVMPointerType;
+pub const SetAlignment = c.LLVMSetAlignment;
pub const SetDataLayout = c.LLVMSetDataLayout;
+pub const SetGlobalConstant = c.LLVMSetGlobalConstant;
+pub const SetInitializer = c.LLVMSetInitializer;
+pub const SetLinkage = c.LLVMSetLinkage;
pub const SetTarget = c.LLVMSetTarget;
+pub const SetUnnamedAddr = c.LLVMSetUnnamedAddr;
pub const StructTypeInContext = c.LLVMStructTypeInContext;
pub const TokenTypeInContext = c.LLVMTokenTypeInContext;
+pub const TypeOf = c.LLVMTypeOf;
pub const VoidTypeInContext = c.LLVMVoidTypeInContext;
pub const X86FP80TypeInContext = c.LLVMX86FP80TypeInContext;
pub const X86MMXTypeInContext = c.LLVMX86MMXTypeInContext;
+pub const ConstInBoundsGEP = LLVMConstInBoundsGEP;
+pub extern fn LLVMConstInBoundsGEP(ConstantVal: ValueRef, ConstantIndices: [*]ValueRef, NumIndices: c_uint) ?ValueRef;
+
pub const GetTargetFromTriple = LLVMGetTargetFromTriple;
extern fn LLVMGetTargetFromTriple(Triple: [*]const u8, T: *TargetRef, ErrorMessage: ?*[*]u8) Bool;
@@ -145,13 +161,28 @@ pub const EmitBinary = EmitOutputType.ZigLLVM_EmitBinary;
pub const EmitLLVMIr = EmitOutputType.ZigLLVM_EmitLLVMIr;
pub const EmitOutputType = c.ZigLLVM_EmitOutputType;
+pub const CCallConv = c.LLVMCCallConv;
+pub const FastCallConv = c.LLVMFastCallConv;
+pub const ColdCallConv = c.LLVMColdCallConv;
+pub const WebKitJSCallConv = c.LLVMWebKitJSCallConv;
+pub const AnyRegCallConv = c.LLVMAnyRegCallConv;
+pub const X86StdcallCallConv = c.LLVMX86StdcallCallConv;
+pub const X86FastcallCallConv = c.LLVMX86FastcallCallConv;
+pub const CallConv = c.LLVMCallConv;
+
+pub const FnInline = extern enum {
+ Auto,
+ Always,
+ Never,
+};
+
fn removeNullability(comptime T: type) type {
comptime assert(@typeId(T) == builtin.TypeId.Optional);
return T.Child;
}
pub const BuildRet = LLVMBuildRet;
-extern fn LLVMBuildRet(arg0: BuilderRef, V: ?ValueRef) ValueRef;
+extern fn LLVMBuildRet(arg0: BuilderRef, V: ?ValueRef) ?ValueRef;
pub const TargetMachineEmitToFile = ZigLLVMTargetMachineEmitToFile;
extern fn ZigLLVMTargetMachineEmitToFile(
@@ -163,3 +194,8 @@ extern fn ZigLLVMTargetMachineEmitToFile(
is_debug: bool,
is_small: bool,
) bool;
+
+pub const BuildCall = ZigLLVMBuildCall;
+extern fn ZigLLVMBuildCall(B: BuilderRef, Fn: ValueRef, Args: [*]ValueRef, NumArgs: c_uint, CC: c_uint, fn_inline: FnInline, Name: [*]const u8) ?ValueRef;
+
+pub const PrivateLinkage = c.LLVMLinkage.LLVMPrivateLinkage;
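BuildRet is re-declared rather than re-exported so its return type can become ?ValueRef; call sites then surface allocation failure explicitly, as in Return.render above:

    _ = llvm.BuildRet(ofile.builder, value) orelse return error.OutOfMemory;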
src-self-hosted/scope.zig
@@ -9,6 +9,7 @@ const Value = @import("value.zig").Value;
const ir = @import("ir.zig");
const Span = @import("errmsg.zig").Span;
const assert = std.debug.assert;
+const event = std.event;
pub const Scope = struct {
id: Id,
@@ -123,7 +124,15 @@ pub const Scope = struct {
pub const Decls = struct {
base: Scope,
- table: Decl.Table,
+
+ /// The lock must be respected for writing. However once name_future resolves,
+ /// readers can freely access it.
+ table: event.Locked(Decl.Table),
+
+ /// Once this future is resolved, the table is complete and available for unlocked
+ /// read-only access. It does not mean all the decls are resolved; it means only that
+ /// the table has all the names. Each decl in the table has its own resolution state.
+ name_future: event.Future(void),
/// Creates a Decls scope with 1 reference
pub fn create(comp: *Compilation, parent: *Scope) !*Decls {
@@ -133,15 +142,10 @@ pub const Scope = struct {
.parent = parent,
.ref_count = 1,
},
- .table = undefined,
+ .table = event.Locked(Decl.Table).init(comp.loop, Decl.Table.init(comp.gpa())),
+ .name_future = event.Future(void).init(comp.loop),
});
- errdefer comp.gpa().destroy(self);
-
- self.table = Decl.Table.init(comp.gpa());
- errdefer self.table.deinit();
-
parent.ref();
-
return self;
}
@@ -149,6 +153,11 @@ pub const Scope = struct {
self.table.deinit();
comp.gpa().destroy(self);
}
+
+ pub async fn getTableReadOnly(self: *Decls) *Decl.Table {
+ _ = await (async self.name_future.get() catch unreachable);
+ return &self.table.private_data;
+ }
};
pub const Block = struct {
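The Decls table now has two access modes: writers take the event.Locked lock during decl collection, and once name_future resolves, readers may touch the table without locking. Both sides as they appear elsewhere in this commit:

    // writer (addDeclToTable in compilation.zig):
    const held = await (async decls.table.acquire() catch unreachable);
    defer held.release();
    _ = try held.value.put(decl.name, decl);

    // reader, valid only after decls.name_future.resolve() (findDecl in ir.zig):
    const table = await (async decls.getTableReadOnly() catch unreachable);
    const existing = table.get(name);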
src-self-hosted/test.zig
@@ -14,6 +14,7 @@ test "compile errors" {
defer ctx.deinit();
try @import("../test/stage2/compile_errors.zig").addCases(&ctx);
+ //try @import("../test/stage2/compare_output.zig").addCases(&ctx);
try ctx.run();
}
src-self-hosted/type.zig
@@ -4,12 +4,17 @@ const Scope = @import("scope.zig").Scope;
const Compilation = @import("compilation.zig").Compilation;
const Value = @import("value.zig").Value;
const llvm = @import("llvm.zig");
-const ObjectFile = @import("codegen.zig").ObjectFile;
+const event = std.event;
+const Allocator = std.mem.Allocator;
+const assert = std.debug.assert;
pub const Type = struct {
base: Value,
id: Id,
name: []const u8,
+ abi_alignment: AbiAlignment,
+
+ pub const AbiAlignment = event.Future(error{OutOfMemory}!u32);
pub const Id = builtin.TypeId;
@@ -43,33 +48,37 @@ pub const Type = struct {
}
}
- pub fn getLlvmType(base: *Type, ofile: *ObjectFile) (error{OutOfMemory}!llvm.TypeRef) {
+ pub fn getLlvmType(
+ base: *Type,
+ allocator: *Allocator,
+ llvm_context: llvm.ContextRef,
+ ) (error{OutOfMemory}!llvm.TypeRef) {
switch (base.id) {
- Id.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(ofile),
- Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(ofile),
+ Id.Struct => return @fieldParentPtr(Struct, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Fn => return @fieldParentPtr(Fn, "base", base).getLlvmType(allocator, llvm_context),
Id.Type => unreachable,
Id.Void => unreachable,
- Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmType(ofile),
+ Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmType(allocator, llvm_context),
Id.NoReturn => unreachable,
- Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmType(ofile),
- Id.Float => return @fieldParentPtr(Float, "base", base).getLlvmType(ofile),
- Id.Pointer => return @fieldParentPtr(Pointer, "base", base).getLlvmType(ofile),
- Id.Array => return @fieldParentPtr(Array, "base", base).getLlvmType(ofile),
+ Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Float => return @fieldParentPtr(Float, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Pointer => return @fieldParentPtr(Pointer, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Array => return @fieldParentPtr(Array, "base", base).getLlvmType(allocator, llvm_context),
Id.ComptimeFloat => unreachable,
Id.ComptimeInt => unreachable,
Id.Undefined => unreachable,
Id.Null => unreachable,
- Id.Optional => return @fieldParentPtr(Optional, "base", base).getLlvmType(ofile),
- Id.ErrorUnion => return @fieldParentPtr(ErrorUnion, "base", base).getLlvmType(ofile),
- Id.ErrorSet => return @fieldParentPtr(ErrorSet, "base", base).getLlvmType(ofile),
- Id.Enum => return @fieldParentPtr(Enum, "base", base).getLlvmType(ofile),
- Id.Union => return @fieldParentPtr(Union, "base", base).getLlvmType(ofile),
+ Id.Optional => return @fieldParentPtr(Optional, "base", base).getLlvmType(allocator, llvm_context),
+ Id.ErrorUnion => return @fieldParentPtr(ErrorUnion, "base", base).getLlvmType(allocator, llvm_context),
+ Id.ErrorSet => return @fieldParentPtr(ErrorSet, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Enum => return @fieldParentPtr(Enum, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Union => return @fieldParentPtr(Union, "base", base).getLlvmType(allocator, llvm_context),
Id.Namespace => unreachable,
Id.Block => unreachable,
- Id.BoundFn => return @fieldParentPtr(BoundFn, "base", base).getLlvmType(ofile),
+ Id.BoundFn => return @fieldParentPtr(BoundFn, "base", base).getLlvmType(allocator, llvm_context),
Id.ArgTuple => unreachable,
- Id.Opaque => return @fieldParentPtr(Opaque, "base", base).getLlvmType(ofile),
- Id.Promise => return @fieldParentPtr(Promise, "base", base).getLlvmType(ofile),
+ Id.Opaque => return @fieldParentPtr(Opaque, "base", base).getLlvmType(allocator, llvm_context),
+ Id.Promise => return @fieldParentPtr(Promise, "base", base).getLlvmType(allocator, llvm_context),
}
}
@@ -156,16 +165,45 @@ pub const Type = struct {
base.* = Type{
.base = Value{
.id = Value.Id.Type,
- .typeof = &MetaType.get(comp).base,
+ .typ = &MetaType.get(comp).base,
.ref_count = std.atomic.Int(usize).init(1),
},
.id = id,
.name = name,
+ .abi_alignment = AbiAlignment.init(comp.loop),
};
}
- pub fn getAbiAlignment(base: *Type, comp: *Compilation) u32 {
- @panic("TODO getAbiAlignment");
+ /// If you happen to have an llvm context handy, use getAbiAlignmentInContext instead.
+ /// Otherwise, this one will grab one from the pool and then release it.
+ pub async fn getAbiAlignment(base: *Type, comp: *Compilation) !u32 {
+ if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
+
+ {
+ const held = try comp.event_loop_local.getAnyLlvmContext();
+ defer held.release(comp.event_loop_local);
+
+ const llvm_context = held.node.data;
+
+ base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
+ }
+ base.abi_alignment.resolve();
+ return base.abi_alignment.data;
+ }
+
+ /// If you have an llvm context handy, you can use it here.
+ pub async fn getAbiAlignmentInContext(base: *Type, comp: *Compilation, llvm_context: llvm.ContextRef) !u32 {
+ if (await (async base.abi_alignment.start() catch unreachable)) |ptr| return ptr.*;
+
+ base.abi_alignment.data = await (async base.resolveAbiAlignment(comp, llvm_context) catch unreachable);
+ base.abi_alignment.resolve();
+ return base.abi_alignment.data;
+ }
+
+ /// Lower level function that does the work. See getAbiAlignment.
+ async fn resolveAbiAlignment(base: *Type, comp: *Compilation, llvm_context: llvm.ContextRef) !u32 {
+ const llvm_type = try base.getLlvmType(comp.gpa(), llvm_context);
+ return @intCast(u32, llvm.ABIAlignmentOfType(comp.target_data_ref, llvm_type));
}
pub const Struct = struct {
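getAbiAlignment and resolveDecl share the same event.Future memoization idiom: the first caller gets null back from start() and computes the value; every later caller blocks inside start() until resolve(). Reduced to its core (computeAnswer is a placeholder):

    pub async fn getOnce(future: *event.Future(u32)) u32 {
        if (await (async future.start() catch unreachable)) |ptr| return ptr.*;
        // only the first caller reaches here
        future.data = computeAnswer();
        future.resolve(); // unblocks everyone else waiting in start()
        return future.data;
    }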
@@ -176,7 +214,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Struct, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Struct, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -189,7 +227,7 @@ pub const Type = struct {
pub const Param = struct {
is_noalias: bool,
- typeof: *Type,
+ typ: *Type,
};
pub fn create(comp: *Compilation, return_type: *Type, params: []Param, is_var_args: bool) !*Fn {
@@ -205,7 +243,7 @@ pub const Type = struct {
result.return_type.base.ref();
for (result.params) |param| {
- param.typeof.base.ref();
+ param.typ.base.ref();
}
return result;
}
@@ -213,20 +251,20 @@ pub const Type = struct {
pub fn destroy(self: *Fn, comp: *Compilation) void {
self.return_type.base.deref(comp);
for (self.params) |param| {
- param.typeof.base.deref(comp);
+ param.typ.base.deref(comp);
}
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Fn, ofile: *ObjectFile) !llvm.TypeRef {
+ pub fn getLlvmType(self: *Fn, allocator: *Allocator, llvm_context: llvm.ContextRef) !llvm.TypeRef {
const llvm_return_type = switch (self.return_type.id) {
- Type.Id.Void => llvm.VoidTypeInContext(ofile.context) orelse return error.OutOfMemory,
- else => try self.return_type.getLlvmType(ofile),
+ Type.Id.Void => llvm.VoidTypeInContext(llvm_context) orelse return error.OutOfMemory,
+ else => try self.return_type.getLlvmType(allocator, llvm_context),
};
- const llvm_param_types = try ofile.gpa().alloc(llvm.TypeRef, self.params.len);
- defer ofile.gpa().free(llvm_param_types);
+ const llvm_param_types = try allocator.alloc(llvm.TypeRef, self.params.len);
+ defer allocator.free(llvm_param_types);
for (llvm_param_types) |*llvm_param_type, i| {
- llvm_param_type.* = try self.params[i].typeof.getLlvmType(ofile);
+ llvm_param_type.* = try self.params[i].typ.getLlvmType(allocator, llvm_context);
}
return llvm.FunctionType(
@@ -280,7 +318,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Bool, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Bool, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -318,6 +356,11 @@ pub const Type = struct {
}
};
+ pub fn get_u8(comp: *Compilation) *Int {
+ comp.u8_type.base.base.ref();
+ return comp.u8_type;
+ }
+
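get_u8 bumps the reference count on the cached u8 type, so every call is paired with a deref. A minimal sketch of that pairing, mirroring how createOwnedBuffer in value.zig uses it below (comp is assumed in scope):

    const u8_type = Type.Int.get_u8(comp);
    defer u8_type.base.base.deref(comp);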
pub async fn get(comp: *Compilation, key: Key) !*Int {
{
const held = await (async comp.int_type_table.acquire() catch unreachable);
@@ -371,8 +414,8 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Int, ofile: *ObjectFile) !llvm.TypeRef {
- return llvm.IntTypeInContext(ofile.context, self.key.bit_count) orelse return error.OutOfMemory;
+ pub fn getLlvmType(self: *Int, allocator: *Allocator, llvm_context: llvm.ContextRef) !llvm.TypeRef {
+ return llvm.IntTypeInContext(llvm_context, self.key.bit_count) orelse return error.OutOfMemory;
}
};
@@ -383,56 +426,236 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Float, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Float, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
pub const Pointer = struct {
base: Type,
- mut: Mut,
- vol: Vol,
- size: Size,
- alignment: u32,
+ key: Key,
+ garbage_node: std.atomic.Stack(*Pointer).Node,
+
+ pub const Key = struct {
+ child_type: *Type,
+ mut: Mut,
+ vol: Vol,
+ size: Size,
+ alignment: Align,
+
+ pub fn hash(self: *const Key) u32 {
+ const align_hash = switch (self.alignment) {
+ Align.Abi => 0xf201c090,
+ Align.Override => |x| x,
+ };
+ return hash_usize(@ptrToInt(self.child_type)) *%
+ hash_enum(self.mut) *%
+ hash_enum(self.vol) *%
+ hash_enum(self.size) *%
+ align_hash;
+ }
+
+ pub fn eql(self: *const Key, other: *const Key) bool {
+ if (self.child_type != other.child_type or
+ self.mut != other.mut or
+ self.vol != other.vol or
+ self.size != other.size or
+ @TagType(Align)(self.alignment) != @TagType(Align)(other.alignment))
+ {
+ return false;
+ }
+ switch (self.alignment) {
+ Align.Abi => return true,
+ Align.Override => |x| return x == other.alignment.Override,
+ }
+ }
+ };
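Key.eql compares the alignment tags before touching the Override payload, and get() below normalizes an Override that equals the ABI alignment down to Align.Abi before any table lookup, so equal pointer types always map to a single key. A hedged sketch of the tag-first comparison, written as if inside Pointer's namespace (t is a hypothetical *Type):

    const a = Key{ .child_type = t, .mut = Mut.Const, .vol = Vol.Non, .size = Size.One, .alignment = Align.Abi };
    const b = Key{ .child_type = t, .mut = Mut.Const, .vol = Vol.Non, .size = Size.One, .alignment = Align{ .Override = 8 } };
    assert(!a.eql(&b)); // tags differ; get() would normalize b to Align.Abi if 8 matched t's ABI alignment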
pub const Mut = enum {
Mut,
Const,
};
+
pub const Vol = enum {
Non,
Volatile,
};
+
+ pub const Align = union(enum) {
+ Abi,
+ Override: u32,
+ };
+
pub const Size = builtin.TypeInfo.Pointer.Size;
pub fn destroy(self: *Pointer, comp: *Compilation) void {
+ self.garbage_node = std.atomic.Stack(*Pointer).Node{
+ .data = self,
+ .next = undefined,
+ };
+ comp.registerGarbage(Pointer, &self.garbage_node);
+ }
+
+ pub async fn gcDestroy(self: *Pointer, comp: *Compilation) void {
+ {
+ const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+ defer held.release();
+
+ _ = held.value.remove(&self.key).?;
+ }
+ self.key.child_type.base.deref(comp);
comp.gpa().destroy(self);
}
- pub fn get(
+ pub async fn getAlignAsInt(self: *Pointer, comp: *Compilation) !u32 {
+ switch (self.key.alignment) {
+ Align.Abi => return await (async self.key.child_type.getAbiAlignment(comp) catch unreachable),
+ Align.Override => |alignment| return alignment,
+ }
+ }
+
+ pub async fn get(
comp: *Compilation,
- elem_type: *Type,
- mut: Mut,
- vol: Vol,
- size: Size,
- alignment: u32,
- ) *Pointer {
- @panic("TODO get pointer");
+ key: Key,
+ ) !*Pointer {
+ var normal_key = key;
+ switch (key.alignment) {
+ Align.Abi => {},
+ Align.Override => |alignment| {
+ const abi_align = try await (async key.child_type.getAbiAlignment(comp) catch unreachable);
+ if (abi_align == alignment) {
+ normal_key.alignment = Align.Abi;
+ }
+ },
+ }
+ {
+ const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+ defer held.release();
+
+ if (held.value.get(&normal_key)) |entry| {
+ entry.value.base.base.ref();
+ return entry.value;
+ }
+ }
+
+ const self = try comp.gpa().create(Pointer{
+ .base = undefined,
+ .key = normal_key,
+ .garbage_node = undefined,
+ });
+ errdefer comp.gpa().destroy(self);
+
+ const size_str = switch (self.key.size) {
+ Size.One => "*",
+ Size.Many => "[*]",
+ Size.Slice => "[]",
+ };
+ const mut_str = switch (self.key.mut) {
+ Mut.Const => "const ",
+ Mut.Mut => "",
+ };
+ const vol_str = switch (self.key.vol) {
+ Vol.Volatile => "volatile ",
+ Vol.Non => "",
+ };
+ const name = switch (self.key.alignment) {
+ Align.Abi => try std.fmt.allocPrint(
+ comp.gpa(),
+ "{}{}{}{}",
+ size_str,
+ mut_str,
+ vol_str,
+ self.key.child_type.name,
+ ),
+ Align.Override => |alignment| try std.fmt.allocPrint(
+ comp.gpa(),
+ "{}align<{}> {}{}{}",
+ size_str,
+ alignment,
+ mut_str,
+ vol_str,
+ self.key.child_type.name,
+ ),
+ };
+ errdefer comp.gpa().free(name);
+
+ self.base.init(comp, Id.Pointer, name);
+
+ {
+ const held = await (async comp.ptr_type_table.acquire() catch unreachable);
+ defer held.release();
+
+ _ = try held.value.put(&self.key, self);
+ }
+ return self;
}
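A hedged usage sketch for the interning constructor above, mirroring the call that createArrayElemPtr makes later in this commit (elem_type and comp are assumptions):

    const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
        .child_type = elem_type,
        .mut = Type.Pointer.Mut.Const,
        .vol = Type.Pointer.Vol.Non,
        .size = Type.Pointer.Size.One,
        .alignment = Type.Pointer.Align.Abi,
    }) catch unreachable);
    defer ptr_type.base.base.deref(comp);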
- pub fn getLlvmType(self: *Pointer, ofile: *ObjectFile) llvm.TypeRef {
- @panic("TODO");
+ pub fn getLlvmType(self: *Pointer, allocator: *Allocator, llvm_context: llvm.ContextRef) !llvm.TypeRef {
+ const elem_llvm_type = try self.key.child_type.getLlvmType(allocator, llvm_context);
+ return llvm.PointerType(elem_llvm_type, 0) orelse return error.OutOfMemory;
}
};
pub const Array = struct {
base: Type,
+ key: Key,
+ garbage_node: std.atomic.Stack(*Array).Node,
+
+ pub const Key = struct {
+ elem_type: *Type,
+ len: usize,
+
+ pub fn hash(self: *const Key) u32 {
+ return hash_usize(@ptrToInt(self.elem_type)) *% hash_usize(self.len);
+ }
+
+ pub fn eql(self: *const Key, other: *const Key) bool {
+ return self.elem_type == other.elem_type and self.len == other.len;
+ }
+ };
pub fn destroy(self: *Array, comp: *Compilation) void {
+ self.key.elem_type.base.deref(comp);
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Array, ofile: *ObjectFile) llvm.TypeRef {
- @panic("TODO");
+ pub async fn get(comp: *Compilation, key: Key) !*Array {
+ key.elem_type.base.ref();
+ errdefer key.elem_type.base.deref(comp);
+
+ {
+ const held = await (async comp.array_type_table.acquire() catch unreachable);
+ defer held.release();
+
+ if (held.value.get(&key)) |entry| {
+ entry.value.base.base.ref();
+ return entry.value;
+ }
+ }
+
+ const self = try comp.gpa().create(Array{
+ .base = undefined,
+ .key = key,
+ .garbage_node = undefined,
+ });
+ errdefer comp.gpa().destroy(self);
+
+ const name = try std.fmt.allocPrint(comp.gpa(), "[{}]{}", key.len, key.elem_type.name);
+ errdefer comp.gpa().free(name);
+
+ self.base.init(comp, Id.Array, name);
+
+ {
+ const held = await (async comp.array_type_table.acquire() catch unreachable);
+ defer held.release();
+
+ _ = try held.value.put(&self.key, self);
+ }
+ return self;
+ }
+
+ pub fn getLlvmType(self: *Array, allocator: *Allocator, llvm_context: llvm.ContextRef) !llvm.TypeRef {
+ const elem_llvm_type = try self.key.elem_type.getLlvmType(allocator, llvm_context);
+ return llvm.ArrayType(elem_llvm_type, @intCast(c_uint, self.key.len)) orelse return error.OutOfMemory;
}
};
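Array types go through the same intern-then-name flow. A minimal sketch requesting [16]u8, assuming u8_type was obtained via Type.Int.get_u8 as above:

    const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
        .elem_type = &u8_type.base,
        .len = 16,
    }) catch unreachable);
    defer array_type.base.base.deref(comp);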
@@ -481,7 +704,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Optional, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Optional, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -493,7 +716,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *ErrorUnion, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *ErrorUnion, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -505,7 +728,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *ErrorSet, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *ErrorSet, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -517,7 +740,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Enum, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Enum, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -529,7 +752,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Union, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Union, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -557,7 +780,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *BoundFn, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *BoundFn, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -577,7 +800,7 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Opaque, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Opaque, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
@@ -589,8 +812,33 @@ pub const Type = struct {
comp.gpa().destroy(self);
}
- pub fn getLlvmType(self: *Promise, ofile: *ObjectFile) llvm.TypeRef {
+ pub fn getLlvmType(self: *Promise, allocator: *Allocator, llvm_context: llvm.ContextRef) llvm.TypeRef {
@panic("TODO");
}
};
};
+
+fn hash_usize(x: usize) u32 {
+ return switch (@sizeOf(usize)) {
+ 4 => x,
+ 8 => @truncate(u32, x *% 0xad44ee2d8e3fc13d),
+ else => @compileError("implement this hash function"),
+ };
+}
+
+fn hash_enum(x: var) u32 {
+ const rands = []u32{
+ 0x85ebf64f,
+ 0x3fcb3211,
+ 0x240a4e8e,
+ 0x40bb0e3c,
+ 0x78be45af,
+ 0x1ca98e37,
+ 0xec56053a,
+ 0x906adc48,
+ 0xd4fe9763,
+ 0x54c80dac,
+ };
+ comptime assert(@memberCount(@typeOf(x)) < rands.len);
+ return rands[@enumToInt(x)];
+}
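A quick sanity sketch of the two helpers: hash_usize must be deterministic, and hash_enum must map distinct tags to distinct constants from the table above (written as a test in the same file, so the private functions are in scope):

    test "type hash helpers" {
        assert(hash_usize(123) == hash_usize(123));
        assert(hash_enum(Type.Pointer.Mut.Mut) != hash_enum(Type.Pointer.Mut.Const));
    }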
src-self-hosted/value.zig
@@ -11,7 +11,7 @@ const assert = std.debug.assert;
/// If there is only 1 ref then a write need not copy.
pub const Value = struct {
id: Id,
- typeof: *Type,
+ typ: *Type,
ref_count: std.atomic.Int(usize),
/// Thread-safe
@@ -22,23 +22,25 @@ pub const Value = struct {
/// Thread-safe
pub fn deref(base: *Value, comp: *Compilation) void {
if (base.ref_count.decr() == 1) {
- base.typeof.base.deref(comp);
+ base.typ.base.deref(comp);
switch (base.id) {
Id.Type => @fieldParentPtr(Type, "base", base).destroy(comp),
Id.Fn => @fieldParentPtr(Fn, "base", base).destroy(comp),
+ Id.FnProto => @fieldParentPtr(FnProto, "base", base).destroy(comp),
Id.Void => @fieldParentPtr(Void, "base", base).destroy(comp),
Id.Bool => @fieldParentPtr(Bool, "base", base).destroy(comp),
Id.NoReturn => @fieldParentPtr(NoReturn, "base", base).destroy(comp),
Id.Ptr => @fieldParentPtr(Ptr, "base", base).destroy(comp),
Id.Int => @fieldParentPtr(Int, "base", base).destroy(comp),
+ Id.Array => @fieldParentPtr(Array, "base", base).destroy(comp),
}
}
}
pub fn setType(base: *Value, new_type: *Type, comp: *Compilation) void {
- base.typeof.base.deref(comp);
+ base.typ.base.deref(comp);
new_type.base.ref();
- base.typeof = new_type;
+ base.typ = new_type;
}
pub fn getRef(base: *Value) *Value {
@@ -59,11 +61,13 @@ pub const Value = struct {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => @panic("TODO"),
+ Id.FnProto => return @fieldParentPtr(FnProto, "base", base).getLlvmConst(ofile),
Id.Void => return null,
Id.Bool => return @fieldParentPtr(Bool, "base", base).getLlvmConst(ofile),
Id.NoReturn => unreachable,
- Id.Ptr => @panic("TODO"),
+ Id.Ptr => return @fieldParentPtr(Ptr, "base", base).getLlvmConst(ofile),
Id.Int => return @fieldParentPtr(Int, "base", base).getLlvmConst(ofile),
+ Id.Array => return @fieldParentPtr(Array, "base", base).getLlvmConst(ofile),
}
}
@@ -81,26 +85,87 @@ pub const Value = struct {
switch (base.id) {
Id.Type => unreachable,
Id.Fn => unreachable,
+ Id.FnProto => unreachable,
Id.Void => unreachable,
Id.Bool => unreachable,
Id.NoReturn => unreachable,
Id.Ptr => unreachable,
+ Id.Array => unreachable,
Id.Int => return &(try @fieldParentPtr(Int, "base", base).copy(comp)).base,
}
}
+ pub const Parent = union(enum) {
+ None,
+ BaseStruct: BaseStruct,
+ BaseArray: BaseArray,
+ BaseUnion: *Value,
+ BaseScalar: *Value,
+
+ pub const BaseStruct = struct {
+ val: *Value,
+ field_index: usize,
+ };
+
+ pub const BaseArray = struct {
+ val: *Value,
+ elem_index: usize,
+ };
+ };
+
pub const Id = enum {
Type,
Fn,
Void,
Bool,
NoReturn,
+ Array,
Ptr,
Int,
+ FnProto,
};
pub const Type = @import("type.zig").Type;
+ pub const FnProto = struct {
+ base: Value,
+
+ /// The main external name that is used in the .o file.
+ /// TODO https://github.com/ziglang/zig/issues/265
+ symbol_name: Buffer,
+
+ pub fn create(comp: *Compilation, fn_type: *Type.Fn, symbol_name: Buffer) !*FnProto {
+ const self = try comp.gpa().create(FnProto{
+ .base = Value{
+ .id = Value.Id.FnProto,
+ .typ = &fn_type.base,
+ .ref_count = std.atomic.Int(usize).init(1),
+ },
+ .symbol_name = symbol_name,
+ });
+ fn_type.base.base.ref();
+ return self;
+ }
+
+ pub fn destroy(self: *FnProto, comp: *Compilation) void {
+ self.symbol_name.deinit();
+ comp.gpa().destroy(self);
+ }
+
+ pub fn getLlvmConst(self: *FnProto, ofile: *ObjectFile) !?llvm.ValueRef {
+ const llvm_fn_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
+ const llvm_fn = llvm.AddFunction(
+ ofile.module,
+ self.symbol_name.ptr(),
+ llvm_fn_type,
+ ) orelse return error.OutOfMemory;
+
+ // TODO port more logic from codegen.cpp:fn_llvm_value
+
+ return llvm_fn;
+ }
+ };
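FnProto lets the compiler emit an extern declaration before (or without) a body. A hedged sketch, where puts_fn_type is a hypothetical *Type.Fn already interned elsewhere; note that create takes ownership of the Buffer:

    var symbol_name = try Buffer.init(comp.gpa(), "puts");
    const proto = try Value.FnProto.create(comp, puts_fn_type, symbol_name);
    defer proto.base.deref(comp);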
+
pub const Fn = struct {
base: Value,
@@ -135,7 +200,7 @@ pub const Value = struct {
const self = try comp.gpa().create(Fn{
.base = Value{
.id = Value.Id.Fn,
- .typeof = &fn_type.base,
+ .typ = &fn_type.base,
.ref_count = std.atomic.Int(usize).init(1),
},
.fndef_scope = fndef_scope,
@@ -224,6 +289,8 @@ pub const Value = struct {
pub const Ptr = struct {
base: Value,
+ special: Special,
+ mut: Mut,
pub const Mut = enum {
CompTimeConst,
@@ -231,25 +298,210 @@ pub const Value = struct {
RunTime,
};
+ pub const Special = union(enum) {
+ Scalar: *Value,
+ BaseArray: BaseArray,
+ BaseStruct: BaseStruct,
+ HardCodedAddr: u64,
+ Discard,
+ };
+
+ pub const BaseArray = struct {
+ val: *Value,
+ elem_index: usize,
+ };
+
+ pub const BaseStruct = struct {
+ val: *Value,
+ field_index: usize,
+ };
+
+ pub async fn createArrayElemPtr(
+ comp: *Compilation,
+ array_val: *Array,
+ mut: Type.Pointer.Mut,
+ size: Type.Pointer.Size,
+ elem_index: usize,
+ ) !*Ptr {
+ array_val.base.ref();
+ errdefer array_val.base.deref(comp);
+
+ const elem_type = array_val.base.typ.cast(Type.Array).?.key.elem_type;
+ const ptr_type = try await (async Type.Pointer.get(comp, Type.Pointer.Key{
+ .child_type = elem_type,
+ .mut = mut,
+ .vol = Type.Pointer.Vol.Non,
+ .size = size,
+ .alignment = Type.Pointer.Align.Abi,
+ }) catch unreachable);
+ var ptr_type_consumed = false;
+ errdefer if (!ptr_type_consumed) ptr_type.base.base.deref(comp);
+
+ const self = try comp.gpa().create(Value.Ptr{
+ .base = Value{
+ .id = Value.Id.Ptr,
+ .typ = &ptr_type.base,
+ .ref_count = std.atomic.Int(usize).init(1),
+ },
+ .special = Special{
+ .BaseArray = BaseArray{
+ .val = &array_val.base,
+ .elem_index = elem_index,
+ },
+ },
+ .mut = Mut.CompTimeConst,
+ });
+ ptr_type_consumed = true;
+ errdefer comp.gpa().destroy(self);
+
+ return self;
+ }
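A hedged call-site sketch for the constructor above, assuming array_val is a *Value.Array already in scope:

    const ptr_val = try await (async Value.Ptr.createArrayElemPtr(
        comp,
        array_val,
        Type.Pointer.Mut.Const,
        Type.Pointer.Size.Many,
        0,
    ) catch unreachable);
    defer ptr_val.base.deref(comp);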
+
pub fn destroy(self: *Ptr, comp: *Compilation) void {
comp.gpa().destroy(self);
}
+
+ pub fn getLlvmConst(self: *Ptr, ofile: *ObjectFile) !?llvm.ValueRef {
+ const llvm_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
+ // TODO carefully port the logic from codegen.cpp:gen_const_val_ptr
+ switch (self.special) {
+ Special.Scalar => |scalar| @panic("TODO"),
+ Special.BaseArray => |base_array| {
+ // TODO put this in one .o file only, and after that, generate extern references to it
+ const array_llvm_value = (try base_array.val.getLlvmConst(ofile)).?;
+ const ptr_bit_count = ofile.comp.target_ptr_bits;
+ const usize_llvm_type = llvm.IntTypeInContext(ofile.context, ptr_bit_count) orelse return error.OutOfMemory;
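+ // Two GEP indices: the first (0) steps through the pointer to the global
+ // array itself; the second selects the element, yielding &array[elem_index].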
+ const indices = []llvm.ValueRef{
+ llvm.ConstNull(usize_llvm_type) orelse return error.OutOfMemory,
+ llvm.ConstInt(usize_llvm_type, base_array.elem_index, 0) orelse return error.OutOfMemory,
+ };
+ return llvm.ConstInBoundsGEP(
+ array_llvm_value,
+ &indices,
+ @intCast(c_uint, indices.len),
+ ) orelse return error.OutOfMemory;
+ },
+ Special.BaseStruct => |base_struct| @panic("TODO"),
+ Special.HardCodedAddr => |addr| @panic("TODO"),
+ Special.Discard => unreachable,
+ }
+ }
+ };
+
+ pub const Array = struct {
+ base: Value,
+ special: Special,
+
+ pub const Special = union(enum) {
+ Undefined,
+ OwnedBuffer: []u8,
+ Explicit: Data,
+ };
+
+ pub const Data = struct {
+ parent: Parent,
+ elements: []*Value,
+ };
+
+ /// Takes ownership of buffer
+ pub async fn createOwnedBuffer(comp: *Compilation, buffer: []u8) !*Array {
+ const u8_type = Type.Int.get_u8(comp);
+ defer u8_type.base.base.deref(comp);
+
+ const array_type = try await (async Type.Array.get(comp, Type.Array.Key{
+ .elem_type = &u8_type.base,
+ .len = buffer.len,
+ }) catch unreachable);
+ errdefer array_type.base.base.deref(comp);
+
+ const self = try comp.gpa().create(Value.Array{
+ .base = Value{
+ .id = Value.Id.Array,
+ .typ = &array_type.base,
+ .ref_count = std.atomic.Int(usize).init(1),
+ },
+ .special = Special{ .OwnedBuffer = buffer },
+ });
+ errdefer comp.gpa().destroy(self);
+
+ return self;
+ }
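A hedged sketch of handing parsed string bytes to the constructor above (parsed_bytes is a hypothetical []u8 that the caller owns until this call succeeds):

    const array_val = try await (async Value.Array.createOwnedBuffer(comp, parsed_bytes) catch unreachable);
    defer array_val.base.deref(comp);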
+
+ pub fn destroy(self: *Array, comp: *Compilation) void {
+ switch (self.special) {
+ Special.Undefined => {},
+ Special.OwnedBuffer => |buf| {
+ comp.gpa().free(buf);
+ },
+ Special.Explicit => {},
+ }
+ comp.gpa().destroy(self);
+ }
+
+ pub fn getLlvmConst(self: *Array, ofile: *ObjectFile) !?llvm.ValueRef {
+ switch (self.special) {
+ Special.Undefined => {
+ const llvm_type = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
+ return llvm.GetUndef(llvm_type);
+ },
+ Special.OwnedBuffer => |buf| {
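+ // Emit the buffer as a private, unnamed_addr, constant global,
+ // the same shape clang gives C string literals.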
+ const dont_null_terminate = 1;
+ const llvm_str_init = llvm.ConstStringInContext(
+ ofile.context,
+ buf.ptr,
+ @intCast(c_uint, buf.len),
+ dont_null_terminate,
+ ) orelse return error.OutOfMemory;
+ const str_init_type = llvm.TypeOf(llvm_str_init);
+ const global = llvm.AddGlobal(ofile.module, str_init_type, c"") orelse return error.OutOfMemory;
+ llvm.SetInitializer(global, llvm_str_init);
+ llvm.SetLinkage(global, llvm.PrivateLinkage);
+ llvm.SetGlobalConstant(global, 1);
+ llvm.SetUnnamedAddr(global, 1);
+ llvm.SetAlignment(global, llvm.ABIAlignmentOfType(ofile.comp.target_data_ref, str_init_type));
+ return global;
+ },
+ Special.Explicit => @panic("TODO"),
+ }
+
+ //{
+ // uint64_t len = type_entry->data.array.len;
+ // if (const_val->data.x_array.special == ConstArraySpecialUndef) {
+ // return LLVMGetUndef(type_entry->type_ref);
+ // }
+
+ // LLVMValueRef *values = allocate<LLVMValueRef>(len);
+ // LLVMTypeRef element_type_ref = type_entry->data.array.child_type->type_ref;
+ // bool make_unnamed_struct = false;
+ // for (uint64_t i = 0; i < len; i += 1) {
+ // ConstExprValue *elem_value = &const_val->data.x_array.s_none.elements[i];
+ // LLVMValueRef val = gen_const_val(g, elem_value, "");
+ // values[i] = val;
+ // make_unnamed_struct = make_unnamed_struct || is_llvm_value_unnamed_type(elem_value->type, val);
+ // }
+ // if (make_unnamed_struct) {
+ // return LLVMConstStruct(values, len, true);
+ // } else {
+ // return LLVMConstArray(element_type_ref, values, (unsigned)len);
+ // }
+ //}
+ }
};
pub const Int = struct {
base: Value,
big_int: std.math.big.Int,
- pub fn createFromString(comp: *Compilation, typeof: *Type, base: u8, value: []const u8) !*Int {
+ pub fn createFromString(comp: *Compilation, typ: *Type, base: u8, value: []const u8) !*Int {
const self = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
- .typeof = typeof,
+ .typ = typ,
.ref_count = std.atomic.Int(usize).init(1),
},
.big_int = undefined,
});
- typeof.base.ref();
+ typ.base.ref();
errdefer comp.gpa().destroy(self);
self.big_int = try std.math.big.Int.init(comp.gpa());
@@ -261,9 +513,9 @@ pub const Value = struct {
}
pub fn getLlvmConst(self: *Int, ofile: *ObjectFile) !?llvm.ValueRef {
- switch (self.base.typeof.id) {
+ switch (self.base.typ.id) {
Type.Id.Int => {
- const type_ref = try self.base.typeof.getLlvmType(ofile);
+ const type_ref = try self.base.typ.getLlvmType(ofile.arena, ofile.context);
if (self.big_int.len == 0) {
return llvm.ConstNull(type_ref);
}
@@ -286,13 +538,13 @@ pub const Value = struct {
}
pub fn copy(old: *Int, comp: *Compilation) !*Int {
- old.base.typeof.base.ref();
- errdefer old.base.typeof.base.deref(comp);
+ old.base.typ.base.ref();
+ errdefer old.base.typ.base.deref(comp);
const new = try comp.gpa().create(Value.Int{
.base = Value{
.id = Value.Id.Int,
- .typeof = old.base.typeof,
+ .typ = old.base.typ,
.ref_count = std.atomic.Int(usize).init(1),
},
.big_int = undefined,
std/event/group.zig
@@ -76,6 +76,7 @@ pub fn Group(comptime ReturnType: type) type {
/// Wait for all the calls and promises of the group to complete.
/// Thread-safe.
+ /// Safe to call any number of times.
pub async fn wait(self: *Self) ReturnType {
// TODO catch unreachable because the allocation can be grouped with
// the coro frame allocation
std/zig/index.zig
@@ -2,6 +2,7 @@ const tokenizer = @import("tokenizer.zig");
pub const Token = tokenizer.Token;
pub const Tokenizer = tokenizer.Tokenizer;
pub const parse = @import("parse.zig").parse;
+pub const parseStringLiteral = @import("parse_string_literal.zig").parseStringLiteral;
pub const render = @import("render.zig").render;
pub const ast = @import("ast.zig");
@@ -10,4 +11,6 @@ test "std.zig tests" {
_ = @import("parse.zig");
_ = @import("render.zig");
_ = @import("tokenizer.zig");
+ _ = @import("parse_string_literal.zig");
}
+
std/zig/parse_string_literal.zig
@@ -0,0 +1,76 @@
+const std = @import("../index.zig");
+const assert = std.debug.assert;
+
+const State = enum {
+ Start,
+ Backslash,
+};
+
+pub const ParseStringLiteralError = error{
+ OutOfMemory,
+
+ /// When this is returned, bad_index is set to the offset of the invalid character within bytes.
+ InvalidCharacter,
+};
+
+/// caller owns returned memory
+pub fn parseStringLiteral(
+ allocator: *std.mem.Allocator,
+ bytes: []const u8,
+ bad_index: *usize, // populated if error.InvalidCharacter is returned
+) ParseStringLiteralError![]u8 {
+ const first_index = if (bytes[0] == 'c') usize(2) else usize(1);
+ assert(bytes[bytes.len - 1] == '"');
+
+ var list = std.ArrayList(u8).init(allocator);
+ errdefer list.deinit();
+
+ const slice = bytes[first_index..];
+ try list.ensureCapacity(slice.len - 1);
+
+ var state = State.Start;
+ for (slice) |b, index| {
+ switch (state) {
+ State.Start => switch (b) {
+ '\\' => state = State.Backslash,
+ '\n' => {
+ bad_index.* = first_index + index;
+ return error.InvalidCharacter;
+ },
+ '"' => return list.toOwnedSlice(),
+ else => try list.append(b),
+ },
+ State.Backslash => switch (b) {
+ 'x' => @panic("TODO"),
+ 'u' => @panic("TODO"),
+ 'U' => @panic("TODO"),
+ 'n' => {
+ try list.append('\n');
+ state = State.Start;
+ },
+ 'r' => {
+ try list.append('\r');
+ state = State.Start;
+ },
+ '\\' => {
+ try list.append('\\');
+ state = State.Start;
+ },
+ 't' => {
+ try list.append('\t');
+ state = State.Start;
+ },
+ '"' => {
+ try list.append('"');
+ state = State.Start;
+ },
+ else => {
+ bad_index.* = first_index + index;
+ return error.InvalidCharacter;
+ },
+ },
+ else => unreachable,
+ }
+ }
+ unreachable;
+}
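A minimal test sketch for the new parser, written as if placed in parse_string_literal.zig so its names are in scope; the use of std.debug.global_allocator is an assumption about the surrounding test setup:

    test "parseStringLiteral basic escape" {
        var bad_index: usize = undefined;
        const result = parseStringLiteral(std.debug.global_allocator, "\"hi\\n\"", &bad_index) catch unreachable;
        defer std.debug.global_allocator.free(result);
        assert(std.mem.eql(u8, result, "hi\n"));
    }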
std/zig/tokenizer.zig
@@ -73,6 +73,7 @@ pub const Token = struct {
return null;
}
+ /// TODO remove this enum
const StrLitKind = enum {
Normal,
C,
CMakeLists.txt
@@ -624,6 +624,7 @@ set(ZIG_STD_FILES
"zig/ast.zig"
"zig/index.zig"
"zig/parse.zig"
+ "zig/parse_string_literal.zig"
"zig/render.zig"
"zig/tokenizer.zig"
)