Commit 757d13d784
Changed files (3)
src-self-hosted/ir/text.zig
@@ -31,6 +31,7 @@ pub const Inst = struct {
primitive,
fntype,
intcast,
+ bitcast,
};
pub fn TagToType(tag: Tag) type {
@@ -48,6 +49,7 @@ pub const Inst = struct {
.primitive => Primitive,
.fntype => FnType,
.intcast => IntCast,
+ .bitcast => BitCast,
};
}
@@ -258,6 +260,17 @@ pub const Inst = struct {
},
kw_args: struct {},
};
+
+ pub const BitCast = struct {
+ pub const base_tag = Tag.bitcast;
+ base: Inst,
+
+ positionals: struct {
+ dest_type: *Inst,
+ operand: *Inst,
+ },
+ kw_args: struct {},
+ };
};
pub const ErrorMsg = struct {
@@ -331,6 +344,7 @@ pub const Module = struct {
.primitive => return self.writeInstToStreamGeneric(stream, .primitive, decl, inst_table),
.fntype => return self.writeInstToStreamGeneric(stream, .fntype, decl, inst_table),
.intcast => return self.writeInstToStreamGeneric(stream, .intcast, decl, inst_table),
+ .bitcast => return self.writeInstToStreamGeneric(stream, .bitcast, decl, inst_table),
}
}
@@ -957,6 +971,19 @@ const EmitZIR = struct {
};
break :blk &new_inst.base;
},
+ .bitcast => blk: {
+ const old_inst = inst.cast(ir.Inst.BitCast).?;
+ const new_inst = try self.arena.allocator.create(Inst.BitCast);
+ new_inst.* = .{
+ .base = .{ .src = inst.src, .tag = Inst.BitCast.base_tag },
+ .positionals = .{
+ .dest_type = try self.emitType(inst.src, inst.ty),
+ .operand = try self.resolveInst(&inst_table, old_inst.args.operand),
+ },
+ .kw_args = .{},
+ };
+ break :blk &new_inst.base;
+ },
};
try instructions.append(new_inst);
try inst_table.putNoClobber(inst, new_inst);
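
Since Inst.BitCast mirrors IntCast (two positionals, dest_type then operand, and an empty kw_args struct), the writer hook added above should serialize it in the same shape as the existing instructions. A hedged sketch of how such a line of ZIR text might render, where %0 is taken to name the destination type and %1 the runtime operand (the numbering and surrounding context are invented for illustration; the exact spelling is whatever writeInstToStreamGeneric produces):

    %2 = bitcast(%0, %1)
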
src-self-hosted/codegen.zig
@@ -81,6 +81,7 @@ const Function = struct {
.constant => unreachable, // excluded from function bodies
.assembly => return self.genAsm(inst.cast(ir.Inst.Assembly).?),
.ptrtoint => return self.genPtrToInt(inst.cast(ir.Inst.PtrToInt).?),
+ .bitcast => return self.genBitCast(inst.cast(ir.Inst.BitCast).?),
}
}
@@ -282,7 +283,26 @@ const Function = struct {
.rsi => switch (mcv) {
.none, .unreach => unreachable,
.immediate => return self.fail(src, "TODO implement x86_64 genSetReg %rsi = immediate", .{}),
- .embedded_in_code => return self.fail(src, "TODO implement x86_64 genSetReg %rsi = embedded_in_code", .{}),
+ .embedded_in_code => |code_offset| {
+ // Examples:
+ // lea rsi, [rip + 0x01020304]
+ // lea rsi, [rip - 7]
+ // f: 48 8d 35 04 03 02 01 lea rsi,[rip+0x1020304] # 102031a <_start+0x102031a>
+ // 16: 48 8d 35 f9 ff ff ff lea rsi,[rip+0xfffffffffffffff9] # 16 <_start+0x16>
+ //
+ // We need the offset from RIP as a signed i32, in two's complement.
+ // The instruction is 7 bytes long and RIP points to the next instruction.
+ try self.code.resize(self.code.items.len + 7);
+ const rip = self.code.items.len;
+ const big_offset = @intCast(i64, code_offset) - @intCast(i64, rip);
+ const offset = @intCast(i32, big_offset);
+ self.code.items[self.code.items.len - 7] = 0x48;
+ self.code.items[self.code.items.len - 6] = 0x8d;
+ self.code.items[self.code.items.len - 5] = 0x35;
+ const imm_ptr = self.code.items[self.code.items.len - 4 ..][0..4];
+ mem.writeIntLittle(i32, imm_ptr, offset);
+ return;
+ },
.register => return self.fail(src, "TODO implement x86_64 genSetReg %rsi = register", .{}),
},
.rdx => switch (mcv) {
@@ -302,6 +322,11 @@ const Function = struct {
return self.resolveInst(inst.args.ptr);
}
+ fn genBitCast(self: *Function, inst: *ir.Inst.BitCast) !MCValue {
+ const operand = try self.resolveInst(inst.args.operand);
+ return operand;
+ }
+
fn resolveInst(self: *Function, inst: *ir.Inst) !MCValue {
if (self.inst_table.getValue(inst)) |mcv| {
return mcv;
@@ -344,6 +369,8 @@ const Function = struct {
}
return MCValue{ .immediate = typed_value.val.toUnsignedInt() };
},
+ .ComptimeInt => unreachable, // semantic analysis prevents this
+ .ComptimeFloat => unreachable, // semantic analysis prevents this
else => return self.fail(src, "TODO implement const of type '{}'", .{typed_value.ty}),
}
}
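
A note on the genSetReg hunk above, to make the displacement arithmetic concrete. The opcode bytes are standard x86-64: 0x48 is the REX.W prefix, 0x8d is lea, and ModRM byte 0x35 selects rsi as the destination with RIP-relative disp32 addressing. Plugging in the second objdump line from the comment (treating the dump's addresses as offsets into self.code):

    the lea starts at 0x16 and is 7 bytes, so after the resize rip = 0x1d
    code_offset = 0x16
    big_offset  = 0x16 - 0x1d = -7
    offset      = -7 as an i32, written little-endian: f9 ff ff ff
    emitted     = 48 8d 35 f9 ff ff ff   (matches the dump)

genBitCast further down is deliberately a no-op at this level: it forwards the operand's MCValue unchanged, so a value that is embedded_in_code stays that way across a bitcast and is only materialized into a register by a path like this one.
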
src-self-hosted/ir.zig
@@ -24,6 +24,7 @@ pub const Inst = struct {
constant,
assembly,
ptrtoint,
+ bitcast,
};
pub fn cast(base: *Inst, comptime T: type) ?*T {
@@ -45,6 +46,7 @@ pub const Inst = struct {
.assembly,
.ptrtoint,
+ .bitcast,
=> null,
};
}
@@ -84,6 +86,15 @@ pub const Inst = struct {
ptr: *Inst,
},
};
+
+ pub const BitCast = struct {
+ pub const base_tag = Tag.bitcast;
+
+ base: Inst,
+ args: struct {
+ operand: *Inst,
+ },
+ };
};
pub const TypedValue = struct {
@@ -234,7 +245,7 @@ const Analyze = struct {
fn resolveConstString(self: *Analyze, func: ?*Fn, old_inst: *text.Inst) ![]u8 {
const new_inst = try self.resolveInst(func, old_inst);
const wanted_type = Type.initTag(.const_slice_u8);
- const coerced_inst = try self.coerce(wanted_type, new_inst);
+ const coerced_inst = try self.coerce(func, wanted_type, new_inst);
const val = try self.resolveConstValue(coerced_inst);
return val.toAllocatedBytes(&self.arena.allocator);
}
@@ -242,7 +253,7 @@ const Analyze = struct {
fn resolveType(self: *Analyze, func: ?*Fn, old_inst: *text.Inst) !Type {
const new_inst = try self.resolveInst(func, old_inst);
const wanted_type = Type.initTag(.@"type");
- const coerced_inst = try self.coerce(wanted_type, new_inst);
+ const coerced_inst = try self.coerce(func, wanted_type, new_inst);
const val = try self.resolveConstValue(coerced_inst);
return val.toType();
}
@@ -409,6 +420,7 @@ const Analyze = struct {
.primitive => return self.analyzeInstPrimitive(func, old_inst.cast(text.Inst.Primitive).?),
.fntype => return self.analyzeInstFnType(func, old_inst.cast(text.Inst.FnType).?),
.intcast => return self.analyzeInstIntCast(func, old_inst.cast(text.Inst.IntCast).?),
+ .bitcast => return self.analyzeInstBitCast(func, old_inst.cast(text.Inst.BitCast).?),
}
}
@@ -472,7 +484,7 @@ const Analyze = struct {
fn analyzeInstAs(self: *Analyze, func: ?*Fn, as: *text.Inst.As) InnerError!*Inst {
const dest_type = try self.resolveType(func, as.positionals.dest_type);
const new_inst = try self.resolveInst(func, as.positionals.value);
- return self.coerce(dest_type, new_inst);
+ return self.coerce(func, dest_type, new_inst);
}
fn analyzeInstPtrToInt(self: *Analyze, func: ?*Fn, ptrtoint: *text.Inst.PtrToInt) InnerError!*Inst {
@@ -545,12 +557,18 @@ const Analyze = struct {
}
if (dest_is_comptime_int or new_inst.value() != null) {
- return self.coerce(dest_type, new_inst);
+ return self.coerce(func, dest_type, new_inst);
}
return self.fail(intcast.base.src, "TODO implement analyze widen or shorten int", .{});
}
+ fn analyzeInstBitCast(self: *Analyze, func: ?*Fn, inst: *text.Inst.BitCast) InnerError!*Inst {
+ const dest_type = try self.resolveType(func, inst.positionals.dest_type);
+ const operand = try self.resolveInst(func, inst.positionals.operand);
+ return self.bitcast(func, dest_type, operand);
+ }
+
fn analyzeInstDeref(self: *Analyze, func: ?*Fn, deref: *text.Inst.Deref) InnerError!*Inst {
const ptr = try self.resolveInst(func, deref.positionals.ptr);
const elem_ty = switch (ptr.ty.zigTypeTag()) {
@@ -583,7 +601,8 @@ const Analyze = struct {
elem.* = try self.resolveConstString(func, assembly.kw_args.clobbers[i]);
}
for (args) |*elem, i| {
- elem.* = try self.resolveInst(func, assembly.kw_args.args[i]);
+ const arg = try self.resolveInst(func, assembly.kw_args.args[i]);
+ elem.* = try self.coerce(func, Type.initTag(.usize), arg);
}
const f = try self.requireFunctionBody(func, assembly.base.src);
@@ -602,10 +621,14 @@ const Analyze = struct {
return self.addNewInstArgs(f, unreach.base.src, Type.initTag(.noreturn), Inst.Unreach, {});
}
- fn coerce(self: *Analyze, dest_type: Type, inst: *Inst) !*Inst {
+ fn coerce(self: *Analyze, func: ?*Fn, dest_type: Type, inst: *Inst) !*Inst {
+ // If the types are the same, we can return the operand.
+ if (dest_type.eql(inst.ty))
+ return inst;
+
const in_memory_result = coerceInMemoryAllowed(dest_type, inst.ty);
if (in_memory_result == .ok) {
- return self.bitcast(dest_type, inst);
+ return self.bitcast(func, dest_type, inst);
}
// *[N]T to []T
@@ -634,12 +657,14 @@ const Analyze = struct {
return self.fail(inst.src, "TODO implement type coercion", .{});
}
- fn bitcast(self: *Analyze, dest_type: Type, inst: *Inst) !*Inst {
+ fn bitcast(self: *Analyze, func: ?*Fn, dest_type: Type, inst: *Inst) !*Inst {
if (inst.value()) |val| {
// Keep the comptime Value representation; take the new type.
return self.constInst(inst.src, .{ .ty = dest_type, .val = val });
}
- return self.fail(inst.src, "TODO implement runtime bitcast", .{});
+ // TODO validate the type size and emit the other compile errors
+ const f = try self.requireFunctionBody(func, inst.src);
+ return self.addNewInstArgs(f, inst.src, dest_type, Inst.BitCast, Inst.Args(Inst.BitCast){ .operand = inst });
}
fn coerceArrayPtrToSlice(self: *Analyze, dest_type: Type, inst: *Inst) !*Inst {
@@ -713,7 +738,7 @@ pub fn main() anyerror!void {
std.process.exit(1);
}
- const output_zir = false;
+ const output_zir = true;
if (output_zir) {
var new_zir_module = try text.emit_zir(allocator, analyzed_module);
defer new_zir_module.deinit(allocator);
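
To summarize the control flow that the coerce/bitcast hunk above introduces, here is a minimal, self-contained Zig sketch of just the decision being made. The names CoercePath and coercePath are invented for illustration; the coerceInMemoryAllowed check and the non-bitcast coercion paths (such as *[N]T to []T) are deliberately left out.

    const std = @import("std");
    const assert = std.debug.assert;

    // Invented for this sketch; not the compiler's actual API.
    const CoercePath = enum {
        reuse_operand, // new early-out: dest_type.eql(inst.ty)
        comptime_rewrap, // keep the comptime Value, take the new type
        runtime_bitcast, // append an Inst.BitCast to the function body
    };

    fn coercePath(types_equal: bool, has_comptime_value: bool) CoercePath {
        if (types_equal) return .reuse_operand;
        if (has_comptime_value) return .comptime_rewrap;
        return .runtime_bitcast;
    }

    pub fn main() void {
        assert(coercePath(true, false) == .reuse_operand);
        assert(coercePath(false, true) == .comptime_rewrap);
        assert(coercePath(false, false) == .runtime_bitcast);
    }

The runtime branch is the new behavior: before this commit, a bitcast with no comptime value was a hard "TODO implement runtime bitcast" failure; now it requires a function body and appends an Inst.BitCast, which codegen.zig then resolves to the operand's own MCValue.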