Commit 821eb595f4

Jacob Young <jacobly0@users.noreply.github.com>
2023-04-03 20:14:55
x86_64: implement cmp_lt_errors_len
1 parent 9fd4608
Changed files (3):
  src/arch/x86_64/CodeGen.zig
  src/codegen.zig
  test/behavior/cast.zig
src/arch/x86_64/CodeGen.zig
@@ -5650,9 +5650,62 @@ fn airCmpVector(self: *Self, inst: Air.Inst.Index) !void {
 
 fn airCmpLtErrorsLen(self: *Self, inst: Air.Inst.Index) !void {
     const un_op = self.air.instructions.items(.data)[inst].un_op;
-    const operand = try self.resolveInst(un_op);
-    _ = operand;
-    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else return self.fail("TODO implement airCmpLtErrorsLen for {}", .{self.target.cpu.arch});
+    const result: MCValue = if (self.liveness.isUnused(inst)) .dead else result: {
+        const addr_reg = try self.register_manager.allocReg(null, gp);
+        const addr_lock = self.register_manager.lockRegAssumeUnused(addr_reg);
+        defer self.register_manager.unlockReg(addr_lock);
+
+        if (self.bin_file.cast(link.File.Elf)) |elf_file| {
+            const atom_index = try elf_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const got_addr = elf_file.getAtom(atom_index).getOffsetTableAddress(elf_file);
+            try self.asmRegisterMemory(.mov, addr_reg.to64(), Memory.sib(.qword, .{
+                .base = .ds,
+                .disp = @intCast(i32, got_addr),
+            }));
+        } else if (self.bin_file.cast(link.File.Coff)) |coff_file| {
+            const atom_index = try coff_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const sym_index = coff_file.getAtom(atom_index).getSymbolIndex().?;
+            try self.genSetReg(Type.usize, addr_reg, .{ .linker_load = .{
+                .type = .got,
+                .sym_index = sym_index,
+            } });
+        } else if (self.bin_file.cast(link.File.MachO)) |macho_file| {
+            const atom_index = try macho_file.getOrCreateAtomForLazySymbol(
+                .{ .kind = .const_data, .ty = Type.anyerror },
+                4, // dword alignment
+            );
+            const sym_index = macho_file.getAtom(atom_index).getSymbolIndex().?;
+            try self.genSetReg(Type.usize, addr_reg, .{ .linker_load = .{
+                .type = .got,
+                .sym_index = sym_index,
+            } });
+        } else {
+            return self.fail("TODO implement airErrorName for x86_64 {s}", .{@tagName(self.bin_file.tag)});
+        }
+
+        try self.spillEflagsIfOccupied();
+        self.eflags_inst = inst;
+
+        const op_ty = self.air.typeOf(un_op);
+        const op_abi_size = @intCast(u32, op_ty.abiSize(self.target.*));
+        const op_mcv = try self.resolveInst(un_op);
+        const dst_reg = switch (op_mcv) {
+            .register => |reg| reg,
+            else => try self.copyToTmpRegister(op_ty, op_mcv),
+        };
+        try self.asmRegisterMemory(
+            .cmp,
+            registerAlias(dst_reg, op_abi_size),
+            Memory.sib(Memory.PtrSize.fromSize(op_abi_size), .{ .base = addr_reg }),
+        );
+        break :result .{ .eflags = .b };
+    };
     return self.finishAir(inst, result, .{ un_op, .none, .none });
 }
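The new path resolves the GOT address of the lazily generated `anyerror` data (whose first dword, after this commit, holds the error-name count) and emits a single `cmp` of the operand against that dword, publishing the result as the `below` eflags condition. A minimal sketch of the check this lowers to, with `error_name_count` standing in for the dword loaded through `addr_reg` (illustrative names only, not the backend's actual API):

// Sketch: the semantic check behind cmp_lt_errors_len. The backend emits
// `cmp reg, [table]`, where the table's first dword is the error-name count;
// the `.b` ("below") condition is the unsigned less-than result.
fn cmpLtErrorsLen(err_int: u16, error_name_count: u32) bool {
    return err_int < error_name_count;
}

test "cmp_lt_errors_len is an unsigned bounds check" {
    const expect = @import("std").testing.expect;
    try expect(cmpLtErrorsLen(1, 3));
    try expect(!cmpLtErrorsLen(3, 3));
}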
 
@@ -8027,12 +8080,12 @@ fn airErrorName(self: *Self, inst: Air.Inst.Index) !void {
         try self.asmRegisterMemory(.mov, start_reg.to32(), Memory.sib(.dword, .{
             .base = addr_reg.to64(),
             .scale_index = .{ .scale = 4, .index = err_reg.to64() },
-            .disp = 0,
+            .disp = 4,
         }));
         try self.asmRegisterMemory(.mov, end_reg.to32(), Memory.sib(.dword, .{
             .base = addr_reg.to64(),
             .scale_index = .{ .scale = 4, .index = err_reg.to64() },
-            .disp = 4,
+            .disp = 8,
         }));
         try self.asmRegisterRegister(.sub, end_reg.to32(), start_reg.to32());
         try self.asmRegisterMemory(.lea, start_reg.to64(), Memory.sib(.byte, .{
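Because the count dword is now prepended to the table, the start/end offsets that airErrorName reads move from displacements 0 and 4 to 4 and 8. A rough Zig rendering of that lookup, assuming a slice over the raw little-endian blob (the helper name and signature are illustrative, not part of the backend):

const std = @import("std");

// Sketch of the lookup the adjusted airErrorName performs on the blob laid
// out by generateLazySymbol: [count][offset per name][end offset][name bytes].
fn errorNameSlice(table: []const u8, err: u32) []const u8 {
    // disp = 4: skip the count dword, then index the offset table by the error value.
    const start = std.mem.readIntLittle(u32, table[4 + 4 * err ..][0..4]);
    // disp = 8: the following entry is where the next name's bytes start.
    const end = std.mem.readIntLittle(u32, table[8 + 4 * err ..][0..4]);
    // Each name is followed by a NUL byte, so the length is end - start - 1.
    return table[start .. end - 1];
}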
src/codegen.zig
@@ -124,13 +124,17 @@ pub fn generateLazySymbol(
 
     if (lazy_sym.kind == .const_data and lazy_sym.ty.isAnyError()) {
         const err_names = mod.error_name_list.items;
-        try code.resize(err_names.len * 4);
-        for (err_names, 0..) |err_name, index| {
-            mem.writeInt(u32, code.items[index * 4 ..][0..4], @intCast(u32, code.items.len), endian);
+        mem.writeInt(u32, try code.addManyAsArray(4), @intCast(u32, err_names.len), endian);
+        var offset = code.items.len;
+        try code.resize((1 + err_names.len + 1) * 4);
+        for (err_names) |err_name| {
+            mem.writeInt(u32, code.items[offset..][0..4], @intCast(u32, code.items.len), endian);
+            offset += 4;
             try code.ensureUnusedCapacity(err_name.len + 1);
             code.appendSliceAssumeCapacity(err_name);
             code.appendAssumeCapacity(0);
         }
+        mem.writeInt(u32, code.items[offset..][0..4], @intCast(u32, code.items.len), endian);
         return Result.ok;
     } else return .{ .fail = try ErrorMsg.create(
         bin_file.allocator,
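The reshaped `anyerror` blob now starts with the name count (which cmp_lt_errors_len compares against) and ends with a trailing offset, so the last name's length can be computed the same way as the others. A standalone sketch of the same construction using a plain std.ArrayList, little-endian only and outside the linker plumbing (function name and signature are illustrative):

const std = @import("std");

// Sketch of the layout generateLazySymbol now emits for anyerror:
//   dword 0        name count
//   dwords 1..N    byte offset of each name within the blob
//   dword N+1      offset just past the last name's bytes
//   remainder      the NUL-terminated name bytes
fn buildErrorNameTable(gpa: std.mem.Allocator, names: []const []const u8) ![]u8 {
    var code = std.ArrayList(u8).init(gpa);
    errdefer code.deinit();

    std.mem.writeIntLittle(u32, try code.addManyAsArray(4), @intCast(u32, names.len));
    var offset = code.items.len;
    // Reserve the offset table: one dword per name plus the trailing end offset.
    try code.resize(code.items.len + (names.len + 1) * 4);
    for (names) |name| {
        std.mem.writeIntLittle(u32, code.items[offset..][0..4], @intCast(u32, code.items.len));
        offset += 4;
        try code.appendSlice(name);
        try code.append(0);
    }
    std.mem.writeIntLittle(u32, code.items[offset..][0..4], @intCast(u32, code.items.len));
    return code.toOwnedSlice();
}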
test/behavior/cast.zig
@@ -402,7 +402,6 @@ test "expected [*c]const u8, found [*:0]const u8" {
 
 test "explicit cast from integer to error type" {
     if (builtin.zig_backend == .stage2_wasm) return error.SkipZigTest;
-    if (builtin.zig_backend == .stage2_x86_64) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
     if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
     if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO